content — string, lengths 0 to 1.55M
# encoding=utf8 <import_from_stmt>collections defaultdict<import_from_stmt>metrics.webqsp.utils *<import_from_stmt>rdflib.graph Graph<line_sep>kg_files=[f"third_party/webqsp/freebase150k_part{i}.txt"<for>i range(3)]<line_sep>kg_str="".join([open(f).read()<for>f kg_files])<line_sep>g=Graph()<line_sep>g.parse(data=kg_str format="nt")<def_stmt>execute_sparql sparql<block_start><try_stmt><block_start>qres=g.query(sparql)<line_sep>answers=[str(a[0])<for>a qres]<line_sep><return>answers<block_end><except_stmt><block_start><return>[]<block_end><block_end><def_stmt>compute_answers_F1 pred gold<block_start>ans_ents=[e[0]<for>e gold['answers']]<try_stmt><block_start>sparql=lisp_to_sparql(pred)<line_sep>pred_ents=execute_sparql(sparql)<block_end><except_stmt><block_start>pred_ents=[]<block_end>tp=len([p<for>p pred_ents<if>p<in>ans_ents])<line_sep>P=tp/len(pred_ents)<if>len(pred_ents)<else>0<line_sep>R=tp/len(ans_ents)<if>len(ans_ents)<else>0<line_sep>F1=2<times>(P<times>R)/(P+R)<if>(P+R)<else>0<line_sep><return>F1<block_end><class_stmt>EvaluateTool(object)<block_start><def_stmt>__init__ self args<block_start>self.args=args<block_end><def_stmt>evaluate self preds golds section<block_start>eval_dict=defaultdict(float)<for_stmt>pred,gold zip(preds golds)<block_start>eval_dict["F1"]<augadd>compute_answers_F1(pred gold)<block_end><for_stmt>key eval_dict# print (key, eval_dict[key], '/', len(golds)) <block_start>eval_dict[key]=eval_dict[key]/len(golds)<if>len(golds)<else>0<block_end><return>eval_dict<block_end><block_end>
<import_from_stmt>django.test TestCase<class_stmt>TestDummy(TestCase)<block_start><def_stmt>test_one_plus_one self<block_start><assert_stmt>1+1<eq>2<block_end><block_end>
<import_from_stmt>.exceptions *# NOQA: F403 <import_from_stmt>.routing MATCH_ALL Route<import_from_stmt>.errors *# NOQA: F403 <import_from_stmt>.view LonaView<import_from_stmt>.app LonaApp<line_sep>VERSION=(1 8 5)<line_sep>VERSION_STRING='.'.join(str(i)<for>i VERSION)<line_sep>
<import_stmt>numpy<as>np<import_from_stmt>scipy.io.wavfile read<import_stmt>torch<import_from_stmt>pathlib Path<def_stmt>get_mask_from_lengths lengths<block_start>max_len=torch.max(lengths).item()<line_sep>ids=torch.arange(0 max_len device=lengths.device)<line_sep>mask=(ids<l>lengths.unsqueeze(1)).bool()<line_sep><return>mask<block_end><def_stmt>load_wav_to_torch full_path<block_start>sampling_rate,data=read(full_path)<line_sep><return>torch.FloatTensor(data.astype(np.float32)) sampling_rate<block_end><def_stmt>load_filepaths_and_text filename split="|"<block_start>root=str(Path(__file__).parent)<with_stmt>open(filename encoding='utf-8')<as>f<block_start>filepaths_and_text=[]<for_stmt>line f<block_start>filename,*text=line.strip().split(split)<line_sep>filename=f'{root}/{filename}'<line_sep>filepaths_and_text.append((filename *text))<block_end><block_end><return>filepaths_and_text<block_end><def_stmt>to_gpu x<block_start>x=x.contiguous()<if_stmt>torch.cuda.is_available()<block_start>x=x.cuda(non_blocking=<true>)<block_end><return>torch.autograd.Variable(x)<block_end>
# coding: utf-8 """ OpenAPI Petstore This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501 The version of the OpenAPI document: 1.0.0 Generated by: https://openapi-generator.tech """<import_from_future_stmt> absolute_import<import_stmt>sys<import_stmt>unittest<import_stmt>petstore_api<try_stmt><block_start><import_from_stmt>petstore_api.model animal<block_end><except_stmt>ImportError<block_start>animal=sys.modules['petstore_api.model.animal']<block_end><try_stmt><block_start><import_from_stmt>petstore_api.model dog_all_of<block_end><except_stmt>ImportError<block_start>dog_all_of=sys.modules['petstore_api.model.dog_all_of']<block_end><import_from_stmt>petstore_api.model.dog Dog<class_stmt>TestDog(unittest.TestCase)<block_start>"""Dog unit test stubs"""<def_stmt>setUp self<block_start><pass><block_end><def_stmt>tearDown self<block_start><pass><block_end><def_stmt>testDog self<block_start>"""Test Dog This will fail because additional_properties_type is None in Animal and it must be defined as any type to allow in the property breed which is not defined in Animal, it is defined in Dog """<line_sep># make an instance of dog, a composed schema model class_name='Dog'<line_sep>color='white'<line_sep>breed='<NAME>'<with_stmt>self.assertRaises(petstore_api.exceptions.ApiValueError)<block_start>dog=Dog(class_name=class_name color=color breed=breed)<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
""" This example extract just the quotes, you end up with a structure like: [quote0, quote1, ...] Note: It uses beautifulsoup4 :) """<import_from_stmt>sukhoi MinerBS4 core<class_stmt>QuoteMiner(MinerBS4)<block_start><def_stmt>run self dom<block_start>elems=dom.find_all('div' {'class':'quote'})<line_sep>self.extend(list(map(self.extract_quote elems)))<line_sep>elem=dom.find('li' {'class' 'next'})<if_stmt>elem<block_start>self.next(elem.find('a').get('href'))<block_end><block_end><def_stmt>extract_quote self elem<block_start>quote=elem.find('span' {'class':'text'})<line_sep><return>quote.text<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>URL='http://quotes.toscrape.com/'<line_sep>quotes=QuoteMiner(URL)<line_sep>core.gear.mainloop()<line_sep>print(quotes)<block_end>
<import_stmt>copy<import_stmt>torch<import_from_stmt>fedml_api.standalone.fedavg.client Client<class_stmt>Client(Client)<block_start><def_stmt>train self global_round_idx group_round_idx w<block_start>self.model.load_state_dict(w)<line_sep>self.model.to(self.device)<if_stmt>self.args.client_optimizer<eq>"sgd"<block_start>optimizer=torch.optim.SGD(self.model.parameters() lr=self.args.lr)<block_end><else_stmt><block_start>optimizer=torch.optim.Adam(filter(<lambda>p:p.requires_grad self.model.parameters()) lr=self.args.lr weight_decay=self.args.wd amsgrad=<true>)<block_end>w_list=[]<for_stmt>epoch range(self.args.epochs)<block_start><for_stmt>x,labels self.local_training_data<block_start>x,labels=x.to(self.device) labels.to(self.device)<line_sep>self.model.zero_grad()<line_sep>log_probs=self.model(x)<line_sep>loss=self.criterion(log_probs labels)<line_sep>loss.backward()<line_sep>optimizer.step()<block_end>global_epoch=global_round_idx<times>self.args.group_comm_round<times>self.args.epochs+group_round_idx<times>self.args.epochs+epoch<if_stmt>global_epoch%self.args.frequency_of_the_test<eq>0<or>epoch<eq>self.args.epochs-1<block_start>w_list.append((global_epoch copy.deepcopy(self.model.state_dict())))<block_end><block_end><return>w_list<block_end><block_end>
<import_from_stmt>driver SPI<line_sep>print("-------------------spi test--------------------")<line_sep>spi=SPI()<line_sep>spi.open("SPI0")<line_sep>readBuf=bytearray(3)<line_sep>writeBuf=bytearray([0x9f])<line_sep>print(writeBuf)<line_sep>print(readBuf)<line_sep>value=spi.sendRecv(writeBuf readBuf)<line_sep>print(value)<line_sep>print(writeBuf)<line_sep>print(readBuf)<line_sep>spi.close()<line_sep>print("-------------------spi test--------------------")<line_sep>
<import_from_stmt>argparse ArgumentParser<import_from_stmt>benchmark_utils PATH<import_from_stmt>benchmarker Benchmarker<import_from_stmt>platform system<line_sep>""" Image capture benchmarks """<def_stmt>granular <block_start>output="| Test | FPS |\n| --- | --- |\n"<line_sep>rows=[]<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img" row="--images --passes _img"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_id" row="--boxes --images --passes _id"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img" hi_res=<true> row="--images --passes _img --hi_res"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img" png=<true> row="--images --passes _img --png"))<line_sep>b.start()<line_sep>b.run(boxes=<true> images=<true> passes="_img_id" row="--boxes --images --passes _img_id")<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img" hi_res=<true> size=1024 row="--images --passes _img --hi_res --size 1024"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_id" hi_res=<true> size=1024 row="--images --passes _id --hi_res --size 1024"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img_id" hi_res=<true> size=1024 row="--images --passes _img_id --hi_res --size 1024"))<line_sep>b.start()<line_sep>rows.append(b.run(boxes=<true> images=<true> passes="_img_id" hi_res=<true> size=1024 png=<true> row="--images --passes _img_id --hi_res --size 1024 --png"))<for_stmt>row rows<block_start>output<augadd>row+"\n"<block_end>print(output)<block_end><def_stmt>write_to_main <block_start>b.start()<line_sep>tr=b.run(boxes=<true> transforms=<true> return_row=<false>)<line_sep>b.start()<line_sep>lo=b.run(images=<true> passes="_img" return_row=<false>)<line_sep>b.start()<line_sep>hi=b.run(images=<true> passes="_img" return_row=<false> hi_res=<true> size=1024)<line_sep>txt=PATH.read_text()<line_sep>txt=txt.replace("$TRANSFORMS_"+machine_key str(tr))<line_sep>txt=txt.replace("$IMG_LOW_"+machine_key str(lo))<line_sep>txt=txt.replace("$IMG_HIGH_"+machine_key str(hi))<line_sep>PATH.write_text(txt)<block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=ArgumentParser()<line_sep>parser.add_argument('--main' action='store_true')<line_sep>parser.add_argument('--machine' type=str default='legion_lenovo' choices=['legion_lenovo' 'braintree' 'node11'])<line_sep>args=parser.parse_args()<line_sep>machine_key=args.machine.upper()<if_stmt>machine_key<eq>"LEGION_LENOVO"<block_start><if_stmt>system()<eq>"Windows"<block_start>machine_key<augadd>"_WINDOWS"<block_end><else_stmt><block_start>machine_key<augadd>"_UBUNTU"<block_end><block_end>b=Benchmarker()<if_stmt>args.main<block_start>write_to_main()<block_end><else_stmt><block_start>granular()<block_end>b.communicate({"$type":"terminate"})<block_end>
""" Simple wrapper to start emerge as a standalone tool. """<line_sep># Authors: <NAME> <<EMAIL>> # License: MIT <import_from_stmt>emerge.appear Emerge<def_stmt>run <block_start>emerge=Emerge()<line_sep>emerge.start()<block_end><if_stmt>__name__<eq>"__main__"<block_start>run()<block_end>
<import_stmt>logging<import_from_stmt>configparser ConfigParser<import_from_stmt>pathlib Path<import_from_stmt>unittest.mock patch MagicMock<import_stmt>pytest<import_from_stmt>sciencebeam.utils.mime_type_constants MimeTypes<import_from_stmt>sciencebeam.transformers convert_doc<as>convert_doc_module<import_from_stmt>sciencebeam.transformers.convert_doc DEFAULT_DOC_CONVERT_PROCESS_TIMEOUT DEFAULT_DOC_CONVERT_MAX_UPTIME DOC_CONVERT_SECTION_NAME AppConfigOptions EnvironmentVariables _get_default_config _convert_doc_to doc_to_pdf doc_to_docx <line_sep>LOGGER=logging.getLogger(__name__)<line_sep>DOC_CONTENT_1=b'doc content 1'<line_sep>PDF_CONTENT_1=b'pdf content 1'<line_sep>DOCX_CONTENT_1=b'docx content 1'<line_sep>@pytest.fixture(name='get_doc_converter_mock' autouse=<true>)<def_stmt>_get_doc_converter_mock <block_start><with_stmt>patch.object(convert_doc_module '_get_doc_converter')<as>m<block_start><yield>m<block_end><block_end>@pytest.fixture(name='pdf_path')<def_stmt>_pdf_path temp_dir:Path<block_start><return>temp_dir.joinpath('temp.pdf')<block_end>@pytest.fixture(name='doc_converter_mock' autouse=<true>)<def_stmt>_doc_converter_mock get_doc_converter_mock:MagicMock pdf_path:Path<block_start>doc_converter_mock=get_doc_converter_mock.return_value<line_sep>doc_converter_mock.convert.return_value=str(pdf_path)<line_sep><return>doc_converter_mock<block_end>@pytest.fixture(name='TemporaryDirectory_mock' autouse=<true>)<def_stmt>_mock_temp_directory tmpdir<block_start><with_stmt>patch.object(convert_doc_module 'TemporaryDirectory')<as>m<block_start>m.return_value.__enter__.return_value=str(tmpdir)<line_sep><yield>m<block_end><block_end>@pytest.fixture(name='get_app_config_mock')<def_stmt>_get_app_config_mock <block_start><with_stmt>patch.object(convert_doc_module 'get_app_config')<as>m<block_start>m.return_value=ConfigParser()<line_sep><yield>m<block_end><block_end>@pytest.fixture(name='app_config_mock')<def_stmt>_app_config_mock get_app_config_mock:MagicMock<arrow>ConfigParser<block_start><return>get_app_config_mock.return_value<block_end><class_stmt>TestGetDefaultConfig<block_start>@patch('os.environ' {})<def_stmt>test_should_load_config_from_app_config self app_config_mock:ConfigParser<block_start>app_config_mock.read_dict({DOC_CONVERT_SECTION_NAME:{AppConfigOptions.PROCESS_TIMEOUT:'123' AppConfigOptions.MAX_UPTIME:'101' AppConfigOptions.STOP_LISTENER_ON_ERROR:'true' AppConfigOptions.ENABLE_DEBUG:'true'}})<line_sep>config=_get_default_config()<line_sep>LOGGER.debug('config: %s' config)<assert_stmt>config.get('process_timeout')<eq>123<assert_stmt>config.get('max_uptime')<eq>101<assert_stmt>config.get('stop_listener_on_error')<is><true><assert_stmt>config.get('enable_debug')<is><true><block_end>@patch('os.environ' {EnvironmentVariables.DOC_CONVERT_PROCESS_TIMEOUT:'123' EnvironmentVariables.DOC_CONVERT_MAX_UPTIME:'101' EnvironmentVariables.DOC_CONVERT_ENABLE_DEBUG:'true'})<def_stmt>test_should_load_config_from_env self app_config_mock:ConfigParser<block_start>app_config_mock.read_dict({DOC_CONVERT_SECTION_NAME:{AppConfigOptions.PROCESS_TIMEOUT:'1' AppConfigOptions.MAX_UPTIME:'1' AppConfigOptions.STOP_LISTENER_ON_ERROR:'true' AppConfigOptions.ENABLE_DEBUG:'false'}})<line_sep>config=_get_default_config()<line_sep>LOGGER.debug('config: %s' config)<assert_stmt>config.get('process_timeout')<eq>123<assert_stmt>config.get('max_uptime')<eq>101<assert_stmt>config.get('enable_debug')<is><true><block_end>@patch('os.environ' {})<def_stmt>test_should_use_defaults self 
app_config_mock:ConfigParser<block_start>app_config_mock.read_dict({DOC_CONVERT_SECTION_NAME:{AppConfigOptions.STOP_LISTENER_ON_ERROR:'true' AppConfigOptions.ENABLE_DEBUG:'true'}})<line_sep>config=_get_default_config()<line_sep>LOGGER.debug('config: %s' config)<assert_stmt>config.get('process_timeout')<eq>DEFAULT_DOC_CONVERT_PROCESS_TIMEOUT<assert_stmt>config.get('max_uptime')<eq>DEFAULT_DOC_CONVERT_MAX_UPTIME<block_end><block_end><class_stmt>TestConvertDocTo<block_start><def_stmt>test_should_return_pdf self pdf_path:Path<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<assert_stmt>_convert_doc_to(DOC_CONTENT_1 MimeTypes.DOC 'pdf')<eq>PDF_CONTENT_1<block_end><def_stmt>test_should_call_convert_with_doc self temp_dir:Path pdf_path:Path doc_converter_mock:MagicMock<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<line_sep>_convert_doc_to(DOC_CONTENT_1 MimeTypes.DOC 'pdf')<line_sep>doc_converter_mock.convert.assert_called_with(str(temp_dir.joinpath('temp.doc')) output_type='pdf')<block_end><def_stmt>test_should_call_check_output_with_docx self temp_dir:Path pdf_path:Path doc_converter_mock:MagicMock<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<line_sep>_convert_doc_to(DOC_CONTENT_1 MimeTypes.DOCX 'pdf')<line_sep>doc_converter_mock.convert.assert_called_with(str(temp_dir.joinpath('temp.docx')) output_type='pdf')<block_end><def_stmt>test_should_call_check_output_with_dotx self temp_dir:Path pdf_path:Path doc_converter_mock:MagicMock<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<line_sep>_convert_doc_to(DOC_CONTENT_1 MimeTypes.DOTX 'pdf')<line_sep>doc_converter_mock.convert.assert_called_with(str(temp_dir.joinpath('temp.dotx')) output_type='pdf')<block_end><def_stmt>test_should_call_check_output_with_rtf self temp_dir:Path pdf_path:Path doc_converter_mock:MagicMock<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<line_sep>_convert_doc_to(DOC_CONTENT_1 MimeTypes.RTF 'pdf')<line_sep>doc_converter_mock.convert.assert_called_with(str(temp_dir.joinpath('temp.rtf')) output_type='pdf')<block_end><block_end><class_stmt>TestDocToPdf<block_start><def_stmt>test_should_return_pdf self pdf_path:Path<block_start>pdf_path.write_bytes(PDF_CONTENT_1)<assert_stmt>doc_to_pdf(DOC_CONTENT_1 MimeTypes.DOC)<eq>PDF_CONTENT_1<block_end><block_end><class_stmt>TestDocToDocx<block_start><def_stmt>test_should_return_docx self temp_dir:Path doc_converter_mock:MagicMock<block_start>docx_path=temp_dir.joinpath('temp.docx')<line_sep>doc_converter_mock.convert.return_value=str(docx_path)<line_sep>docx_path.write_bytes(DOCX_CONTENT_1)<assert_stmt>doc_to_docx(DOC_CONTENT_1 MimeTypes.DOC)<eq>DOCX_CONTENT_1<block_end><block_end>
msg=<none><if_stmt>msg<block_start>print(msg)<block_end>
<import_from_stmt>django.test TestCase<import_from_stmt>dojo.tools.eslint.parser ESLintParser<import_from_stmt>dojo.models Test<class_stmt>TestESLintParser(TestCase)<block_start><def_stmt>test_parse_file_has_two_findings self<block_start>testfile=open("dojo/unittests/scans/eslint/scan.json")<line_sep>parser=ESLintParser()<line_sep>findings=parser.get_findings(testfile Test())<line_sep>testfile.close()<line_sep>self.assertEqual(2 len(findings))<block_end><def_stmt>test_parse_empty_file self<block_start>testfile=open("dojo/unittests/scans/eslint/empty.json")<line_sep>parser=ESLintParser()<line_sep>findings=parser.get_findings(testfile Test())<line_sep>testfile.close()<line_sep>self.assertEqual(0 len(findings))<block_end><def_stmt>test_parse_file_with_no_finding self<block_start>testfile=open("dojo/unittests/scans/eslint/no_finding.json")<line_sep>parser=ESLintParser()<line_sep>findings=parser.get_findings(testfile Test())<line_sep>testfile.close()<line_sep>self.assertEqual(0 len(findings))<block_end><block_end>
# Type definiton for (type, data) tuples representing a value # See http://androidxref.com/9.0.0_r3/xref/frameworks/base/libs/androidfw/include/androidfw/ResourceTypes.h#262 # The 'data' is either 0 or 1, specifying this resource is either # undefined or empty, respectively. TYPE_NULL=0x00<line_sep># The 'data' holds a ResTable_ref, a reference to another resource # table entry. TYPE_REFERENCE=0x01<line_sep># The 'data' holds an attribute resource identifier. TYPE_ATTRIBUTE=0x02<line_sep># The 'data' holds an index into the containing resource table's # global value string pool. TYPE_STRING=0x03<line_sep># The 'data' holds a single-precision floating point number. TYPE_FLOAT=0x04<line_sep># The 'data' holds a complex number encoding a dimension value # such as "100in". TYPE_DIMENSION=0x05<line_sep># The 'data' holds a complex number encoding a fraction of a # container. TYPE_FRACTION=0x06<line_sep># The 'data' holds a dynamic ResTable_ref, which needs to be # resolved before it can be used like a TYPE_REFERENCE. TYPE_DYNAMIC_REFERENCE=0x07<line_sep># The 'data' holds an attribute resource identifier, which needs to be resolved # before it can be used like a TYPE_ATTRIBUTE. TYPE_DYNAMIC_ATTRIBUTE=0x08<line_sep># Beginning of integer flavors... TYPE_FIRST_INT=0x10<line_sep># The 'data' is a raw integer value of the form n..n. TYPE_INT_DEC=0x10<line_sep># The 'data' is a raw integer value of the form 0xn..n. TYPE_INT_HEX=0x11<line_sep># The 'data' is either 0 or 1, for input "false" or "true" respectively. TYPE_INT_BOOLEAN=0x12<line_sep># Beginning of color integer flavors... TYPE_FIRST_COLOR_INT=0x1c<line_sep># The 'data' is a raw integer value of the form #aarrggbb. TYPE_INT_COLOR_ARGB8=0x1c<line_sep># The 'data' is a raw integer value of the form #rrggbb. TYPE_INT_COLOR_RGB8=0x1d<line_sep># The 'data' is a raw integer value of the form #argb. TYPE_INT_COLOR_ARGB4=0x1e<line_sep># The 'data' is a raw integer value of the form #rgb. TYPE_INT_COLOR_RGB4=0x1f<line_sep># ...end of integer flavors. TYPE_LAST_COLOR_INT=0x1f<line_sep># ...end of integer flavors. TYPE_LAST_INT=0x1f<line_sep>
<import_stmt>warnings<line_sep>warnings.filterwarnings('ignore' category=FutureWarning)<import_from_stmt>deepposekit Annotator<import_stmt>cv2<import_stmt>numpy<as>np<import_stmt>warnings<import_from_stmt>configparser ConfigParser<import_stmt>os<line_sep>warnings.filterwarnings('ignore')<def_stmt>dpkAnnotator dpkini annotationfile<block_start>config=ConfigParser()<line_sep>configFile=str(dpkini)<line_sep>config.read(configFile)<line_sep>project_path=config.get('general DPK settings' 'project_folder')<line_sep>annotationsPath=annotationfile<line_sep>bodyPartsListPath=os.path.join(project_path 'skeleton.csv')<line_sep>app=Annotator(datapath=annotationsPath dataset='images' skeleton=bodyPartsListPath shuffle_colors=<false> text_scale=1)<line_sep>im=np.zeros((300 600 3))<line_sep>cv2.putText(im 'Instructions' (10 20) cv2.FONT_HERSHEY_SIMPLEX 0.5 (255 255 255) 2)<line_sep>cv2.putText(im '+- = rescale image by +/- 10%' (10 40) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'left mouse button = move active keypoint to cursor location' (10 60) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'WASD = move active keypoint 1px or 10px' (10 80) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'JL = next or previous image' (10 100) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im '<> = jump 10 images forward or backward' (10 120) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'I,K or tab, shift+tab = switch active keypoint' (10 140) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'R = mark image as unannotated ("reset")' (10 160) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'F = mark image as annotated ("finished")' (10 180) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'esc or Q = quit' (10 200) cv2.FONT_HERSHEY_SIMPLEX 0.5 (0 255 0) 2)<line_sep>cv2.putText(im 'Tap tab to begin' (10 240) cv2.FONT_HERSHEY_SIMPLEX 0.7 (0 255 0) 2)<line_sep>cv2.imshow('Instructions' im)<line_sep>k=cv2.waitKey(0)<while_stmt>(1)<block_start>cv2.imshow('Instructions' im)<line_sep>k=cv2.waitKey(0)<line_sep>app.run()<if_stmt>k<eq>27# Esc key to stop <block_start>print('Annotatations saved in: '+str(annotationfile))<line_sep><break><block_end><block_end><block_end>
# Usage: # python xor-multidevice.py --dynet-devices CPU,GPU:0,GPU:1 # or python xor-multidevice.py --dynet-gpus 2 <import_stmt>sys<import_stmt>dynet<as>dy<line_sep>#xsent = True xsent=<false><line_sep>HIDDEN_SIZE=8<line_sep>ITERATIONS=2000<line_sep>m=dy.Model()<line_sep>trainer=dy.SimpleSGDTrainer(m)<line_sep>pW1=m.add_parameters((HIDDEN_SIZE 2) device="GPU:1")<line_sep>pb1=m.add_parameters(HIDDEN_SIZE device="GPU:1")<line_sep>pW2=m.add_parameters((HIDDEN_SIZE HIDDEN_SIZE) device="GPU:0")<line_sep>pb2=m.add_parameters(HIDDEN_SIZE device="GPU:0")<line_sep>pV=m.add_parameters((1 HIDDEN_SIZE) device="CPU")<line_sep>pa=m.add_parameters(1 device="CPU")<if_stmt>len(sys.argv)<eq>2<block_start>m.populate_from_textfile(sys.argv[1])<block_end>dy.renew_cg()<line_sep>W1,b1,W2,b2,V,a=dy.parameter(pW1 pb1 pW2 pb2 pV pa)<line_sep>x=dy.vecInput(2 "GPU:1")<line_sep>y=dy.scalarInput(0 "CPU")<line_sep>h1=dy.tanh((W1<times>x)+b1)<line_sep>h1_gpu0=dy.to_device(h1 "GPU:0")<line_sep>h2=dy.tanh((W2<times>h1_gpu0)+b2)<line_sep>h2_cpu=dy.to_device(h2 "CPU")<if_stmt>xsent<block_start>y_pred=dy.logistic((V<times>h2_cpu)+a)<line_sep>loss=dy.binary_log_loss(y_pred y)<line_sep>T=1<line_sep>F=0<block_end><else_stmt><block_start>y_pred=(V<times>h2_cpu)+a<line_sep>loss=dy.squared_distance(y_pred y)<line_sep>T=1<line_sep>F=-1<block_end><for_stmt>iter range(ITERATIONS)<block_start>mloss=0.0<for_stmt>mi range(4)<block_start>x1=mi%2<line_sep>x2=(mi<floordiv>2)%2<line_sep>x.set([T<if>x1<else>F T<if>x2<else>F])<line_sep>y.set(T<if>x1<ne>x2<else>F)<line_sep>mloss<augadd>loss.scalar_value()<line_sep>loss.backward()<line_sep>trainer.update()<block_end>mloss<augdiv>4.<line_sep>print("loss: %0.9f"%mloss)<block_end>x.set([F T])<line_sep>z=-(-y_pred)<line_sep>print(z.scalar_value())<line_sep>m.save("xor.pymodel")<line_sep>dy.renew_cg()<line_sep>W1,b1,W2,b2,V,a=dy.parameter(pW1 pb1 pW2 pb2 pV pa)<line_sep>x=dy.vecInput(2 "GPU:1")<line_sep>y=dy.scalarInput(0 "CPU")<line_sep>h1=dy.tanh((W1<times>x)+b1)<line_sep>h1_gpu0=dy.to_device(h1 "GPU:0")<line_sep>h2=dy.tanh((W2<times>h1_gpu0)+b2)<line_sep>h2_cpu=dy.to_device(h2 "CPU")<if_stmt>xsent<block_start>y_pred=dy.logistic((V<times>h2_cpu)+a)<block_end><else_stmt><block_start>y_pred=(V<times>h2_cpu)+a<block_end>x.set([T F])<line_sep>print("TF" y_pred.scalar_value())<line_sep>x.set([F F])<line_sep>print("FF" y_pred.scalar_value())<line_sep>x.set([T T])<line_sep>print("TT" y_pred.scalar_value())<line_sep>x.set([F T])<line_sep>print("FT" y_pred.scalar_value())<line_sep>
# Copyright Contributors to the Amundsen project. # SPDX-License-Identifier: Apache-2.0 <import_from_stmt>typing Iterator<import_from_stmt>databuilder.models.graph_serializable GraphSerializable<class_stmt>QueryBase(GraphSerializable)<block_start>@staticmethod<def_stmt>_normalize sql:str<arrow>str<block_start>""" Normalizes a SQL query or SQL expression. No checks are made to ensure that the input is valid SQL. This is not a full normalization. The following operations are preformed: - Any run of whitespace characters outside of a quoted region is replaces by a single ' ' character. - Characters outside of quoted regions are made lower case. - If present, a trailing ';' is removed from the query. Note: Making characters outside quoted regions lower case does not in general result in an equivalent SQL statement. For example, with MySQL the case sensitivity of table names is operating system dependant. In practice, modern systems rarely rely on case sensitivity, and since making the non-quoted regions of the query lowercase is very helpful in identifying queries, we go ahead and do so. Also, this method fails to identify expressions such as `1 + 2` and `1+2`. There are likely too many special cases in this area to make much progress without doing a proper parse. """<line_sep>text=sql.strip()<line_sep>it=iter(text)<line_sep>sb=[]<for_stmt>c it<block_start><if_stmt>c.isspace()<block_start>c=QueryBase._process_whitespace(it)<line_sep>sb.append(' ')<block_end>sb.append(c.lower())<if_stmt>c<in>('`' '"' "'")<block_start><for_stmt>d QueryBase._process_quoted(it c)<block_start>sb.append(d)<block_end><block_end><block_end><if_stmt>sb[-1]<eq>';'<block_start>sb.pop()<block_end><return>''.join(sb)<block_end>@staticmethod<def_stmt>_process_quoted it:Iterator[str] quote:str<arrow>Iterator[str]<block_start>""" Yields characters up to and including the first occurrence of the (non-escaped) character `quote`. Allows `quote` to be escaped with '\\'. """<line_sep>p=''<for_stmt>c it<block_start><yield>c<if_stmt>c<eq>quote<and>p<ne>'\\'<block_start><break><block_end>p=c<block_end><block_end>@staticmethod<def_stmt>_process_whitespace it:Iterator[str]<arrow>str<block_start>""" Returns the first non-whitespace character encountered. This should never return `None` since the query text is striped before being processed. That is, if the current character is a whitespace character, then there remains at least one non-whitespace character in the stream. """<for_stmt>c it<block_start><if_stmt><not>c.isspace()<block_start><return>c<block_end><block_end><raise>ValueError("Input string was not stripped!")<block_end><block_end>
<import_stmt>os<import_stmt>sys<line_sep># NOTE # ==== # Renaming should happen in groups based on extention. # All files should first be renamed with a unique ID. ################################################################################ ERR=<false><line_sep>ALL=''.join(map(chr xrange(256)))<line_sep>NUM='0123456789'<line_sep>LET=ALL.translate(ALL NUM)<line_sep>EXT='avi' 'bmp' 'gif' 'jpg' 'wmv'<line_sep>################################################################################ <class_stmt>Filename<block_start><def_stmt>__init__ self filename<block_start>self.filename=filename.lower()<line_sep>split=self.filename.rsplit('.' 1)<line_sep>self.name=split[0]<line_sep>self.ext=split[1]<if>len(split)<eq>2<else>''<line_sep>self.let=self.name.translate(ALL NUM)<line_sep>self.num=self.name.translate(ALL LET)<block_end><def_stmt>__eq__ self other<block_start><return>bool(self.num)<and>other<eq>int(self.num)<block_end><block_end>################################################################################ <def_stmt>main <block_start><try_stmt><block_start>arguments=sys.argv[1:]<assert_stmt>arguments<for_stmt>path arguments<block_start><assert_stmt>os.path.isdir(path)<block_end><for_stmt>path arguments<block_start>engine(path)<block_end><block_end><except_stmt><block_start>sys.stdout.write('Usage: %s <directory>'%os.path.basename(sys.argv[0]))<block_end><block_end><def_stmt>engine path<block_start><global>ERR<for_stmt>root,dirs,files os.walk(path)# gather all relevant names <block_start>files=filter(<lambda>name:name.num<and>name.ext<in>EXT map(Filename files))<line_sep># find all taken number names taken=[]<for_stmt>name files[:]<block_start><if_stmt>name.name<eq>name.num<block_start>files.remove(name)<line_sep>taken.append(name)<block_end><block_end># put all names in order files.sort(compare)<line_sep>taken.sort(compare)<line_sep># rename all non-number names count=0<for_stmt>name files<block_start><while_stmt>count<in>taken<block_start>taken.remove(count)<line_sep>count<augadd>1<block_end>name.new=str(count)<line_sep>count<augadd>1<block_end># condense all numerical names <for_stmt>name taken<block_start><if_stmt>name.num<ne>str(count)<block_start>name.new=str(count)<line_sep>files.append(name)<block_end>count<augadd>1<block_end># rename files needing new names <for_stmt>name files<block_start>old=os.path.join(root name.filename)<try_stmt><block_start>os.rename(old os.path.join(root name.new+'.'+name.ext))<block_end><except_stmt><block_start>sys.stderr.write('%sError: %s'%(ERR<and>'\n'<or>'' old))<line_sep>ERR=<true><block_end><block_end><block_end><block_end><def_stmt>compare x y<block_start>integer=cmp(x.let y.let)<line_sep><return>integer<if>integer<else>cmp(int(x.num) int(y.num))<block_end>################################################################################ <if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
<import_stmt>torch<line_sep>__all__=["DeepSpeech"]<class_stmt>FullyConnected(torch.nn.Module)<block_start>""" Args: n_feature: Number of input features n_hidden: Internal hidden unit size. """<def_stmt>__init__ self n_feature:int n_hidden:int dropout:float relu_max_clip:int=20<arrow><none><block_start>super(FullyConnected self).__init__()<line_sep>self.fc=torch.nn.Linear(n_feature n_hidden bias=<true>)<line_sep>self.relu_max_clip=relu_max_clip<line_sep>self.dropout=dropout<block_end><def_stmt>forward self x:torch.Tensor<arrow>torch.Tensor<block_start>x=self.fc(x)<line_sep>x=torch.nn.functional.relu(x)<line_sep>x=torch.nn.functional.hardtanh(x 0 self.relu_max_clip)<if_stmt>self.dropout<block_start>x=torch.nn.functional.dropout(x self.dropout self.training)<block_end><return>x<block_end><block_end><class_stmt>DeepSpeech(torch.nn.Module)<block_start>""" DeepSpeech model architecture from *Deep Speech: Scaling up end-to-end speech recognition* [:footcite:`hannun2014deep`]. Args: n_feature: Number of input features n_hidden: Internal hidden unit size. n_class: Number of output classes """<def_stmt>__init__ self n_feature:int n_hidden:int=2048 n_class:int=40 dropout:float=0.0 <arrow><none><block_start>super(DeepSpeech self).__init__()<line_sep>self.n_hidden=n_hidden<line_sep>self.fc1=FullyConnected(n_feature n_hidden dropout)<line_sep>self.fc2=FullyConnected(n_hidden n_hidden dropout)<line_sep>self.fc3=FullyConnected(n_hidden n_hidden dropout)<line_sep>self.bi_rnn=torch.nn.RNN(n_hidden n_hidden num_layers=1 nonlinearity="relu" bidirectional=<true>)<line_sep>self.fc4=FullyConnected(n_hidden n_hidden dropout)<line_sep>self.out=torch.nn.Linear(n_hidden n_class)<block_end><def_stmt>forward self x:torch.Tensor<arrow>torch.Tensor<block_start>""" Args: x (torch.Tensor): Tensor of dimension (batch, channel, time, feature). Returns: Tensor: Predictor tensor of dimension (batch, time, class). """<line_sep># N x C x T x F x=self.fc1(x)<line_sep># N x C x T x H x=self.fc2(x)<line_sep># N x C x T x H x=self.fc3(x)<line_sep># N x C x T x H x=x.squeeze(1)<line_sep># N x T x H x=x.transpose(0 1)<line_sep># T x N x H x,_=self.bi_rnn(x)<line_sep># The fifth (non-recurrent) layer takes both the forward and backward units as inputs x=x[: : :self.n_hidden]+x[: : self.n_hidden:]<line_sep># T x N x H x=self.fc4(x)<line_sep># T x N x H x=self.out(x)<line_sep># T x N x n_class x=x.permute(1 0 2)<line_sep># N x T x n_class x=torch.nn.functional.log_softmax(x dim=2)<line_sep># N x T x n_class <return>x<block_end><block_end>
# # This file is part of LUNA. # # Copyright (c) 2020 <NAME> <<EMAIL>> # SPDX-License-Identifier: BSD-3-Clause """ LambdaConcept board platform definitions. This is a non-core platform. To use it, you'll need to set your LUNA_PLATFORM variable: > export LUNA_PLATFORM="luna.gateware.platform.lambdaconcept:USB2SnifferPlatform" or > export LUNA_PLATFORM="luna.gateware.platform.lambdaconcept:ECPIX5PlatformRev02" """<import_stmt>os<import_stmt>subprocess<import_from_stmt>amaranth Elaboratable ClockDomain Module ResetSignal<import_from_stmt>amaranth.build Resource Subsignal Pins PinsN Attrs Clock DiffPairs Connector<import_from_stmt>amaranth.vendor.xilinx_7series Xilinx7SeriesPlatform<import_from_stmt>amaranth.vendor.lattice_ecp5 LatticeECP5Platform<import_from_stmt>.core LUNAPlatform<import_from_stmt>..architecture.car PHYResetController<def_stmt>ULPIResource name data_sites clk_site dir_site nxt_site stp_site reset_site extras=() attrs=<none><block_start>""" Generates a set of resources for a ULPI-connected USB PHY. """<line_sep>attrs=Attrs()<if>attrs<is><none><else>attrs<line_sep><return>Resource(name 0 Subsignal("data" Pins(data_sites dir="io")) Subsignal("clk" Pins(clk_site dir="i") Clock(60e6)) Subsignal("dir" Pins(dir_site dir="i")) Subsignal("nxt" Pins(nxt_site dir="i")) Subsignal("stp" Pins(stp_site dir="o")) Subsignal("rst" Pins(reset_site dir="o")) attrs)<block_end><class_stmt>StubClockDomainGenerator(Elaboratable)<block_start>""" Stub clock domain generator; stands in for the typical LUNA one. This generator creates domains; but currently does not configuration. """<def_stmt>__init__ self * clock_frequencies=<none> clock_signal_name=<none><block_start><pass><block_end><def_stmt>elaborate self platform<block_start>m=Module()<line_sep># Create our domains; but don't do anything else for them, for now. m.domains.usb=ClockDomain()<line_sep>m.domains.fast=ClockDomain()<line_sep># Handle USB PHY resets. m.submodules.usb_reset=controller=PHYResetController()<line_sep>m.d.comb<augadd>[ResetSignal("usb").eq(controller.phy_reset)]<line_sep><return>m<block_end><block_end><class_stmt>USB2SnifferPlatform(Xilinx7SeriesPlatform LUNAPlatform)<block_start>""" Board description for OpenVizsla USB analyzer. """<line_sep>name="LambdaConcept USB2Sniffer"<line_sep>device="xc7a35t"<line_sep>package="fgg484"<line_sep>speed="1"<line_sep>default_clk="clk100"<line_sep># Provide the type that'll be used to create our clock domains. clock_domain_generator=StubClockDomainGenerator<line_sep># We only have a single PHY; so use it directly. default_usb_connection="target_phy"<line_sep># # I/O resources. 
# resources=[Resource("clk100" 0 Pins("J19") Attrs(IOStandard="LVCMOS33")) Resource("led" 0 PinsN("W1") Attrs(IOStandard="LVCMOS33")) Resource("led" 1 PinsN("Y2") Attrs(IOStandard="LVCMOS33")) Resource("rgb_led" 0 Subsignal("r" PinsN("W2")) Subsignal("g" PinsN("Y1")) Subsignal("b" PinsN("W1")) Attrs(IOStandard="LVCMOS33") ) Resource("rgb_led" 1 Subsignal("r" PinsN("AA1")) Subsignal("g" PinsN("AB1")) Subsignal("b" PinsN("Y2")) Attrs(IOStandard="LVCMOS33") ) Resource("serial" 0 Subsignal("tx" Pins("U21")) # FPGA_GPIO0 Subsignal("rx" Pins("T21")) # FPGA_GPIO1 Attrs(IOStandard="LVCMOS33") ) Resource("ddram" 0 Subsignal("a" Pins("M2 M5 M3 M1 L6 P1 N3 N2"<concat>"M6 R1 L5 N5 N4 P2 P6") Attrs(IOStandard="SSTL15")) Subsignal("ba" Pins("L3 K6 L4") Attrs(IOStandard="SSTL15")) Subsignal("ras_n" Pins("J4") Attrs(IOStandard="SSTL15")) Subsignal("cas_n" Pins("K3") Attrs(IOStandard="SSTL15")) Subsignal("we_n" Pins("L1") Attrs(IOStandard="SSTL15")) Subsignal("dm" Pins("G3 F1") Attrs(IOStandard="SSTL15")) Subsignal("dq" Pins("G2 H4 H5 J1 K1 H3 H2 J5"<concat>"E3 B2 F3 D2 C2 A1 E2 B1") Attrs(IOStandard="SSTL15" IN_TERM="UNTUNED_SPLIT_50")) Subsignal("dqs_p" Pins("K2 E1") Attrs(IOStandard="DIFF_SSTL15")) Subsignal("dqs_n" Pins("J2 D1") Attrs(IOStandard="DIFF_SSTL15")) Subsignal("clk_p" Pins("P5") Attrs(IOStandard="DIFF_SSTL15")) Subsignal("clk_n" Pins("P4") Attrs(IOStandard="DIFF_SSTL15")) Subsignal("cke" Pins("J6") Attrs(IOStandard="SSTL15")) Subsignal("odt" Pins("K4") Attrs(IOStandard="SSTL15")) Subsignal("reset_n" Pins("G1") Attrs(IOStandard="SSTL15")) Attrs(SLEW="FAST") ) Resource("flash" 0 Subsignal("cs_n" Pins("T19")) Subsignal("mosi" Pins("P22")) Subsignal("miso" Pins("R22")) Subsignal("vpp" Pins("P21")) Subsignal("hold" Pins("R21")) Attrs(IOStandard="LVCMOS33")) Resource("usb_fifo_clock" 0 Pins("D17") Attrs(IOStandard="LVCMOS33")) Resource("usb_fifo" 0 Subsignal("rst" Pins("K22")) Subsignal("data" Pins("A16 F14 A15 F13 A14 E14 A13 E13 B13 C15 C13 C14 B16 E17 B15 F16"<concat>"A20 E18 B20 F18 D19 D21 E19 E21 A21 B21 A19 A18 F20 F19 B18 B17")) Subsignal("be" Pins("K16 L16 G20 H20")) Subsignal("rxf_n" Pins("M13")) Subsignal("txe_n" Pins("L13")) Subsignal("rd_n" Pins("K19")) Subsignal("wr_n" Pins("M15")) Subsignal("oe_n" Pins("L21")) Subsignal("siwua" Pins("M16")) Attrs(IOStandard="LVCMOS33" SLEW="FAST")) Resource("ulpi_sw" 0 Subsignal("s" Pins("Y8" dir="o")) Subsignal("oe" PinsN("Y9" dir="o")) Attrs(IOStandard="LVCMOS33") ) # Host PHY -- connects directly to the host port. ULPIResource("target_phy" data_sites="AB18 AA18 AA19 AB20 AA20 AB21 AA21 AB22" clk_site="W19" dir_site="W21" stp_site="Y22" nxt_site="W22" reset_site="V20" attrs=Attrs(IOStandard="LVCMOS33" SLEW="FAST")) # Target PHY -- connects via a switch to the target port. 
ULPIResource("sideband_phy" data_sites="AB2 AA3 AB3 Y4 AA4 AB5 AA5 AB6" clk_site="V4" dir_site="AB7" stp_site="AA6" nxt_site="AB8" reset_site="AA8" attrs=Attrs(IOStandard="LVCMOS33" SLEW="FAST"))]<line_sep>connectors=[]<def_stmt>toolchain_program self products name<block_start>xc3sprog=os.environ.get("XC3SPROG" "xc3sprog")<with_stmt>products.extract("{}.bit".format(name))<as>bitstream_file<block_start>subprocess.check_call([xc3sprog "-c" "ft4232h" bitstream_file])<block_end><block_end><block_end><class_stmt>ECPIX5PlatformRev02(LatticeECP5Platform LUNAPlatform)<block_start>name="ECPIX-5 R02"<line_sep>device="LFE5UM5G-85F"<line_sep>package="BG554"<line_sep>speed="8"<line_sep>default_clk="clk100"<line_sep>default_rst="rst"<line_sep># Provide the type that'll be used to create our clock domains. clock_domain_generator=StubClockDomainGenerator<line_sep># We only have a single PHY; so use it directly. default_usb_connection="ulpi"<line_sep>resources=[Resource("rst" 0 PinsN("AB1" dir="i") Attrs(IO_TYPE="LVCMOS33")) Resource("clk100" 0 Pins("K23" dir="i") Clock(100e6) Attrs(IO_TYPE="LVCMOS33")) # LEDs Resource("rgb_led" 0 Subsignal("r" Pins("U21")) Subsignal("g" Pins("W21")) Subsignal("b" Pins("T24")) Attrs(IO_TYPE="LVCMOS33") ) Resource("rgb_led" 1 Subsignal("r" Pins("T23")) Subsignal("g" Pins("R21")) Subsignal("b" Pins("T22")) Attrs(IO_TYPE="LVCMOS33") ) Resource("rgb_led" 2 Subsignal("r" Pins("P21")) Subsignal("g" Pins("R23")) Subsignal("b" Pins("P22")) Attrs(IO_TYPE="LVCMOS33") ) Resource("rgb_led" 3 Subsignal("r" Pins("K21")) Subsignal("g" Pins("K24")) Subsignal("b" Pins("M21")) Attrs(IO_TYPE="LVCMOS33") ) Resource("uart" 0 Subsignal("rx" Pins("R26" dir="i")) Subsignal("tx" Pins("R24" dir="o")) Attrs(IO_TYPE="LVCMOS33" PULLMODE="UP")) Resource("eth_rgmii" 0 Subsignal("rst" PinsN("C13" dir="o")) Subsignal("mdio" Pins("A13" dir="io")) Subsignal("mdc" Pins("C11" dir="o")) Subsignal("tx_clk" Pins("A12" dir="o")) Subsignal("tx_ctrl" Pins("C9" dir="o")) Subsignal("tx_data" Pins("D8 C8 B8 A8" dir="o")) Subsignal("rx_clk" Pins("E11" dir="i")) Subsignal("rx_ctrl" Pins("A11" dir="i")) Subsignal("rx_data" Pins("B11 A10 B10 A9" dir="i")) Attrs(IO_TYPE="LVCMOS33")) Resource("eth_int" 0 PinsN("B13" dir="i") Attrs(IO_TYPE="LVCMOS33")) Resource("ddr3" 0 Subsignal("clk" DiffPairs("H3" "J3" dir="o") Attrs(IO_TYPE="SSTL135D_I")) Subsignal("clk_en" Pins("P1" dir="o")) Subsignal("we" PinsN("R3" dir="o")) Subsignal("ras" PinsN("T3" dir="o")) Subsignal("cas" PinsN("P2" dir="o")) Subsignal("a" Pins("T5 M3 L3 V6 K2 W6 K3 L1 H2 L2 N1 J1 M1 K1" dir="o")) Subsignal("ba" Pins("U6 N3 N4" dir="o")) Subsignal("dqs" DiffPairs("V4 V1" "U5 U2" dir="io") Attrs(IO_TYPE="SSTL135D_I")) Subsignal("dq" Pins("T4 W4 R4 W5 R6 P6 P5 P4 R1 W3 T2 V3 U3 W1 T1 W2" dir="io")) Subsignal("dm" Pins("J4 H5" dir="o")) Subsignal("odt" Pins("L2" dir="o")) Attrs(IO_TYPE="SSTL135_I")) Resource("hdmi" 0 Subsignal("rst" PinsN("N6" dir="o")) Subsignal("scl" Pins("C17" dir="io")) Subsignal("sda" Pins("E17" dir="io")) Subsignal("pclk" Pins("C1" dir="o")) Subsignal("vsync" Pins("A4" dir="o")) Subsignal("hsync" Pins("B4" dir="o")) Subsignal("de" Pins("A3" dir="o")) Subsignal("d" Subsignal("b" Pins("AD25 AC26 AB24 AB25 B3 C3 D3 B1 C2 D2 D1 E3" dir="o")) Subsignal("g" Pins("AA23 AA22 AA24 AA25 E1 F2 F1 D17 D16 E16 J6 H6" dir="o")) Subsignal("r" Pins("AD26 AE25 AF25 AE26 E10 D11 D10 C10 D9 E8 H5 J4" dir="o")) ) Subsignal("mclk" Pins("E19" dir="o")) Subsignal("sck" Pins("D6" dir="o")) Subsignal("ws" Pins("C6" dir="o")) Subsignal("i2s" Pins("A6 B6 A5 C5" 
dir="o")) Subsignal("int" PinsN("C4" dir="i")) Attrs(IO_TYPE="LVTTL33")) Resource("sata" 0 Subsignal("tx" DiffPairs("AD16" "AD17" dir="o")) Subsignal("rx" DiffPairs("AF15" "AF16" dir="i")) Attrs(IO_TYPE="LVDS")) ULPIResource("ulpi" data_sites="M26 L25 L26 K25 K26 J23 P25 H25" clk_site="H24" dir_site="F22" stp_site="H23" nxt_site="F23" reset_site="E23" attrs=Attrs(IO_TYPE="LVCMOS33")) Resource("usbc_cfg" 0 Subsignal("scl" Pins("D24" dir="io")) Subsignal("sda" Pins("C24" dir="io")) Subsignal("dir" Pins("B23" dir="i")) Subsignal("id" Pins("D23" dir="i")) Subsignal("int" PinsN("B24" dir="i")) Attrs(IO_TYPE="LVCMOS33")) Resource("usbc_mux" 0 Subsignal("en" Pins("C23" dir="oe")) Subsignal("amsel" Pins("B26" dir="oe")) Subsignal("pol" Pins("D26" dir="o")) #Subsignal("lna", DiffPairs( "AF9", "AF10", dir="i"), Attrs(IO_TYPE="LVCMOS18D")), #Subsignal("lnb", DiffPairs("AD10", "AD11", dir="o"), Attrs(IO_TYPE="LVCMOS18D")), #Subsignal("lnc", DiffPairs( "AD7", "AD8", dir="o"), Attrs(IO_TYPE="LVCMOS18D")), #Subsignal("lnd", DiffPairs( "AF6", "AF7", dir="i"), Attrs(IO_TYPE="LVCMOS18D")), Attrs(IO_TYPE="LVCMOS33")) # Compatibility aliases. Resource("led" 0 Pins("W21" dir="o") Attrs(IO_TYPE="LVCMOS33")) Resource("led" 1 Pins("R21" dir="o") Attrs(IO_TYPE="LVCMOS33")) Resource("led" 2 Pins("R23" dir="o") Attrs(IO_TYPE="LVCMOS33")) Resource("led" 3 Pins("K24" dir="o") Attrs(IO_TYPE="LVCMOS33")) Resource("user_io" 0 Pins("T25")) Resource("user_io" 1 Pins("U25")) Resource("user_io" 2 Pins("U24")) Resource("user_io" 3 Pins("V24")) ]<line_sep>connectors=[Connector("pmod" 0 "T25 U25 U24 V24 - - T26 U26 V26 W26 - -") Connector("pmod" 1 "U23 V23 U22 V21 - - W25 W24 W23 W22 - -") Connector("pmod" 2 "J24 H22 E21 D18 - - K22 J21 H21 D22 - -") Connector("pmod" 3 " E4 F4 E6 H4 - - F3 D4 D5 F5 - -") Connector("pmod" 4 "E26 D25 F26 F25 - - A25 A24 C26 C25 - -") Connector("pmod" 5 "D19 C21 B21 C22 - - D21 A21 A22 A23 - -") Connector("pmod" 6 "C16 B17 C18 B19 - - A17 A18 A19 C19 - -") Connector("pmod" 7 "D14 B14 E14 B16 - - C14 A14 A15 A16 - -") ]<line_sep>@property<def_stmt>file_templates self<block_start><return>{**super().file_templates "{{name}}-openocd.cfg":r""" interface ftdi ftdi_vid_pid 0x0403 0x6010 ftdi_channel 0 ftdi_layout_init 0xfff8 0xfffb reset_config none adapter_khz 25000 jtag newtap ecp5 tap -irlen 8 -expected-id 0x81113043 """}<block_end><def_stmt>toolchain_program self products name<block_start>openocd=os.environ.get("OPENOCD" "openocd")<with_stmt>products.extract("{}-openocd.cfg".format(name) "{}.svf".format(name))<as>(config_filename vector_filename)<block_start>subprocess.check_call([openocd "-f" config_filename "-c" "transport select jtag; init; svf -quiet {}; exit".format(vector_filename)])<block_end><block_end><block_end>
<import_from_stmt>.networks *<import_from_stmt>.functions *<line_sep>
"""Auto-generated file, do not edit by hand. NE metadata"""<import_from_stmt>..phonemetadata NumberFormat PhoneNumberDesc PhoneMetadata<line_sep>PHONE_METADATA_NE=PhoneMetadata(id='NE' country_code=<none> international_prefix=<none> general_desc=PhoneNumberDesc(national_number_pattern='[1-3578]\\d(?:\\d(?:\\d{3})?)?' possible_length=(2 3 6)) toll_free=PhoneNumberDesc(national_number_pattern='1(?:18|[578])|723\\d{3}' example_number='15' possible_length=(2 3 6)) emergency=PhoneNumberDesc(national_number_pattern='1(?:18|[578])|723141' example_number='15' possible_length=(2 3 6)) short_code=PhoneNumberDesc(national_number_pattern='1(?:0[01]|1[128]|2[034]|3[013]|[46]0|55?|[78])|222|333|555|723141|888' example_number='15' possible_length=(2 3 6)) carrier_specific=PhoneNumberDesc(national_number_pattern='1(?:0[01]|1[12]|2[034]|3[013]|[46]0|55)|222|333|555|888' example_number='100' possible_length=(3 )) short_data=<true>)<line_sep>
<import_stmt>sys<import_stmt>sqlite3<import_stmt>csv<line_sep>cache_key_prefix="quote"<class_stmt>QuoteCache<block_start><def_stmt>__init__ self filename=""<block_start>self.filename=filename<block_end><def_stmt>get self key<block_start><with_stmt>open(self.filename)<as>csv_file<block_start>items=csv.reader(csv_file delimiter=';')<for_stmt>item items<block_start><if_stmt>item[0]<eq>key.split('.')[1]<block_start><return>item[1]<block_end><block_end><block_end><block_end><def_stmt>set self key quote<block_start>existing=[]<with_stmt>open(self.filename)<as>csv_file<block_start>items=csv.reader(csv_file delimiter=';')<line_sep>existing=[cache_key_prefix+"."+item[0]<for>item items]<block_end><if_stmt>key<in>existing<block_start>print("This is weird. The key already exists.")<block_end><else_stmt># save the new data <block_start><with_stmt>open(self.filename "a" newline="")<as>csv_file<block_start>writer=csv.DictWriter(csv_file fieldnames=['id' 'text'] delimiter=";")<line_sep>#print(f"Adding '{q[1]}' to cache") writer.writerow({'id':key.split('.')[1] 'text':quote})<block_end><block_end><block_end><block_end>cache=QuoteCache('data/quotes_cache.csv')<def_stmt>get_quote quote_id# Return the item from cache if found in it. If not found in cache, read from data store. # Put the read item in cache and return it. <block_start>quote=cache.get(f"quote.{quote_id}")<line_sep>out=""<if_stmt>quote<is><none><block_start><try_stmt><block_start>db=sqlite3.connect('data/quotes.sqlite3')<line_sep>cursor=db.cursor()<line_sep>cursor.execute(f"SELECT text FROM quotes WHERE id = {quote_id}")<for_stmt>row cursor<block_start>quote=row[0]<block_end>print(f"Got '{quote}' FROM DB")<block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><finally_stmt># Close the db connection <block_start>db.close()<block_end># and add it to the cache key=f"{cache_key_prefix}.{quote_id}"<line_sep>cache.set(key quote)<block_end><if_stmt>quote<block_start>out=f"{quote} (FROM CACHE, with key 'quote.{quote_id}')"<block_end><return>out<block_end><if_stmt>__name__<eq>"__main__"<block_start>args=sys.argv<if_stmt>args[1]<eq>'fetch'<block_start><while_stmt><true><block_start>quote_id=input('Enter the ID of the quote: ')<line_sep>q=get_quote(quote_id)<if_stmt>q<block_start>print(q)<block_end><block_end><block_end><block_end>
# (C) Datadog, Inc. 2020-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) <import_from_stmt>typing List Optional<import_from_stmt>datadog_checks.base ConfigurationError<import_from_stmt>.types Instance<class_stmt>Config(object)<block_start>""" Hold instance configuration for a RethinkDB check. Encapsulates the validation of an `instance` dictionary while improving type information. """<def_stmt>__init__ self instance=<none># type: (Instance) -> None <block_start><if_stmt>instance<is><none><block_start>instance={}<block_end>host=instance.get('host' 'localhost')<line_sep>port=instance.get('port' 28015)<line_sep>user=instance.get('username')<line_sep>password=instance.get('password')<line_sep>tls_ca_cert=instance.get('tls_ca_cert')<line_sep>tags=instance.get('tags' [])<if_stmt><not>isinstance(host str)<block_start><raise>ConfigurationError('host {!r} must be a string (got {!r})'.format(host type(host)))<block_end><try_stmt><block_start>port=int(port)<block_end><except_stmt>(ValueError TypeError)<block_start><raise>ConfigurationError('port {!r} must be convertible to an integer (got {!r})'.format(port type(port)))<block_end><if_stmt>port<l>0<block_start><raise>ConfigurationError('port must be positive (got {!r})'.format(port))<block_end><if_stmt><not>isinstance(tags list)<block_start><raise>ConfigurationError('tags {!r} must be a list (got {!r})'.format(tags type(tags)))<block_end>self.host=host# type: str self.port=port# type: int self.user=user# type: Optional[str] self.password=password# type: Optional[str] self.tls_ca_cert=tls_ca_cert# type: Optional[str] self.tags=tags# type: List[str] self.service_check_tags=('host:{}'.format(self.host) 'port:{}'.format(self.port))+tuple(self.tags)<block_end><block_end>
<import_from_future_stmt> print_function<import_stmt>argparse<import_from_stmt>functools partial<def_stmt>keep_line line pos_cols region<block_start>fields=line.rstrip().split(b'\t')<if_stmt>fields[pos_cols[0]]<eq>region[0]# same chromosome <block_start><if_stmt>(region[1]<l>int(fields[pos_cols[1]])<l>region[2])<or>(region[1]<l>int(fields[pos_cols[2]])<l>region[2])<block_start><return><true><block_end><block_end><block_end><def_stmt>main infile ofile num_header_lines<block_start>print(infile '->' ofile)<with_stmt>open(infile 'rb')<as>i<block_start><with_stmt>open(ofile 'wb')<as>o# copy header lines <block_start><for_stmt>c range(num_header_lines)<block_start>o.write(next(i))<block_end><for_stmt>line i<block_start><if_stmt>keep_line(line)<block_start>o.write(line)<block_end><block_end><block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>p=argparse.ArgumentParser()<line_sep>p.add_argument('infile')<line_sep>p.add_argument('-r' '--region' required=<true> help='the region of the input file to rewrite')<line_sep>p.add_argument('-o' '--ofile' required=<true> help="the name of the output file")<line_sep>p.add_argument('-c' '--cols' nargs=3 type=int required=<true> help="the columns of the input file specifying chrom, start and stop, "<concat>"respectively")<line_sep>p.add_argument('-n' '--num-header-lines' type=int default=0 help='the number of header lines present in the input; These will '<concat>'always be copied over to the new file.')<line_sep>args=vars(p.parse_args())<line_sep>chrom,reg=args['region'].split(':')<line_sep>region=[chrom.encode()]+[int(x)<for>x reg.split('-')]<line_sep>keep_line=partial(keep_line pos_cols=args['cols'] region=region)<line_sep>main(args['infile'] args['ofile'] args['num_header_lines'])<block_end>
# -*- coding: utf-8 -*- <import_from_future_stmt> unicode_literals<import_from_stmt>django.db models<import_stmt>sys<line_sep>reload(sys)<line_sep>sys.setdefaultencoding('utf8')<line_sep># Create your models here. <class_stmt>BaseCheck(models.Model)<block_start>vid=models.IntegerField(primary_key=<true>)#主键 ip=models.CharField(max_length=255 null=<true> blank=<true>)#扫描ip time=models.CharField(max_length=255 null=<true> blank=<true>)#扫描时间 checkpoint=models.CharField(max_length=255 null=<true> blank=<true>)#检查项 level=models.CharField(max_length=255 null=<true> blank=<true>)#漏洞等级 suggestion=models.CharField(max_length=255 null=<true> blank=<true>)#修复建议 describe=models.CharField(max_length=255 null=<true> blank=<true>)<block_end>#漏洞描述 <class_stmt>Process_save(models.Model)<block_start>vid=models.IntegerField(primary_key=<true>)#主键 ip=models.CharField(max_length=255 null=<true> blank=<true>)#扫描ip time=models.CharField(max_length=255 null=<true> blank=<true>)#扫描时间 describe=models.TextField()#进程描述 checkpoint=models.CharField(max_length=255 null=<true> blank=<true>)#检查项 level=models.CharField(max_length=255 null=<true> blank=<true>)#漏洞等级 suggestion=models.CharField(max_length=255 null=<true> blank=<true>)<block_end>#修复建议 <class_stmt>Scan_number(models.Model)<block_start>vid=models.IntegerField(primary_key=<true>)#主键 ip=models.CharField(max_length=255 null=<true> blank=<true>)#扫描ip time=models.CharField(max_length=255 null=<true> blank=<true>)<block_end>#扫描时间
# coding=utf-8 # Copyright 2018 The DisentanglementLib Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for optimizer.py."""<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_stmt>absl.testing parameterized<import_from_stmt>disentanglement_lib.methods.shared optimizers<import_from_stmt>six.moves range<import_stmt>tensorflow.compat.v1<as>tf<import_stmt>gin.tf.external_configurables# pylint: disable=unused-import <import_stmt>gin.tf<def_stmt>_make_vae_optimizer_configs <block_start>"""Yield different vae_optimizer test configurations. Yields: A tuple containing a list of gin bindings, and the expected learning rate after 10 steps. """<line_sep># Constant learning rate specified in the optimizer. bindings=["vae_optimizer.optimizer_fn = @GradientDescentOptimizer" "GradientDescentOptimizer.learning_rate = 0.1" ]<line_sep><yield>(bindings 0.1)<line_sep># Constant learning rate specified in vae_optimizer. bindings=["vae_optimizer.optimizer_fn = @GradientDescentOptimizer" "vae_optimizer.learning_rate = 0.1" ]<line_sep><yield>(bindings 0.1)<line_sep># Piecewise constant learning rate. bindings=["vae_optimizer.optimizer_fn = @GradientDescentOptimizer" "vae_optimizer.learning_rate = @piecewise_constant" "piecewise_constant.boundaries = (3, 5)" "piecewise_constant.values = (0.2, 0.1, 0.01)" ]<line_sep><yield>(bindings 0.01)<line_sep># Exponential decay learning rate. bindings=["vae_optimizer.optimizer_fn = @GradientDescentOptimizer" "vae_optimizer.learning_rate = @exponential_decay" "exponential_decay.learning_rate = 0.1" "exponential_decay.decay_steps = 1" "exponential_decay.decay_rate = 0.9" ]<line_sep><yield>(bindings 0.03486784401)<block_end><class_stmt>OptimizerTest(parameterized.TestCase tf.test.TestCase)<block_start>@parameterized.parameters(list(_make_vae_optimizer_configs()))<def_stmt>test_vae_optimizer self gin_bindings expected_learning_rate<block_start>gin.parse_config_files_and_bindings([] gin_bindings)<with_stmt>self.test_session()<block_start>x=tf.Variable(0.0)<line_sep>y=tf.pow(x+2.0 2.0)<line_sep>global_step=tf.train.get_or_create_global_step()<line_sep>optimizer=optimizers.make_vae_optimizer()<line_sep>train_op=optimizer.minimize(loss=y global_step=global_step)<line_sep>tf.global_variables_initializer().run()<for_stmt>it range(10)<block_start>self.evaluate(train_op)<line_sep>self.assertEqual(it+1 self.evaluate(global_step))<block_end>current_learning_rate=self.evaluate(optimizer._learning_rate_tensor)<line_sep>self.assertAlmostEqual(expected_learning_rate current_learning_rate)<block_end>gin.clear_config()<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>tf.test.main()<block_end>
<class_stmt>BaseDerivative<block_start><def_stmt>__init__ self config instance *args **kwargs<block_start>self.config=config<line_sep>self.instance=instance<block_end><block_end>
<import_from_future_stmt> annotations<import_stmt>itertools<import_from_stmt>functools partial<import_from_stmt>typing Dict List<import_stmt>dill<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_stmt>plotly.figure_factory<as>ff<import_stmt>plotly.graph_objects<as>go<import_from_stmt>plotly.graph_objs Figure<import_from_stmt>plotly.subplots make_subplots<class_stmt>ReportColumn<block_start>"""A single column in the Robustness Report."""<def_stmt>__init__ self title:str<block_start>self.title=title<block_end><def_stmt>set_title self title:str<block_start>self.title=title<block_end><block_end><class_stmt>ScoreColumn(ReportColumn)<block_start>"""A column for numeric scores in the Robustness Report, displayed as a bar chart."""<def_stmt>__init__ self title:str min_val:float max_val:float is_0_to_1:bool=<false><block_start>super(ScoreColumn self).__init__(title)<line_sep>self.min_val=min_val<line_sep>self.max_val=max_val<line_sep>self.is_0_to_1=is_0_to_1<block_end><def_stmt>set_min self min_val:float<block_start>self.min_val=min_val<block_end><def_stmt>set_max self max_val:float<block_start>self.max_val=max_val<block_end><block_end><class_stmt>ClassDistributionColumn(ReportColumn)<block_start>"""A column for discrete class distributions in the Robustness Report, displayed as a heatmap."""<def_stmt>__init__ self title:str class_codes:List[str]<block_start>super(ClassDistributionColumn self).__init__(title)<line_sep>self.class_codes=class_codes<block_end><def_stmt>set_class_codes self class_codes:List[str]<block_start>self.class_codes=class_codes<block_end><block_end><class_stmt>NumericColumn(ReportColumn)<block_start>"""A column for numeric data in the Robustness Report, displayed as the raw value."""<def_stmt>__init__ self title:str<block_start>super(NumericColumn self).__init__(title)<block_end><block_end><class_stmt>Report<block_start>"""Class for Robustness Gym Report."""<def_stmt>__init__ self data:pd.DataFrame columns:List[ReportColumn] model_name:str=<none> dataset_name:str=<none> **kwargs <block_start>""" Args: data: Pandas dataframe in the following format: column 1: category name column 2: slice name columns 3-N: data corresponding to passed columns parameter columns: ReportColumn objects specifying format of columns 3-N in data model_name (optional): model name to show in report dataset_name (optional): dataset name to show in report **kwargs: any additional config paramters """<line_sep># Make a copy of data since may be modified by methods below self.data=data.copy()<line_sep>self.columns=columns<line_sep>self.model_name=model_name<line_sep>self.dataset_name=dataset_name<line_sep>self.config={"color_scheme":["#ec7734" "#3499ec" "#ec34c1" "#9cec34"] "score_color_complement":"#F3F4F7" "text_fill_color":"#F3F4F7" "text_border_color":"#BEC4CE" "distribution_color_scale":[[0.0 "#FBF5F2"] [1.0 "#EC7734"]] "col_spacing":0.035 "row_height":24 "category_padding":24 "header_padding":80 "score_col_width":0.6 "class_dist_col_width":0.35 "numeric_col_width":0.25 "layout_width":960 "font_size_dist":12 "font_size_data":13 "font_size_heading":14 "font_size_category":14 }<line_sep>self.update_config(**kwargs)<block_end><def_stmt>sort self category_order:Dict[str int]=<none> slice_order:Dict[str int]=<none><block_start>"""Sort rows in report by category / slice alphabetically, or using specified order. Args: category_order (optional): map from category name to sorting rank. If None, sort categories alphabetically. slice_order (optional): map from slice name to sorting rank. 
If None, sort slices alphabetically (within a category). """<if_stmt>category_order<is><none><block_start>category_order={}<block_end><if_stmt>slice_order<is><none><block_start>slice_order={}<block_end><for_stmt>col_name ["sort-order-category" "sort-order-slice"]<block_start><if_stmt>col_name<in>self.data<block_start><raise>ValueError(f"Column name '{col_name}' is reserved")<block_end><block_end>self.data["sort-order-category"]=self.data[0].map(<lambda>x:(category_order.get(x 2<power>10000) x))<line_sep>self.data["sort-order-slice"]=self.data[1].map(<lambda>x:(slice_order.get(x 2<power>10000) x))<line_sep>self.data=self.data.sort_values(by=["sort-order-category" "sort-order-slice"]).drop(["sort-order-category" "sort-order-slice"] axis="columns")<line_sep>self.data.reset_index(inplace=<true> drop=<true>)<block_end><def_stmt>filter self categories:List[str]=<none> slices:List[str]=<none><block_start>"""Filter report to specific categories AND slices Args: categories (optional): list of category names to filter by slices (optional):list of slice names to filter by """<if_stmt>categories<is><not><none># self.data = self.data.loc(self.data[0].isin(categories)) <block_start>self.data=self.data[self.data[0].isin(categories)]<block_end><if_stmt>slices<is><not><none><block_start>self.data=self.data[self.data[1].isin(slices)]<block_end>self.data.reset_index(inplace=<true> drop=<true>)<block_end><def_stmt>rename self category_map:Dict[str str] slice_map:Dict[str str]<block_start>"""Rename categories, slices Args: category_map (optional): map from old to new category name slice_map (optional): map from old to new slice name """<if_stmt>category_map<is><not><none><block_start>self.data[0]=self.data[0].map(<lambda>x:category_map.get(x x))<block_end><if_stmt>slice_map<is><not><none><block_start>self.data[1]=self.data[1].map(<lambda>x:slice_map.get(x x))<block_end><block_end><def_stmt>set_class_codes self class_cds:List[str]<block_start>"""Set single-letter class codes used for class distribution columns."""<for_stmt>col self.columns<block_start><if_stmt>isinstance(col ClassDistributionColumn)<block_start>col.set_class_codes(class_cds)<block_end><block_end><block_end><def_stmt>set_model_name self model_name<block_start>"""Set model name displayed on report."""<line_sep>self.model_name=model_name<block_end><def_stmt>set_dataset_name self dataset_name<block_start>"""Set dataset name displayed on report."""<line_sep>self.dataset_name=dataset_name<block_end><def_stmt>set_range self col_title:str min_val:float=<none> max_val:float=<none><block_start>"""Set min and max values for score columns Args: col_title: title of column to update min_val: minimum value max_val: maximum value """<for_stmt>col self.columns<block_start><if_stmt>isinstance(col ScoreColumn)<and>col.title<eq>col_title<block_start><if_stmt>min_val<is><not><none><block_start>col.min_val=min_val<block_end><if_stmt>max_val<is><not><none><block_start>col.max_val=max_val<block_end><block_end><block_end><block_end><def_stmt>update_config self **kwargs<block_start><for_stmt>k,v kwargs.items()<block_start><if_stmt>k<not><in>self.config<block_start><raise>ValueError(f"Invalid config param: '{k}'")<block_end>self.config[k]=v<block_end><block_end><def_stmt>round self# Round everything <block_start>self.data=self.data.round(3)<line_sep>self.data.class_dist=self.data.class_dist.apply(partial(np.round decimals=3))<line_sep>self.data.pred_dist=self.data.pred_dist.apply(partial(np.round decimals=3))<block_end>@classmethod<def_stmt>load cls 
path:str<arrow>Report<block_start>obj=dill.load(open(path "rb"))<assert_stmt>isinstance(obj Report) (f"dill loaded an instance of {type(obj)}, "<concat>f"must load {cls.__name__}.")<line_sep><return>obj<block_end><def_stmt>save self path:str<block_start><return>dill.dump(self open(path "wb"))<block_end><def_stmt>figure self show_title=<false><arrow>Figure# Verify that rows are grouped by category <block_start>row_categories=self.data[0].tolist()<line_sep>save_cat_groups=set()# Previous category groupings already encountered prev_cat=<none><line_sep># Loop through each row and see if a category is encountered outside of first # identified group for that category <for_stmt>cat row_categories<block_start><if_stmt>cat<ne>prev_cat# category changes <block_start><if_stmt>cat<in>save_cat_groups# if new category previously encountered <block_start><raise>ValueError("Rows must be grouped by category.")<block_end>prev_cat=cat<line_sep>save_cat_groups.add(cat)<block_end><block_end>categories=[]<line_sep>category_sizes=[]# Num rows in each category <for_stmt>category,group itertools.groupby(self.data[0])# column 0 is category <block_start>categories.append(category)<line_sep>category_sizes.append(len(list(group)))<block_end>n_rows=sum(category_sizes)<line_sep>height=(n_rows<times>self.config["row_height"]+len(categories)<times>self.config["category_padding"]+self.config["header_padding"])<line_sep>col_widths=[]<for_stmt>col self.columns<block_start><if_stmt>isinstance(col ScoreColumn)<block_start>col_width=self.config["score_col_width"]<block_end><elif_stmt>isinstance(col ClassDistributionColumn)<block_start>col_width=self.config["class_dist_col_width"]<block_end><else_stmt><block_start>col_width=self.config["numeric_col_width"]<block_end>col_widths.append(col_width)<block_end>fig=make_subplots(rows=len(categories) row_titles=categories cols=len(self.columns) shared_yaxes=<true> subplot_titles=[col.title<for>col self.columns] horizontal_spacing=self.config["col_spacing"] vertical_spacing=self.config["category_padding"]/height row_width=list(reversed(category_sizes)) column_width=col_widths )<line_sep>hms=[]<line_sep>coords=[]<line_sep>category_ndx=1<line_sep># Group data by category <for_stmt>category,category_data self.data.groupby(0 sort=<false>)<block_start>score_col_ndx=0<line_sep>slice_names=category_data[1]<line_sep>slice_names=[s+" "<times>3<for>s slice_names]<for_stmt>col_ndx,col enumerate(self.columns)<block_start>df_col_ndx=col_ndx+2<line_sep># Dataframe has two leading columns with category, slice fig_col_ndx=col_ndx+1# figure columns are 1-indexed x=category_data[df_col_ndx].tolist()<if_stmt>isinstance(col ScoreColumn)<block_start><if_stmt>col.is_0_to_1<block_start>x=[100<times>x_i<for>x_i x]<block_end>col_max=col.max_val<if_stmt>col.is_0_to_1<block_start>col_max=100<times>col.max_val<block_end>fig.add_trace(go.Bar(x=x y=slice_names orientation="h" marker=dict(color=self.get_color(score_col_ndx)) showlegend=<false> text=[f"{x_i:.1f}"<for>x_i x] textposition="inside" width=0.95 textfont=dict(color="white") ) row=category_ndx col=fig_col_ndx )<line_sep># Add marker for gray fill fig.add_trace(go.Bar(x=[col_max-x_i<for>x_i x] y=slice_names orientation="h" marker=dict(color=self.config["score_color_complement"]) showlegend=<false> width=0.9 ) row=category_ndx col=fig_col_ndx )<line_sep>score_col_ndx<augadd>1<block_end><elif_stmt>isinstance(col ClassDistributionColumn)<block_start>annotation_text=[[f"{int(round(z<times>100)):d}"<for>z rw]<for>rw x]<line_sep>hm=ff.create_annotated_heatmap(x 
x=col.class_codes xgap=1 ygap=1 annotation_text=annotation_text colorscale=self.config["distribution_color_scale"] zmin=0 zmax=1 )<line_sep>hms.append(hm)<line_sep># Save annotation data for special code related to heatmaps at end coords.append(len(self.columns)<times>(category_ndx-1)+fig_col_ndx)<line_sep>fig.add_trace(hm.data[0] row=category_ndx col=fig_col_ndx )<block_end><elif_stmt>isinstance(col NumericColumn)# Repurpose bar chart as text field. <block_start>fig.add_trace(go.Bar(x=[1]<times>len(x) y=slice_names orientation="h" marker=dict(color=self.config["text_fill_color"] line=dict(width=0 color=self.config["text_border_color"]) ) showlegend=<false> text=[human_format(x_i)<for>x_i x] textposition="inside" insidetextanchor="middle" width=0.9 ) row=category_ndx col=fig_col_ndx )<block_end><else_stmt><block_start><raise>ValueError("Invalid col type")<block_end><block_end>category_ndx<augadd>1<block_end><for_stmt>category_ndx range(1 len(categories)+1)<block_start><if_stmt>category_ndx<eq>len(categories)<block_start>show_x_axis=<true><block_end><else_stmt><block_start>show_x_axis=<false><block_end><for_stmt>col_ndx,col enumerate(self.columns)<block_start>fig_col_ndx=col_ndx+1# plotly cols are 1-indexed fig.update_yaxes(autorange="reversed" automargin=<true>)<if_stmt>isinstance(col ScoreColumn)<block_start><if_stmt>col.is_0_to_1<block_start>col_min,col_max=100<times>col.min_val 100<times>col.max_val<block_end><else_stmt><block_start>col_min,col_max=col.min_val col.max_val<block_end>fig.update_xaxes(range=[col_min col_max] row=category_ndx col=fig_col_ndx tickvals=[col_min col_max] showticklabels=show_x_axis )<block_end><elif_stmt>isinstance(col ClassDistributionColumn)<block_start>fig.update_xaxes(row=category_ndx col=fig_col_ndx showticklabels=show_x_axis)<block_end><elif_stmt>isinstance(col NumericColumn)<block_start>fig.update_xaxes(range=[0 1] row=category_ndx col=fig_col_ndx showticklabels=<false> )<block_end><block_end><block_end>fig.update_layout(height=height width=self.config["layout_width"] barmode="stack" plot_bgcolor="rgba(0, 0, 0, 0)" paper_bgcolor="rgba(0, 0, 0, 0)" font=dict(size=self.config["font_size_data"]) yaxis={"autorange":"reversed"} margin=go.layout.Margin(r=0 b=0 t=20# right margin # bottom margin # top margin ) )<line_sep># Use low-level plotly interface to update padding / font size <for_stmt>a fig["layout"]["annotations"]# If label for group <block_start><if_stmt>a["text"]<in>categories<block_start>a["x"]=0.99# Add padding a["font"]=dict(size=self.config["font_size_category"])<block_end><else_stmt><block_start>a["font"]=dict(size=self.config["font_size_heading"])<block_end><block_end># Adjust font size for non-category labels # Due to a quirk in plotly, need to do some special low-level coding # Code from https://community.plotly.com/t/how-to-create-annotated-heatmaps # -in-subplots/36686/25 newfont=[go.layout.Annotation(font_size=self.config["font_size_heading"])]<times>len(fig.layout.annotations)<line_sep>fig_annots=[newfont]+[hm.layout.annotations<for>hm hms]<for_stmt>col_ndx range(1 len(fig_annots))<block_start><for_stmt>k range(len(fig_annots[col_ndx]))<block_start>coord=coords[col_ndx-1]<line_sep>fig_annots[col_ndx][k]["xref"]=f"x{coord}"<line_sep>fig_annots[col_ndx][k]["yref"]=f"y{coord}"<line_sep>fig_annots[col_ndx][k]["font_size"]=self.config["font_size_dist"]<block_end><block_end><def_stmt>recursive_extend mylist nr# mylist is a list of lists 
<block_start>result=[]<if_stmt>nr<eq>1<block_start>result.extend(mylist[nr-1])<block_end><else_stmt><block_start>result.extend(mylist[nr-1])<line_sep>result.extend(recursive_extend(mylist nr-1))<block_end><return>result<block_end>new_annotations=recursive_extend(fig_annots[::-1] len(fig_annots))<line_sep>fig.update_layout(annotations=new_annotations)<if_stmt>show_title<block_start>title={"text":f"{self.dataset_name<or>''} {self.model_name<or>''} "<concat>f"Robustness Report" "x":0.5 "xanchor":"center" }<block_end><else_stmt><block_start>title=<none><block_end>fig.update_layout(title=title margin=go.layout.Margin(r=0 b=0 t=80# right margin # bottom margin # top margin ) )<line_sep><return>fig<block_end><def_stmt>get_color self col_ndx<block_start><return>self.config["color_scheme"][col_ndx%len(self.config["color_scheme"])]<block_end><block_end><def_stmt>human_format num<block_start>num=float("{:.3g}".format(num))<line_sep>magnitude=0<while_stmt>abs(num)<ge>1000<block_start>magnitude<augadd>1<line_sep>num<augdiv>1000.0<block_end><return>"{}{}".format("{:f}".format(num).rstrip("0").rstrip(".") ["" "K" "M" "B" "T"][magnitude])<block_end>
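# A minimal usage sketch for the Report class above, in plain Python. The column
# layout (category, slice, then one value column per ReportColumn) follows the
# constructor docstring; every concrete value below is illustrative and not taken
# from the original project.
import pandas as pd

data = pd.DataFrame([
    ["Subpopulations", "short examples", 81.2, [0.5, 0.5], [0.6, 0.4], 120],
    ["Subpopulations", "long examples", 74.3, [0.4, 0.6], [0.5, 0.5], 300],
])
columns = [
    ScoreColumn("accuracy", min_val=0, max_val=100),
    ClassDistributionColumn("class dist", class_codes=["N", "P"]),
    ClassDistributionColumn("pred dist", class_codes=["N", "P"]),
    NumericColumn("size"),
]
report = Report(data, columns, model_name="my-model", dataset_name="my-dataset")
report.sort()
fig = report.figure(show_title=True)
fig.write_html("robustness_report.html")  # or fig.show()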
<import_from_stmt>.MoLFI *<line_sep>
# -*- coding: utf-8 -*- <class_stmt>Question<block_start><def_stmt>__init__ self title selectors redirect<block_start>self.title=title<line_sep>self.selectors=selectors<line_sep>self.redirect=redirect<block_end>@property<def_stmt>serialize self<block_start><return>{"title":self.title "selectors":self.selectors "redirect":self.redirect }<block_end><block_end><class_stmt>Session<block_start><def_stmt>__init__ self session_title title questions<block_start>self.session_title=session_title<line_sep>self.title=title<line_sep>self.questions=questions<block_end><block_end>development_agents_questions=[Question('Qual a rede de produtos da Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais os produtos mais próximos da estrutura produtiva da Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais os produtos de maior complexidade exportados por uma Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais os produtos de maior complexidade importados por uma Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Qual a rede de atividades da Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais as atividades mais próximas da estrutura produtiva da Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais localidades concentram o emprego na Atividade X?' selectors=["Industry"] redirect="/industry/%s") ]<line_sep>student_questions=[Question('Quais os cursos de nível superior oferecidos na Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais os cursos de nível técnico oferecidos na Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Qual o salário médio da Ocupação Z na Localidade Y?' selectors=["Occupation" "Location"] redirect="/occupation/%s?bra_id=%s") Question('Em quais localidades paga-se o maior salário médio da Ocupação Z?' selectors=["Occupation"] redirect="/occupation/%s") Question('Em quais localidades cresce o número de empregados da Ocupação Z?' selectors=["Occupation"] redirect="/occupation/%s") Question('Quais os principais produtos exportados pela Localidade Y?' selectors=["Location"] redirect="/location/%s") Question('Quais as principais atividades econômicas de uma Localidade Y?' selectors=["Location"] redirect="/location/%s") ]<line_sep>entrepreneur_questions=[Question("Qual o número de estabelecimentos na Atividade X, na Localidade Y?" selectors=["Industry" "Location"] redirect="/industry/%s?bra_id=%s") Question("Qual o salário médio da Atividade X, na Localidade Y?" selectors=["Industry" "Location"] redirect="/industry/%s?bra_id=%s") Question("Qual o salário médio da Ocupação Z, na Atividade X, na Localidade Y?" selectors=["Occupation" "Industry" "Location"] redirect="/occupation/%s?cnae_id=%s?bra_id=%s") Question("Quais os principais parceiros comerciais de um Produto P na Localidade Y?" selectors=["Product" "Location"] redirect="/product/%s?bra_id=%s") Question("Quais localidades concentram o emprego na Atividade X?" selectors=["Industry"] redirect="/industry/%s") Question("Quais as localidades que mais importam o Produto P?" selectors=["Product"] redirect="/product/%s") Question("Quais as localidades que mais exportam o Produto P?" selectors=["Product"] redirect="/product/%s") Question("Quais os produtos mais próximos da estrutura produtiva da Localidade Y?" selectors=["Location"] redirect="/location/%s") Question("Quais os cursos de nível superior oferecidos na Localidade Y?" 
selectors=["Location"] redirect="/location/%s") Question("Quais os cursos de nível técnico oferecidos na Localidade Y?" selectors=["Location"] redirect="/location/%s") ]<line_sep>entrepreneur_session=Session(session_title="Empreendedores" title="Identifique o perfil econômico e as oportunidades de negócios de uma região" questions=entrepreneur_questions)<line_sep>development_agents_session=Session(session_title="Agentes de Desenvolvimento" title="Avalie a criação de políticas de desenvolvimento de acordo com a localidade" questions=development_agents_questions)<line_sep>student_session=Session(session_title="Estudantes e Profissionais" title="Descubra informações sobre empregos disponíveis, renda por ocupação e cursos" questions=student_questions)<line_sep>SESSIONS={'entrepreneur':entrepreneur_session 'development_agents':development_agents_session 'student':student_session }<line_sep>
# -*- coding: utf-8 -*- <import_stmt>os<import_stmt>pandas<as>pd<import_from_stmt>fooltrader settings<line_sep># Get the archived proxy list <def_stmt>get_proxy_dir <block_start><return>os.path.join(settings.FOOLTRADER_STORE_PATH "proxy")<block_end><def_stmt>get_proxy_path protocol='http'<block_start><return>os.path.join(get_proxy_dir() "{}_proxy.csv".format(protocol))<block_end><def_stmt>get_checked_proxy_dir part_name=<none><block_start><if_stmt>part_name<block_start><return>os.path.join(get_proxy_dir() 'checked' 'tmp')<block_end><else_stmt><block_start><return>os.path.join(get_proxy_dir() 'checked')<block_end><block_end><def_stmt>get_checked_proxy_path protocol='http' part_name=<none><block_start><if_stmt><not>os.path.exists(get_checked_proxy_dir(part_name))<block_start>os.makedirs(get_checked_proxy_dir(part_name))<block_end><if_stmt>part_name<block_start><return>os.path.join(get_checked_proxy_dir(part_name) "{}_{}_proxy.csv".format(protocol part_name))<block_end><else_stmt><block_start><return>os.path.join(get_checked_proxy_dir() "{}_proxy.csv".format(protocol))<block_end><block_end><def_stmt>get_sorted_proxy_dir domain<block_start><return>os.path.join(get_proxy_dir() domain)<block_end><def_stmt>get_sorted_proxy_path domain protocol='http' part_name=<none><block_start><if_stmt><not>os.path.exists(get_sorted_proxy_dir(domain))<block_start>os.makedirs(get_sorted_proxy_dir(domain))<block_end><if_stmt>part_name<block_start><return>os.path.join(get_sorted_proxy_dir(domain) "tmp" "{}_{}_proxy.csv".format(protocol part_name))<block_end><else_stmt><block_start><return>os.path.join(get_sorted_proxy_dir(domain) "{}_proxy.csv".format(protocol))<block_end><block_end><def_stmt>get_checked_proxy domain=<none> protocol='http'<block_start><if_stmt>domain<and>os.path.exists(get_sorted_proxy_path(domain protocol=protocol))<block_start><return>pd.read_csv(get_sorted_proxy_path(domain protocol))<block_end><if_stmt>os.path.exists(get_checked_proxy_path(protocol))<block_start><return>pd.read_csv(get_checked_proxy_path(protocol))<block_end><block_end><def_stmt>get_proxy protocol='http'<block_start><if_stmt>os.path.exists(get_proxy_path(protocol))<block_start><return>pd.read_csv(get_proxy_path(protocol))<block_end><else_stmt><block_start><return>pd.DataFrame()<block_end><block_end><def_stmt>save_proxy proxies protocol='http'<block_start>proxy_df=get_proxy(protocol)<line_sep>proxy_df=proxy_df.append(proxies)<line_sep># drop_duplicates returns a new frame, so keep the result. proxy_df=proxy_df.drop_duplicates(subset=['url'] keep='last')<line_sep>proxy_df.to_csv(get_proxy_path(protocol) index=<false>)<block_end><if_stmt><not>os.path.exists(get_proxy_dir())<block_start>os.makedirs(get_proxy_dir())<block_end>
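# A minimal usage sketch in plain Python; the proxy URLs are placeholders and it
# only exercises the helpers defined above. Note that save_proxy relies on
# DataFrame.append, which requires pandas < 2.0.
if __name__ == '__main__':
    save_proxy([{'url': 'http://1.2.3.4:8080'}, {'url': 'http://5.6.7.8:3128'}], protocol='http')
    print(get_proxy('http'))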
""" This problem was asked by Uber. Given an array of integers, return a new array such that each element at index i of the new array is the product of all the numbers in the original array except the one at i.For example, if our input was [1, 2, 3, 4, 5], the expected output would be [120, 60, 40, 30, 24]. If our input was [3, 2, 1], the expected output would be [2, 3, 6]. Follow-up: what if you can't use division? """<line_sep># function to get the product of elements inside an array. <def_stmt>get_array_product array<block_start>product=1<for_stmt>i array<block_start>product<augmul>i<block_end><return>product<block_end># this function does the main job of the problem but with division. <def_stmt>get_new_array_division array# array holding the final result <block_start>new_array=[]<line_sep># getting the product of the array elements. product=get_array_product(array)<for_stmt>i array# for each number divide the product by the current element. <block_start>new_array.append(product/i)<block_end># return the array <return>new_array<block_end># this function does the main job of the problem without division.(a to -1 power) <def_stmt>get_new_array array# array holding the final result <block_start>new_array=[]<line_sep># getting the product of the array elements. product=get_array_product(array)<for_stmt>i array# for each number product the product by the current element. <block_start>new_array.append(product<times>(i<power>-1))<block_end># return the array <return>new_array<block_end># This function does the main job of the problem without division.(a to -1 power) <def_stmt>get_new_array_hard array# array holding the final result <block_start>new_array=[]<for_stmt>i range(len(array))<block_start>p=1<for_stmt>j range(len(array))<block_start><if_stmt>j<ne>i<block_start>p<augmul>array[j]<block_end><block_end>new_array.append(p)<block_end># return the array <return>new_array<block_end># test the functions. # print(get_new_array_division([1, 2, 3, 4, 5])) # print(get_new_array_division([3, 2, 1])) # print(get_new_array([1, 2, 3, 4, 5])) # print(get_new_array([3, 2, 1])) print(get_new_array_hard([1 2 3 4 5]))<line_sep>print(get_new_array_hard([3 2 1]))<line_sep>
<import_stmt>cv2<import_stmt>time<import_stmt>numpy<as>np<import_from_stmt>grabscreen grab_screen<import_from_stmt>directkeys PressKey ReleaseKey<import_from_stmt>directkeys W A D<import_from_stmt>countdown CountDown<line_sep>''' Most of the code in this script was taken from Sentdex's Python plays GTA-V '''<def_stmt>roi img vertices<block_start>mask=np.zeros_like(img)<line_sep>cv2.fillPoly(mask vertices 255)<line_sep>masked=cv2.bitwise_and(img mask)<line_sep><return>masked<block_end><def_stmt>straight <block_start>print('straight')<line_sep>PressKey(W)<line_sep>ReleaseKey(A)<line_sep>ReleaseKey(D)<block_end><def_stmt>left <block_start>print('left')<line_sep>PressKey(W)<line_sep>PressKey(A)<line_sep>time.sleep(0.05)<line_sep>ReleaseKey(A)<block_end><def_stmt>right <block_start>print('right')<line_sep>PressKey(W)<line_sep>PressKey(D)<line_sep>time.sleep(0.05)<line_sep>ReleaseKey(D)<block_end><def_stmt>auto_canny image sigma=0.33<block_start>''' Reference: https://www.pyimagesearch.com/ '''<line_sep>v=np.median(image)<line_sep># apply automatic Canny edge detection using the computed median lower=int(max(0 (1.0-sigma)<times>v))<line_sep>upper=int(min(255 (1.0+sigma)<times>v))<line_sep>edged=cv2.Canny(image lower upper)<line_sep># return the edged image <return>edged<block_end><def_stmt>draw_lanes img lines color=[0 255 255] thickness=3# if this fails, go with some default line <block_start><try_stmt># finds the maximum y value for a lane marker # (since we cannot assume the horizon will always be at the same point.) <block_start>ys=[]<for_stmt>i lines<block_start><for_stmt>ii i<block_start>ys<augadd>[ii[1] ii[3]]<block_end><block_end>min_y=min(ys)<line_sep>max_y=150<line_sep>new_lines=[]<line_sep>line_dict={}<for_stmt>idx,i enumerate(lines)<block_start><for_stmt>xyxy i# These four lines: # modified from http://stackoverflow.com/questions/21565994/method-to-return-the-equation-of-a-straight-line-given-two-points # Used to calculate the definition of a line, given two sets of coords. 
<block_start>x_coords=(xyxy[0] xyxy[2])<line_sep>y_coords=(xyxy[1] xyxy[3])<line_sep>A=np.vstack([x_coords np.ones(len(x_coords))]).T<line_sep>m,b=np.linalg.lstsq(A y_coords)[0]<line_sep># Calculating our new, and improved, xs x1=(min_y-b)/m<line_sep>x2=(max_y-b)/m<line_sep>line_dict[idx]=[m b [int(x1) min_y int(x2) max_y]]<line_sep>new_lines.append([int(x1) min_y int(x2) max_y])<block_end><block_end>final_lanes={}<for_stmt>idx line_dict<block_start>final_lanes_copy=final_lanes.copy()<line_sep>m=line_dict[idx][0]<line_sep>b=line_dict[idx][1]<line_sep>line=line_dict[idx][2]<if_stmt>len(final_lanes)<eq>0<block_start>final_lanes[m]=[[m b line]]<block_end><else_stmt><block_start>found_copy=<false><for_stmt>other_ms final_lanes_copy<block_start><if_stmt><not>found_copy<block_start><if_stmt>abs(other_ms<times>1.2)<g>abs(m)<g>abs(other_ms<times>0.8)<block_start><if_stmt>abs(final_lanes_copy[other_ms][0][1]<times>1.2)<g>abs(b)<g>abs(final_lanes_copy[other_ms][0][1]<times>0.8)<block_start>final_lanes[other_ms].append([m b line])<line_sep>found_copy=<true><line_sep><break><block_end><block_end><else_stmt><block_start>final_lanes[m]=[[m b line]]<block_end><block_end><block_end><block_end><block_end>line_counter={}<for_stmt>lanes final_lanes<block_start>line_counter[lanes]=len(final_lanes[lanes])<block_end>top_lanes=sorted(line_counter.items() key=<lambda>item:item[1])[::-1][:2]<line_sep>lane1_id=top_lanes[0][0]<line_sep>lane2_id=top_lanes[1][0]<def_stmt>average_lane lane_data<block_start>x1s=[]<line_sep>y1s=[]<line_sep>x2s=[]<line_sep>y2s=[]<for_stmt>data lane_data<block_start>x1s.append(data[2][0])<line_sep>y1s.append(data[2][1])<line_sep>x2s.append(data[2][2])<line_sep>y2s.append(data[2][3])<block_end><return>int(np.mean(x1s)) int(np.mean(y1s)) int(np.mean(x2s)) int(np.mean(y2s))<block_end>l1_x1,l1_y1,l1_x2,l1_y2=average_lane(final_lanes[lane1_id])<line_sep>l2_x1,l2_y1,l2_x2,l2_y2=average_lane(final_lanes[lane2_id])<line_sep><return>[l1_x1 l1_y1 l1_x2 l1_y2] [l2_x1 l2_y1 l2_x2 l2_y2] lane1_id lane2_id<block_end><except_stmt>Exception<block_start><pass><block_end><block_end><def_stmt>LaneFinder image<block_start>org_image=image<line_sep># convert to grayscale image=cv2.cvtColor(image cv2.COLOR_BGR2GRAY)<line_sep># gaussian blur image=cv2.GaussianBlur(image (3 3) 0)<line_sep># edge detection image=auto_canny(image)<line_sep># Masking Region of Interest vertices=np.array([[0 201] [0 50] [381 50] [381 201]] np.int32)<line_sep>image=roi(image [vertices])<line_sep># probabilistic hough transform lines=cv2.HoughLinesP(image rho=1 theta=(np.pi/180) threshold=5 minLineLength=20 maxLineGap=5)<line_sep>m1=0<line_sep>m2=0<line_sep># drawing lines <try_stmt><block_start>l1,l2,m1,m2=draw_lanes(org_image lines)<line_sep>cv2.line(org_image (l1[0] l1[1]) (l1[2] l1[3]) [0 255 0] 3)<line_sep>cv2.line(org_image (l2[0] l2[1]) (l2[2] l2[3]) [0 255 0] 3)<block_end><except_stmt>Exception<block_start><pass><block_end><try_stmt><block_start><for_stmt>coords lines<block_start>coords=coords[0]<try_stmt><block_start>cv2.line(image (coords[0] coords[1]) (coords[2] coords[3]) [255 0 0] 3)<block_end><except_stmt>Exception<block_start><pass><block_end><block_end><block_end><except_stmt>Exception<block_start><pass><block_end><return>image org_image m1 m2<block_end><if_stmt>__name__<eq>'__main__'<block_start>CountDown(5)<while_stmt><true><block_start>screen=grab_screen(region=(270 250 650 450))<line_sep>new_screen,original_image,m1,m2=LaneFinder(screen)<line_sep># cv2.imshow('window', new_screen) # cv2.imshow('window2', 
# cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)) <if_stmt>m1<l>0<and>m2<l>0<block_start>right()<block_end><elif_stmt>m1<g>0<and>m2<g>0<block_start>left()<block_end><else_stmt><block_start>straight()<block_end><if_stmt>cv2.waitKey(25)<eq>ord('q')<block_start>cv2.destroyAllWindows()<line_sep><break><block_end><block_end><block_end>
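# An alternative, offline entry point added for illustration (not part of the
# original script): run the same lane-finding pipeline on a single image file
# instead of a live screen grab. The file path is a placeholder.
def run_on_image(path='sample_road.png'):
    frame = cv2.imread(path)
    frame = cv2.resize(frame, (381, 201))  # match the hard-coded ROI vertices
    edges, annotated, m1, m2 = LaneFinder(frame)
    cv2.imshow('edges', edges)
    cv2.imshow('lanes', annotated)
    cv2.waitKey(0)
    cv2.destroyAllWindows()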
# -*- coding: utf-8 -*- <import_stmt>pytest<import_from_stmt>django.core.urlresolvers reverse<import_from_stmt>.. factories<as>f<import_from_stmt>. helpers<line_sep>pytestmark=pytest.mark.django_db<class_stmt>TestReviewerViews<block_start><def_stmt>test_reviewer_private_comment self settings login conferences create_proposal<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-comment-create" kwargs=kwargs)<line_sep>data={"comment":"Test" "private":<true>}<line_sep>response=client.post(url data)<assert_stmt>response.status_code<eq>302<assert_stmt>response.url.endswith("#js-reviewers")<block_end><def_stmt>test_reviewer_only_private_comment self settings login conferences create_proposal<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-comment-create" kwargs=kwargs)<line_sep>data={"comment":"Test" "reviewer":<true>}<line_sep>response=client.post(url data)<assert_stmt>response.status_code<eq>302<assert_stmt>response.url.endswith("#js-only-reviewers")<block_end><def_stmt>test_get_review_proposal_form self settings login conferences create_reviewer create_proposal<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "slug":proposal.slug}<line_sep>url=reverse("proposal-review" kwargs=kwargs)<line_sep>response=client.get(url)<line_sep>context=response.context<assert_stmt>response.status_code<eq>200<assert_stmt>context["proposal"]<eq>proposal<line_sep>helpers.assert_template_used(response "proposals/review.html")<block_end><def_stmt>test_post_review_proposal self settings login conferences create_reviewer create_proposal<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "slug":proposal.slug}<line_sep>url=reverse("proposal-review" kwargs=kwargs)<line_sep>response=client.post(url {"review_status":3})<assert_stmt>response.status_code<eq>302<block_end><def_stmt>test_review_proposal_by_non_reviewer self settings client conferences create_proposal<block_start>username,password="<PASSWORD>" "<PASSWORD>"<line_sep>f.create_user(password=password username=username)<line_sep>conference=conferences["future"]<line_sep>client.login(username=username password=password)<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "slug":proposal.slug}<line_sep>url=reverse("proposal-review" kwargs=kwargs)<line_sep>response=client.get(url)<assert_stmt>response.status_code<eq>403<block_end><def_stmt>test_proposal_reviewer_vote_by_non_reviewer self settings client conferences create_proposal<block_start>username,password="<PASSWORD>" "<PASSWORD>"<line_sep>f.create_user(password=password username=username)<line_sep>conference=conferences["future"]<line_sep>client.login(username=username password=password)<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>response=client.post(url)<assert_stmt>response.status_code<eq>403<block_end><def_stmt>test_get_proposal_reviewer_vote self settings login conferences create_proposal 
create_reviewer<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>response=client.get(url)<line_sep>context=response.context<assert_stmt>response.status_code<eq>200<assert_stmt>context["proposal"]<eq>proposal<assert_stmt>context["vote"]<is><none><line_sep>helpers.assert_template_used(response "proposals/vote.html")<block_end><def_stmt>test_post_proposal_reviewer_vote self settings login conferences create_proposal create_reviewer<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>data={"vote_value":1 "comment":"Must Have"}<line_sep>response=client.post(url data)<assert_stmt>response.status_code<eq>302<assert_stmt>response.url.endswith("review/")<is><true><block_end><def_stmt>test_update_proposal_reviewer_vote self settings login conferences create_proposal create_reviewer<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>data={"vote_value":1 "comment":"Must Have"}<line_sep>client.post(url data)<line_sep>update_data={"vote_value":2 "comment":"Must Have"}<line_sep>response=client.post(url update_data)<assert_stmt>response.status_code<eq>302<assert_stmt>response.url.endswith("review/")<is><true><block_end><def_stmt>test_get_proposal_reviewer_vote_after_create self settings login conferences create_proposal create_reviewer<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>comment,vote_value="Must Have" 1<line_sep>data={"vote_value":vote_value "comment":comment}<line_sep>client.post(url data)<line_sep>response=client.get(url)<line_sep>context=response.context<assert_stmt>response.status_code<eq>200<assert_stmt>context["form"].initial["vote_value"]<eq>vote_value<assert_stmt>context["form"].initial["comment"]<eq>comment<block_end><def_stmt>test_post_review_proposal_vote_with_invalid_data self settings login conferences create_proposal create_reviewer<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-reviewer-vote" kwargs=kwargs)<line_sep>data={"vote_value":12}<line_sep>response=client.post(url data)<assert_stmt>response.status_code<eq>200<assert_stmt>"vote_value"<in>response.context["form_errors"]<block_end><def_stmt>test_get_proposal_votes_dashboard self login conferences create_superuser<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>kwargs={"conference_slug":conference.slug}<line_sep>url=reverse("export-reviewer-votes" kwargs=kwargs)<line_sep>response=client.get(url)<assert_stmt>response.status_code<eq>200<block_end><block_end><def_stmt>test_public_comment settings login conferences 
create_proposal<block_start>client=login[0]<line_sep>conference=conferences["future"]<line_sep>proposal=create_proposal<line_sep>username,password="<PASSWORD>" "<PASSWORD>"<line_sep>f.create_user(password=password username=username)<line_sep>client.login(username=username password=password)<line_sep>kwargs={"conference_slug":conference.slug "proposal_slug":proposal.slug}<line_sep>url=reverse("proposal-comment-create" kwargs=kwargs)<line_sep>data={"comment":"Test"}<line_sep>response=client.post(url data)<assert_stmt>response.status_code<eq>302<assert_stmt>response.url.endswith("#js-comments")<block_end>
########################################################################## # # Copyright (c) 2008-2010, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of Image Engine Design nor the names of any # other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## <import_stmt>unittest<import_stmt>imath<import_stmt>IECore<class_stmt>TestCompoundVectorParameter(unittest.TestCase)<block_start><def_stmt>testConstruction self<block_start>c=IECore.CompoundVectorParameter('a' 'dest')<line_sep># test valid parameters c.addParameter(IECore.IntVectorParameter('a' '' IECore.IntVectorData([1 2])))<line_sep>c.addParameter(IECore.BoolVectorParameter('b' '' IECore.BoolVectorData([<false> <false>])))<line_sep>c.addParameters([IECore.V2fVectorParameter('c' '' IECore.V2fVectorData([imath.V2f() imath.V2f()])) IECore.StringVectorParameter('d' '' IECore.StringVectorData(['one' 'two']))])<line_sep>self.assertEqual(len(c.keys()) 4)<def_stmt>addInvalid <block_start>c.addParameter(IECore.StringParameter('xx' '' 'value'))<block_end># test invalid parameters self.assertRaises(TypeError addInvalid)<block_end><def_stmt>testValidation self<block_start>c=IECore.CompoundVectorParameter('a' 'dest')<line_sep>c.addParameter(IECore.IntVectorParameter('a' '' IECore.IntVectorData([1 2])))<line_sep>c.addParameter(IECore.BoolVectorParameter('b' '' IECore.BoolVectorData([<false> <false>])))<line_sep>c.validate()<line_sep>c.addParameter(IECore.IntVectorParameter('c' '' IECore.IntVectorData([1 2 3])))<with_stmt>self.assertRaises(Exception)<as>e<block_start>c.validate()<block_end>self.assertTrue(('Parameter "c" has wrong size ( expected 2 but found 3 )'<in>str(e.exception))<or>('Parameter "a" has wrong size ( expected 3 but found 2 )'<in>str(e.exception))<or>('Parameter "b" has wrong size ( expected 3 but found 2 )'<in>str(e.exception)))<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
"""Draw the histgram of the pose distributions Run it like this: `python3 -m experimental.distribution.py` Do not forget to set the dataset file path. """<import_stmt>cv2<import_stmt>matplotlib<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_from_stmt>dataset get_parsed_dataset<import_from_stmt>experimental.pose_estimator PoseEstimator<if_stmt>__name__<eq>"__main__"<block_start>ds=get_parsed_dataset("data/helen.record" 1 <false>)<line_sep># Counters n_faces=0<line_sep>pitches=[]<line_sep>yaws=[]<line_sep>rolls=[]<for_stmt>image,marks ds# image = (image.numpy()[0]*255).astype(np.uint8) <block_start>height,width=image.shape[1:3]<line_sep>pose_estimator=PoseEstimator(img_size=(height width))<line_sep>marks=np.reshape(marks (-1 2))<times>width<line_sep>pose=pose_estimator.solve_pose_by_68_points(marks)<line_sep># Solve the pitch, yaw and roll angels. r_mat,_=cv2.Rodrigues(pose[0])<line_sep>p_mat=np.hstack((r_mat np.array([[0] [0] [0]])))<line_sep>_,_,_,_,_,_,u_angle=cv2.decomposeProjectionMatrix(p_mat)<line_sep>pitch,yaw,roll=u_angle.flatten()<line_sep># I do not know why the roll axis seems flipted 180 degree. Manually by pass # this issue. <if_stmt>roll<g>0<block_start>roll=180-roll<block_end><elif_stmt>roll<l>0<block_start>roll=-(180+roll)<block_end>pitches.append(pitch)<line_sep>yaws.append(yaw)<line_sep>rolls.append(roll)<line_sep>n_faces<augadd>1<line_sep># print("pitch: {:.2f}, yaw: {:.2f}, roll: {:.2f}".format( # pitch, yaw, roll)) # for mark in marks: # cv2.circle(image, tuple(mark), 1, (0, 255, 0), 1) # cv2.imshow("image", image) # if cv2.waitKey() == 27: # break <block_end>fig,ax=plt.subplots(3 1)<line_sep>ax[0].hist(pitches 40 (-60 60) density=<true>)<line_sep>ax[1].hist(yaws 40 (-60 60) density=<true>)<line_sep>ax[2].hist(rolls 40 (-60 60) density=<true>)<line_sep>plt.show()<line_sep>print(n_faces)<block_end>
<import_from_stmt>typing List<import_from_stmt>pyrep.objects.dummy Dummy<import_from_stmt>pyrep.objects.joint Joint<import_from_stmt>rlbench.backend.task Task<import_from_stmt>rlbench.backend.conditions JointCondition<line_sep>OPTIONS=['left' 'right']<class_stmt>TurnTap(Task)<block_start><def_stmt>init_task self<arrow><none><block_start>self.left_start=Dummy('waypoint0')<line_sep>self.left_end=Dummy('waypoint1')<line_sep>self.right_start=Dummy('waypoint5')<line_sep>self.right_end=Dummy('waypoint6')<line_sep>self.left_joint=Joint('left_joint')<line_sep>self.right_joint=Joint('right_joint')<block_end><def_stmt>init_episode self index:int<arrow>List[str]<block_start>option=OPTIONS[index]<if_stmt>option<eq>'right'<block_start>self.left_start.set_position(self.right_start.get_position())<line_sep>self.left_start.set_orientation(self.right_start.get_orientation())<line_sep>self.left_end.set_position(self.right_end.get_position())<line_sep>self.left_end.set_orientation(self.right_end.get_orientation())<line_sep>self.register_success_conditions([JointCondition(self.right_joint 1.57)])<block_end><else_stmt><block_start>self.register_success_conditions([JointCondition(self.left_joint 1.57)])<block_end><return>['turn %s tap'%option 'rotate the %s tap'%option 'grasp the %s tap and turn it'%option]<block_end><def_stmt>variation_count self<arrow>int<block_start><return>2<block_end><block_end>
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. <import_from_stmt>tvm te<def_stmt>schedule_branch attrs output prefix<block_start>cfg,s=attrs.auto_config attrs.scheduler<line_sep>th_vals=[attrs.get_extent(x)<for>x output.op.axis]<line_sep># Normal Schedule Plan blocks=[te.thread_axis('blockIdx.x') te.thread_axis('blockIdx.y') te.thread_axis('blockIdx.z')]<line_sep>threads=[te.thread_axis('threadIdx.x') te.thread_axis('threadIdx.y') te.thread_axis('threadIdx.z')]<line_sep>th_idx=[]<for_stmt>i range(len(th_vals))<block_start><if_stmt>th_vals[i]<g>1<or>(i+1<eq>len(th_vals)<and>len(th_idx)<eq>0)<block_start>th_idx.append(i)<block_end><else_stmt><block_start>s[output].bind(output.op.axis[i] te.thread_axis('vthread'))<block_end><block_end>high_vaxis,low_vaxis=[] []<for_stmt>i range(len(th_idx))<block_start>ax_name=f'{prefix}D{th_idx[i]}'<line_sep>ax_obj=output.op.axis[th_idx[i]]<if_stmt>i<l>len(blocks)<block_start>sizes=cfg.define_split(ax_name attrs.get_extent(ax_obj) num_outputs=4)<line_sep>ax1,ax2,ax3,ax4=cfg.apply_split(s output ax_obj sizes)<line_sep>s[output].bind(ax1 blocks[i])<line_sep>s[output].bind(ax3 threads[i])<block_end><else_stmt><block_start>sizes=cfg.define_split(ax_name attrs.get_extent(ax_obj) num_outputs=2)<line_sep>ax2,ax4=cfg.apply_split(s output ax_obj sizes)<block_end>s[output].bind(ax2 te.thread_axis('vthread'))<line_sep>s[output].bind(ax4 te.thread_axis('vthread'))<line_sep>high_vaxis.append(ax2)<line_sep>low_vaxis.append(ax4)<block_end>ord_name=f"{prefix}O"<line_sep>permut=cfg.define_reorder(ord_name len(high_vaxis) "all")<line_sep>plan_order=[]<for_stmt>i permut<block_start>plan_order.append(low_vaxis[i])<line_sep>plan_order.append(high_vaxis[i])<block_end>s[output].reorder(*plan_order)<line_sep># unroll unroll_step=cfg.define_knob(f"{prefix}S" [1 4 16 64 512])<line_sep>unroll_explicit=cfg.define_knob(f"{prefix}R" [<false> <true>])<line_sep>kernel_scope=plan_order[0]<line_sep>s[output].pragma(kernel_scope 'auto_unroll_max_step' unroll_step)<line_sep>s[output].pragma(kernel_scope 'unroll_explicit' unroll_explicit)<block_end>
# -*- coding:utf-8 -*- # &Author AnFany # 自适应优化绘制决策树程序 # 绘制决策图主要包括四部分 # 1,确定每一个节点展示的内容(内部节点展示,节点名称,类别比例,分类特征,本节点的结果, 叶子节点没有分类特征的内容) # 2,确定每一个节点的位置(垂直方向平均分配,水平方向按照这一层的节点个数平均分配) # 3,确定节点之间的连线 # 4,展示连线的内容(分类规则以及分分割值) # 5,内部节点,子节点以不用的颜色展示,对给出图例 # 根据所有节点的数据集、所有节点的结果、所有节点的规则、剪枝后代表着树的节点关系绘制树 <import_from_stmt>pylab mpl<line_sep>mpl.rcParams['font.sans-serif']=['FangSong']# 显示中文 mpl.rcParams['axes.unicode_minus']=<false># 显示负号 <import_stmt>matplotlib.pyplot<as>plt<line_sep># 引入绘制树需要的信息 <import_stmt>AnFany_DT_Classify<as>tree<line_sep># 获得数据的字段名称 ziduan=['Age' 'workclass' 'fnlwgt' 'education' 'education-num' 'marital-status' 'occupation' 'relationship' 'race' 'sex' 'capital-gain' 'capital-loss' 'hours-per-week' 'native-country']<line_sep>'''准备部分'''<line_sep># 要展示的所有的节点 <def_stmt>allnodes guanxi<block_start>allnode=list(guanxi.keys())<for_stmt>jj guanxi<block_start><for_stmt>hhh guanxi[jj]<block_start><if_stmt>hhh<not><in>allnode<block_start>allnode.append(hhh)<block_end><block_end><block_end># 之所以要按顺序输出,是因为先画父节点,后画子节点,可以将箭头盖住,更为美观 <return>sorted(allnode)<block_end># 要展示的所有的叶子节点 <def_stmt>leafnodes guanxi<block_start>allnode=list(guanxi.keys())<line_sep>leafnode=[]<for_stmt>jj guanxi<block_start><for_stmt>hhh guanxi[jj]<block_start><if_stmt>hhh<not><in>allnode<block_start>leafnode.append(hhh)<block_end><block_end><block_end><return>leafnode<block_end># 要展示的所有的内部节点 <def_stmt>noye_node guanxi<block_start><return>list(guanxi.keys())<block_end>'''第一部分:展示内容'''<line_sep># 根据数据集输出各类别之间的比值 <def_stmt>output shujuji guanxi# 字典 <block_start>leibie={}<for_stmt>jjj allnodes(guanxi)<block_start>leibie[jjj]=[]<line_sep>cu=list(shujuji[jjj][: -1])<line_sep>gu=sorted(list(set(list(shujuji[jjj][: -1]))))<for_stmt>du gu<block_start>leibie[jjj].append([du cu.count(du)])# 各个类别及其数量 <block_end><block_end><return>leibie<block_end># 节点数据集、节点结果、节点规则绘制树 # 制作节点里面的内容 <def_stmt>dingyistr shujuji reeult guize guanxi zian# 规则字典 <block_start>guizezidian={}<line_sep># 类别字典 leibii=output(shujuji guanxi)<line_sep># 字符串字典 strdict={}<line_sep># 内部节点 nonode=noye_node(guanxi)<line_sep># 遍历需要展示的每一个节点,获得每一个节点需展示的字符串内容 <for_stmt>jjj allnodes(guanxi)# 为节点添加名称 <block_start>strdict[jjj]='节点:%s \n'%jjj# 内容分行 # 如果不是内部节点,则不需要添加特征,只添加各个类别的比例 <if_stmt>jjj<not><in>nonode<block_start>hu='占比:'<for_stmt>fu leibii[jjj]<block_start>hu<augadd>'%d:'%fu[1]<block_end>strdict[jjj]<augadd>'%s \n'%hu[:-1]<block_end># 对于内部节点需要多填加一个分类特征的内容、和规则 <else_stmt><block_start>hu='占比:'<for_stmt>fu leibii[jjj]<block_start>hu<augadd>'%d:'%fu[1]<block_end>strdict[jjj]<augadd>'%s \n'%hu[:-1]<line_sep># 添加分类特征 strdict[jjj]<augadd>'特征:%s \n'%zian[guize['%s'%(jjj+'r')][-1][0]]<line_sep># 添加规则 sign=0<try_stmt><block_start>guize['%s'%(jjj+'r')][-1][1]+1<line_sep>sign=1<block_end><except_stmt>TypeError<block_start><pass><block_end><if_stmt>sign<eq>0<block_start>guizezidian[jjj+'l']='值为:\n %s'%guize['%s'%(jjj+'r')][-1][1]<line_sep>guizezidian[jjj+'r']='值不为:\n %s'%guize['%s'%(jjj+'r')][-1][1]<block_end><else_stmt><block_start>guizezidian[jjj+'l']='值不大于:\n %s'%guize['%s'%(jjj+'r')][-1][1]<line_sep>guizezidian[jjj+'r']='值大于:\n %s'%guize['%s'%(jjj+'r')][-1][1]<block_end><block_end># 为需要展示的节点添加结果 strdict[jjj]<augadd>'结果:%s '%reeult[jjj]<block_end><return>strdict guizezidian<block_end># 分别返回节点展示的内容字典、连线上需要展示的内容字典 '''第二部分:节点的位置'''<line_sep># 根据节点名称的最大长度,确定画布的大小 <def_stmt>huabu guanxi# 获得所有的节点 <block_start>suoyounodes=allnodes(guanxi)<line_sep># 获取最长节点名称字符串的长度,这个长度同时也是树的深度。 changdu=max(len(i)<for>i suoyounodes)<line_sep># 返回长度以及画布大小 <return>changdu+1 2<power>max(6 changdu)<block_end># 
水平放下的位置,是根据这一层节点的个数、以及此节点的顺序确定的 <def_stmt>getorder exnode guanxi<block_start>fu=[]<for_stmt>jj allnodes(guanxi)<block_start><if_stmt>len(jj)<eq>len(exnode)<block_start>fu.append(jj)<block_end><block_end># 排序 sfu=sorted(fu)<line_sep><return>len(sfu)+1 sfu.index(exnode)+1<block_end>#前者加1是计算间隔,后者加1是因为index从0开始 # 根据画布大小定义每一个节点的横纵坐标位置 <def_stmt>jiedian_location guanxi# 树的深度,画布大小 <block_start>shushen,huahuabu=huabu(guanxi)<line_sep># 返回每个节点坐标的字典 loca={}<line_sep># 首先将节点名称按照长度组成字典 changdu={}<for_stmt>jj allnodes(guanxi)<block_start><try_stmt><block_start>changdu[len(jj)].append(jj)<block_end><except_stmt>KeyError<block_start>changdu[len(jj)]=[jj]<block_end><block_end># 开始确定需要展示节点的位置 <for_stmt>fi allnodes(guanxi)<block_start><if_stmt>fi<not><in>loca<block_start><for_stmt>gu changdu[len(fi)]# 同层的节点(也就是节点名称长度一样的)一起计算 <block_start>number=getorder(gu guanxi)<line_sep>loca[gu]=[huahuabu/number[0]<times>number[1] huahuabu-(huahuabu/shushen)<times>len(gu)]<block_end><block_end><block_end><return>loca<block_end>'''第三部分:准备工作结束,开始绘图'''<line_sep># 开始绘图 <def_stmt>draw_tree shujuji result guize guanxi zian=ziduan# 字符串内容 <block_start>strziu=dingyistr(shujuji result guize guanxi zian)<line_sep># 节点的位置 weihzi=jiedian_location(guanxi)<line_sep>noyye=noye_node(guanxi)<line_sep># 画布的设置 huab=huabu(guanxi)[1]+2# 上下左右预留空间 fig,ax=plt.subplots(figsize=(huab huab))<line_sep># 开始绘制 <for_stmt>jj allnodes(guanxi)<block_start>print(jj)<line_sep># 绘制所有的节点要展示的内容 # 内部节点 <if_stmt>jj<in>noyye<block_start>ax.text(weihzi[jj][0] weihzi[jj][1] strziu[0][jj] size=13 rotation=0. ha="center" va="center" bbox=dict(boxstyle="round" ec=(0.6 0.2 0.6) fc=(0.3 0.6 0.3) ))<block_end># 叶子节点 <else_stmt><block_start>ax.text(weihzi[jj][0] weihzi[jj][1] strziu[0][jj] size=13 rotation=0. ha="center" va="center" bbox=dict(boxstyle="round" ec=(0.2 0.5 0.2) fc=(0.5 0.2 0.5) ))<block_end># 只对内部节点绘制箭头和左右的分类规则 <if_stmt>jj<in>noyye# 添加左右箭头 <block_start>ax.annotate(' ' xy=(weihzi[jj+'r'][0] weihzi[jj+'r'][1]) xytext=(weihzi[jj][0] weihzi[jj][1]) ha="center" va="center" arrowprops=dict(facecolor='darkred' shrink=0.128))<line_sep>ax.annotate(' ' xy=(weihzi[jj+'l'][0] weihzi[jj+'l'][1]) xytext=(weihzi[jj][0] weihzi[jj][1]) ha="center" va="center" arrowprops=dict(facecolor='darkred' shrink=0.128))<line_sep># 添加左右规则 ax.text((weihzi[jj+'l'][0]+weihzi[jj][0])/2 (weihzi[jj+'l'][1]+weihzi[jj][1])/2-0.2 strziu[1][jj+'l'] fontsize=12 color='red' weight='bold')<line_sep>ax.text((weihzi[jj+'r'][0]+weihzi[jj][0])/2 (weihzi[jj+'r'][1]+weihzi[jj][1])/2-0.2 strziu[1][jj+'r'] fontsize=12 color='red' weight='bold')<block_end><block_end>ax.set(xlim=(0 huab) ylim=(0 huab))<line_sep>plt.show()<block_end># 根据不同的深度。看精确率的变化 <if_stmt>__name__<eq>'__main__'# 获得树的信息 <block_start>decision_tree=tree.DT()<line_sep># 完全成长的树 decision_tree.grow_tree()<line_sep># 剪枝形成的树的集 gu=decision_tree.prue_tree()<line_sep># 交叉验证形成的最好的树 cc=decision_tree.jiaocha_tree(gu[0])<line_sep>print(cc[0])<line_sep># 数据集 shuju=decision_tree.node_shujuji<line_sep># 结果 jieguo=decision_tree.jieguo_tree()<line_sep># 规则 rule=decision_tree.node_rule<line_sep>draw_tree(shuju jieguo rule cc[0])<block_end>
""" Question: 第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果。 """<import_stmt>sys<import_from_stmt>PIL Image<import_from_stmt>PIL ImageDraw<import_from_stmt>PIL ImageFont<def_stmt>add_number2img image number<block_start>font=ImageFont.truetype("/Library/Fonts/Chalkduster.ttf" 28)<line_sep>draw=ImageDraw.Draw(image)<line_sep>draw.text((200 0) str(number) (255 255 255) font=font)<line_sep>draw=ImageDraw.Draw(image)<line_sep>image.save("mask_with_num.png")<line_sep>image.show()<block_end>origin=Image.open("mask.png")<line_sep>add_number2img(origin sys.argv[1])<line_sep>
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>EcalTrivialConditionRetriever=cms.ESSource("EcalTrivialConditionRetriever" producedEcalClusterLocalContCorrParameters=cms.untracked.bool(<true>) producedEcalClusterCrackCorrParameters=cms.untracked.bool(<true>) producedEcalClusterEnergyUncertaintyParameters=cms.untracked.bool(<true>) producedEcalClusterEnergyCorrectionParameters=cms.untracked.bool(<true>) producedEcalClusterEnergyCorrectionObjectSpecificParameters=cms.untracked.bool(<true>) getEEAlignmentFromFile=cms.untracked.bool(<true>) EEAlignmentFile=cms.untracked.string('CalibCalorimetry/EcalTrivialCondModules/data/EEAlignment_2015.txt') getESAlignmentFromFile=cms.untracked.bool(<true>) ESAlignmentFile=cms.untracked.string('CalibCalorimetry/EcalTrivialCondModules/data/ESAlignment_2015.txt') getEBAlignmentFromFile=cms.untracked.bool(<true>) EBAlignmentFile=cms.untracked.string('CalibCalorimetry/EcalTrivialCondModules/data/EBAlignment_2015.txt'))<line_sep>
<import_from_stmt>django.views generic<import_from_stmt>.models Article<class_stmt>ArticleList(generic.ListView)<block_start>model=Article<line_sep>paginate_by=10<block_end><class_stmt>ArticleDetail(generic.DetailView)<block_start>model=Article<block_end>
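# A possible urls.py wiring for the two class-based views above (the URL names
# and patterns are assumptions, not taken from the original project).
from django.urls import path

from .views import ArticleDetail, ArticleList

urlpatterns = [
    path('', ArticleList.as_view(), name='article-list'),
    path('<int:pk>/', ArticleDetail.as_view(), name='article-detail'),
]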
<import_from_stmt>typing Optional<import_from_stmt>botocore.client BaseClient<import_from_stmt>typing Dict<import_from_stmt>botocore.paginate Paginator<import_from_stmt>botocore.waiter Waiter<import_from_stmt>typing Union<import_from_stmt>typing List<class_stmt>Client(BaseClient)<block_start><def_stmt>can_paginate self operation_name:str=<none><block_start>""" Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you\'d normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator(\"create_foo\")``. :return: ``True`` if the operation can be paginated, ``False`` otherwise. """<line_sep><pass><block_end><def_stmt>create_group self Name:str ResourceQuery:Dict Description:str=<none> Tags:Dict=<none><arrow>Dict<block_start>""" Creates a group with a specified name, description, and resource query. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/CreateGroup>`_ **Request Syntax** :: response = client.create_group( Name='string', Description='string', ResourceQuery={ 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' }, Tags={ 'string': 'string' } ) **Response Syntax** :: { 'Group': { 'GroupArn': 'string', 'Name': 'string', 'Description': 'string' }, 'ResourceQuery': { 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' }, 'Tags': { 'string': 'string' } } **Response Structure** - *(dict) --* - **Group** *(dict) --* A full description of the resource group after it is created. - **GroupArn** *(string) --* The ARN of a resource group. - **Name** *(string) --* The name of a resource group. - **Description** *(string) --* The description of the resource group. - **ResourceQuery** *(dict) --* The resource query associated with the group. - **Type** *(string) --* The type of the query. The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{"Key":"Stage","Values":["Test","Deploy"]},{"Key":"Version","Values":["1","2"]}]`` ) The results of this query might include the following. * An EC2 instance that has the following two tags: ``{"Key":"Stage","Value":"Deploy"}`` , and ``{"Key":"Version","Value":"2"}`` * An S3 bucket that has the following two tags: {"Key":"Stage","Value":"Test"}, and {"Key":"Version","Value":"1"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. 
* An EC2 instance that has only the following tag: ``{"Key":"Stage","Value":"Deploy"}`` . * An RDS database that has the following two tags: ``{"Key":"Stage","Value":"Archived"}`` , and ``{"Key":"Version","Value":"4"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* The query that defines a group or a search. - **Tags** *(dict) --* The tags associated with the group. - *(string) --* - *(string) --* :type Name: string :param Name: **[REQUIRED]** The name of the group, which is the identifier of the group in other operations. A resource group name cannot be updated after it is created. A resource group name can have a maximum of 128 characters, including letters, numbers, hyphens, dots, and underscores. The name cannot start with ``AWS`` or ``aws`` ; these are reserved. A resource group name must be unique within your account. :type Description: string :param Description: The description of the resource group. Descriptions can have a maximum of 511 characters, including letters, numbers, hyphens, underscores, punctuation, and spaces. :type ResourceQuery: dict :param ResourceQuery: **[REQUIRED]** The resource query that determines which AWS resources are members of this group. - **Type** *(string) --* **[REQUIRED]** The type of the query. The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{\"Key\":\"Stage\",\"Values\":[\"Test\",\"Deploy\"]},{\"Key\":\"Version\",\"Values\":[\"1\",\"2\"]}]`` ) The results of this query might include the following. * An EC2 instance that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"2\"}`` * An S3 bucket that has the following two tags: {\"Key\":\"Stage\",\"Value\":\"Test\"}, and {\"Key\":\"Version\",\"Value\":\"1\"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. * An EC2 instance that has only the following tag: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` . * An RDS database that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Archived\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"4\"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* **[REQUIRED]** The query that defines a group or a search. :type Tags: dict :param Tags: The tags to add to the group. A tag is a string-to-string map of key-value pairs. Tag keys can have a maximum character length of 128 characters, and tag values can have a maximum length of 256 characters. 
- *(string) --* - *(string) --* :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>delete_group self GroupName:str<arrow>Dict<block_start>""" Deletes a specified resource group. Deleting a resource group does not delete resources that are members of the group; it only deletes the group structure. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/DeleteGroup>`_ **Request Syntax** :: response = client.delete_group( GroupName='string' ) **Response Syntax** :: { 'Group': { 'GroupArn': 'string', 'Name': 'string', 'Description': 'string' } } **Response Structure** - *(dict) --* - **Group** *(dict) --* A full description of the deleted resource group. - **GroupArn** *(string) --* The ARN of a resource group. - **Name** *(string) --* The name of a resource group. - **Description** *(string) --* The description of the resource group. :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group to delete. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>generate_presigned_url self ClientMethod:str=<none> Params:Dict=<none> ExpiresIn:int=<none> HttpMethod:str=<none><block_start>""" Generate a presigned url given a client, its method, and arguments :type ClientMethod: string :param ClientMethod: The client method to presign for :type Params: dict :param Params: The parameters normally passed to ``ClientMethod``. :type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned url is valid for. By default it expires in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http method to use on the generated url. By default, the http method is whatever is used in the method\'s model. :returns: The presigned url """<line_sep><pass><block_end><def_stmt>get_group self GroupName:str<arrow>Dict<block_start>""" Returns information about a specified resource group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/GetGroup>`_ **Request Syntax** :: response = client.get_group( GroupName='string' ) **Response Syntax** :: { 'Group': { 'GroupArn': 'string', 'Name': 'string', 'Description': 'string' } } **Response Structure** - *(dict) --* - **Group** *(dict) --* A full description of the resource group. - **GroupArn** *(string) --* The ARN of a resource group. - **Name** *(string) --* The name of a resource group. - **Description** *(string) --* The description of the resource group. :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>get_group_query self GroupName:str<arrow>Dict<block_start>""" Returns the resource query associated with the specified resource group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/GetGroupQuery>`_ **Request Syntax** :: response = client.get_group_query( GroupName='string' ) **Response Syntax** :: { 'GroupQuery': { 'GroupName': 'string', 'ResourceQuery': { 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' } } } **Response Structure** - *(dict) --* - **GroupQuery** *(dict) --* The resource query associated with the specified group. - **GroupName** *(string) --* The name of a resource group that is associated with a specific resource query. - **ResourceQuery** *(dict) --* The resource query which determines which AWS resources are members of the associated resource group. - **Type** *(string) --* The type of the query. 
The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{"Key":"Stage","Values":["Test","Deploy"]},{"Key":"Version","Values":["1","2"]}]`` ) The results of this query might include the following. * An EC2 instance that has the following two tags: ``{"Key":"Stage","Value":"Deploy"}`` , and ``{"Key":"Version","Value":"2"}`` * An S3 bucket that has the following two tags: {"Key":"Stage","Value":"Test"}, and {"Key":"Version","Value":"1"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. * An EC2 instance that has only the following tag: ``{"Key":"Stage","Value":"Deploy"}`` . * An RDS database that has the following two tags: ``{"Key":"Stage","Value":"Archived"}`` , and ``{"Key":"Version","Value":"4"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* The query that defines a group or a search. :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>get_paginator self operation_name:str=<none><arrow>Paginator<block_start>""" Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you\'d normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator(\"create_foo\")``. :raise OperationNotPageableError: Raised if the operation is not pageable. You can use the ``client.can_paginate`` method to check if an operation is pageable. :rtype: L{botocore.paginate.Paginator} :return: A paginator object. """<line_sep><pass><block_end><def_stmt>get_tags self Arn:str<arrow>Dict<block_start>""" Returns a list of tags that are associated with a resource group, specified by an ARN. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/GetTags>`_ **Request Syntax** :: response = client.get_tags( Arn='string' ) **Response Syntax** :: { 'Arn': 'string', 'Tags': { 'string': 'string' } } **Response Structure** - *(dict) --* - **Arn** *(string) --* The ARN of the tagged resource group. - **Tags** *(dict) --* The tags associated with the specified resource group. - *(string) --* - *(string) --* :type Arn: string :param Arn: **[REQUIRED]** The ARN of the resource group for which you want a list of tags. The resource must exist within the account you are using. 
:rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>get_waiter self waiter_name:str=<none><arrow>Waiter<block_start>""" Returns an object that can wait for some condition. :type waiter_name: str :param waiter_name: The name of the waiter to get. See the waiters section of the service docs for a list of available waiters. :returns: The specified waiter object. :rtype: botocore.waiter.Waiter """<line_sep><pass><block_end><def_stmt>list_group_resources self GroupName:str Filters:List=<none> MaxResults:int=<none> NextToken:str=<none><arrow>Dict<block_start>""" Returns a list of ARNs of resources that are members of a specified resource group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/ListGroupResources>`_ **Request Syntax** :: response = client.list_group_resources( GroupName='string', Filters=[ { 'Name': 'resource-type', 'Values': [ 'string', ] }, ], MaxResults=123, NextToken='string' ) **Response Syntax** :: { 'ResourceIdentifiers': [ { 'ResourceArn': 'string', 'ResourceType': 'string' }, ], 'NextToken': 'string', 'QueryErrors': [ { 'ErrorCode': 'CLOUDFORMATION_STACK_INACTIVE'|'CLOUDFORMATION_STACK_NOT_EXISTING', 'Message': 'string' }, ] } **Response Structure** - *(dict) --* - **ResourceIdentifiers** *(list) --* The ARNs and resource types of resources that are members of the group that you specified. - *(dict) --* The ARN of a resource, and its resource type. - **ResourceArn** *(string) --* The ARN of a resource. - **ResourceType** *(string) --* The resource type of a resource, such as ``AWS::EC2::Instance`` . - **NextToken** *(string) --* The NextToken value to include in a subsequent ``ListGroupResources`` request, to get more results. - **QueryErrors** *(list) --* A list of ``QueryError`` objects. Each error is an object that contains ``ErrorCode`` and ``Message`` structures. Possible values for ``ErrorCode`` are ``CLOUDFORMATION_STACK_INACTIVE`` and ``CLOUDFORMATION_STACK_NOT_EXISTING`` . - *(dict) --* A two-part error structure that can occur in ``ListGroupResources`` or ``SearchResources`` operations on CloudFormation stack-based queries. The error occurs if the CloudFormation stack on which the query is based either does not exist, or has a status that renders the stack inactive. A ``QueryError`` occurrence does not necessarily mean that AWS Resource Groups could not complete the operation, but the resulting group might have no member resources. - **ErrorCode** *(string) --* Possible values are ``CLOUDFORMATION_STACK_INACTIVE`` and ``CLOUDFORMATION_STACK_NOT_EXISTING`` . - **Message** *(string) --* A message that explains the ``ErrorCode`` value. Messages might state that the specified CloudFormation stack does not exist (or no longer exists). For ``CLOUDFORMATION_STACK_INACTIVE`` , the message typically states that the CloudFormation stack has a status that is not (or no longer) active, such as ``CREATE_FAILED`` . :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group. :type Filters: list :param Filters: Filters, formatted as ResourceFilter objects, that you want to apply to a ListGroupResources operation. * ``resource-type`` - Filter resources by their type. Specify up to five resource types in the format AWS::ServiceCode::ResourceType. For example, AWS::EC2::Instance, or AWS::S3::Bucket. - *(dict) --* A filter name and value pair that is used to obtain more specific results from a list of resources. - **Name** *(string) --* **[REQUIRED]** The name of the filter. 
Filter names are case-sensitive. - **Values** *(list) --* **[REQUIRED]** One or more filter values. Allowed filter values vary by resource filter name, and are case-sensitive. - *(string) --* :type MaxResults: integer :param MaxResults: The maximum number of group member ARNs that are returned in a single call by ListGroupResources, in paginated output. By default, this number is 50. :type NextToken: string :param NextToken: The NextToken value that is returned in a paginated ListGroupResources request. To get the next page of results, run the call again, add the NextToken parameter, and specify the NextToken value. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>list_groups self Filters:List=<none> MaxResults:int=<none> NextToken:str=<none><arrow>Dict<block_start>""" Returns a list of existing resource groups in your account. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/ListGroups>`_ **Request Syntax** :: response = client.list_groups( Filters=[ { 'Name': 'resource-type', 'Values': [ 'string', ] }, ], MaxResults=123, NextToken='string' ) **Response Syntax** :: { 'GroupIdentifiers': [ { 'GroupName': 'string', 'GroupArn': 'string' }, ], 'Groups': [ { 'GroupArn': 'string', 'Name': 'string', 'Description': 'string' }, ], 'NextToken': 'string' } **Response Structure** - *(dict) --* - **GroupIdentifiers** *(list) --* A list of GroupIdentifier objects. Each identifier is an object that contains both the GroupName and the GroupArn. - *(dict) --* The ARN and group name of a group. - **GroupName** *(string) --* The name of a resource group. - **GroupArn** *(string) --* The ARN of a resource group. - **Groups** *(list) --* A list of resource groups. - *(dict) --* A resource group. - **GroupArn** *(string) --* The ARN of a resource group. - **Name** *(string) --* The name of a resource group. - **Description** *(string) --* The description of the resource group. - **NextToken** *(string) --* The NextToken value to include in a subsequent ``ListGroups`` request, to get more results. :type Filters: list :param Filters: Filters, formatted as GroupFilter objects, that you want to apply to a ListGroups operation. * ``resource-type`` - Filter groups by resource type. Specify up to five resource types in the format AWS::ServiceCode::ResourceType. For example, AWS::EC2::Instance, or AWS::S3::Bucket. - *(dict) --* A filter name and value pair that is used to obtain more specific results from a list of groups. - **Name** *(string) --* **[REQUIRED]** The name of the filter. Filter names are case-sensitive. - **Values** *(list) --* **[REQUIRED]** One or more filter values. Allowed filter values vary by group filter name, and are case-sensitive. - *(string) --* :type MaxResults: integer :param MaxResults: The maximum number of resource group results that are returned by ListGroups in paginated output. By default, this number is 50. :type NextToken: string :param NextToken: The NextToken value that is returned in a paginated ``ListGroups`` request. To get the next page of results, run the call again, add the NextToken parameter, and specify the NextToken value. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>search_resources self ResourceQuery:Dict MaxResults:int=<none> NextToken:str=<none><arrow>Dict<block_start>""" Returns a list of AWS resource identifiers that matches a specified query. The query uses the same format as a resource query in a CreateGroup or UpdateGroupQuery operation. 
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/SearchResources>`_ **Request Syntax** :: response = client.search_resources( ResourceQuery={ 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' }, MaxResults=123, NextToken='string' ) **Response Syntax** :: { 'ResourceIdentifiers': [ { 'ResourceArn': 'string', 'ResourceType': 'string' }, ], 'NextToken': 'string', 'QueryErrors': [ { 'ErrorCode': 'CLOUDFORMATION_STACK_INACTIVE'|'CLOUDFORMATION_STACK_NOT_EXISTING', 'Message': 'string' }, ] } **Response Structure** - *(dict) --* - **ResourceIdentifiers** *(list) --* The ARNs and resource types of resources that are members of the group that you specified. - *(dict) --* The ARN of a resource, and its resource type. - **ResourceArn** *(string) --* The ARN of a resource. - **ResourceType** *(string) --* The resource type of a resource, such as ``AWS::EC2::Instance`` . - **NextToken** *(string) --* The NextToken value to include in a subsequent ``SearchResources`` request, to get more results. - **QueryErrors** *(list) --* A list of ``QueryError`` objects. Each error is an object that contains ``ErrorCode`` and ``Message`` structures. Possible values for ``ErrorCode`` are ``CLOUDFORMATION_STACK_INACTIVE`` and ``CLOUDFORMATION_STACK_NOT_EXISTING`` . - *(dict) --* A two-part error structure that can occur in ``ListGroupResources`` or ``SearchResources`` operations on CloudFormation stack-based queries. The error occurs if the CloudFormation stack on which the query is based either does not exist, or has a status that renders the stack inactive. A ``QueryError`` occurrence does not necessarily mean that AWS Resource Groups could not complete the operation, but the resulting group might have no member resources. - **ErrorCode** *(string) --* Possible values are ``CLOUDFORMATION_STACK_INACTIVE`` and ``CLOUDFORMATION_STACK_NOT_EXISTING`` . - **Message** *(string) --* A message that explains the ``ErrorCode`` value. Messages might state that the specified CloudFormation stack does not exist (or no longer exists). For ``CLOUDFORMATION_STACK_INACTIVE`` , the message typically states that the CloudFormation stack has a status that is not (or no longer) active, such as ``CREATE_FAILED`` . :type ResourceQuery: dict :param ResourceQuery: **[REQUIRED]** The search query, using the same formats that are supported for resource group definition. - **Type** *(string) --* **[REQUIRED]** The type of the query. The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{\"Key\":\"Stage\",\"Values\":[\"Test\",\"Deploy\"]},{\"Key\":\"Version\",\"Values\":[\"1\",\"2\"]}]`` ) The results of this query might include the following. 
* An EC2 instance that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"2\"}`` * An S3 bucket that has the following two tags: {\"Key\":\"Stage\",\"Value\":\"Test\"}, and {\"Key\":\"Version\",\"Value\":\"1\"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. * An EC2 instance that has only the following tag: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` . * An RDS database that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Archived\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"4\"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* **[REQUIRED]** The query that defines a group or a search. :type MaxResults: integer :param MaxResults: The maximum number of group member ARNs returned by ``SearchResources`` in paginated output. By default, this number is 50. :type NextToken: string :param NextToken: The NextToken value that is returned in a paginated ``SearchResources`` request. To get the next page of results, run the call again, add the NextToken parameter, and specify the NextToken value. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>tag self Arn:str Tags:Dict<arrow>Dict<block_start>""" Adds tags to a resource group with the specified ARN. Existing tags on a resource group are not changed if they are not specified in the request parameters. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/Tag>`_ **Request Syntax** :: response = client.tag( Arn='string', Tags={ 'string': 'string' } ) **Response Syntax** :: { 'Arn': 'string', 'Tags': { 'string': 'string' } } **Response Structure** - *(dict) --* - **Arn** *(string) --* The ARN of the tagged resource. - **Tags** *(dict) --* The tags that have been added to the specified resource. - *(string) --* - *(string) --* :type Arn: string :param Arn: **[REQUIRED]** The ARN of the resource to which to add tags. :type Tags: dict :param Tags: **[REQUIRED]** The tags to add to the specified resource. A tag is a string-to-string map of key-value pairs. Tag keys can have a maximum character length of 128 characters, and tag values can have a maximum length of 256 characters. - *(string) --* - *(string) --* :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>untag self Arn:str Keys:List<arrow>Dict<block_start>""" Deletes specified tags from a specified resource. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/Untag>`_ **Request Syntax** :: response = client.untag( Arn='string', Keys=[ 'string', ] ) **Response Syntax** :: { 'Arn': 'string', 'Keys': [ 'string', ] } **Response Structure** - *(dict) --* - **Arn** *(string) --* The ARN of the resource from which tags have been removed. - **Keys** *(list) --* The keys of tags that have been removed. - *(string) --* :type Arn: string :param Arn: **[REQUIRED]** The ARN of the resource from which to remove tags. :type Keys: list :param Keys: **[REQUIRED]** The keys of the tags to be removed. 
- *(string) --* :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>update_group self GroupName:str Description:str=<none><arrow>Dict<block_start>""" Updates an existing group with a new or changed description. You cannot update the name of a resource group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/UpdateGroup>`_ **Request Syntax** :: response = client.update_group( GroupName='string', Description='string' ) **Response Syntax** :: { 'Group': { 'GroupArn': 'string', 'Name': 'string', 'Description': 'string' } } **Response Structure** - *(dict) --* - **Group** *(dict) --* The full description of the resource group after it has been updated. - **GroupArn** *(string) --* The ARN of a resource group. - **Name** *(string) --* The name of a resource group. - **Description** *(string) --* The description of the resource group. :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group for which you want to update its description. :type Description: string :param Description: The description of the resource group. Descriptions can have a maximum of 511 characters, including letters, numbers, hyphens, underscores, punctuation, and spaces. :rtype: dict :returns: """<line_sep><pass><block_end><def_stmt>update_group_query self GroupName:str ResourceQuery:Dict<arrow>Dict<block_start>""" Updates the resource query of a group. See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/UpdateGroupQuery>`_ **Request Syntax** :: response = client.update_group_query( GroupName='string', ResourceQuery={ 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' } ) **Response Syntax** :: { 'GroupQuery': { 'GroupName': 'string', 'ResourceQuery': { 'Type': 'TAG_FILTERS_1_0'|'CLOUDFORMATION_STACK_1_0', 'Query': 'string' } } } **Response Structure** - *(dict) --* - **GroupQuery** *(dict) --* The resource query associated with the resource group after the update. - **GroupName** *(string) --* The name of a resource group that is associated with a specific resource query. - **ResourceQuery** *(dict) --* The resource query which determines which AWS resources are members of the associated resource group. - **Type** *(string) --* The type of the query. The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{"Key":"Stage","Values":["Test","Deploy"]},{"Key":"Version","Values":["1","2"]}]`` ) The results of this query might include the following. 
* An EC2 instance that has the following two tags: ``{"Key":"Stage","Value":"Deploy"}`` , and ``{"Key":"Version","Value":"2"}`` * An S3 bucket that has the following two tags: {"Key":"Stage","Value":"Test"}, and {"Key":"Version","Value":"1"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. * An EC2 instance that has only the following tag: ``{"Key":"Stage","Value":"Deploy"}`` . * An RDS database that has the following two tags: ``{"Key":"Stage","Value":"Archived"}`` , and ``{"Key":"Version","Value":"4"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* The query that defines a group or a search. :type GroupName: string :param GroupName: **[REQUIRED]** The name of the resource group for which you want to edit the query. :type ResourceQuery: dict :param ResourceQuery: **[REQUIRED]** The resource query that determines which AWS resources are members of the resource group. - **Type** *(string) --* **[REQUIRED]** The type of the query. The valid values in this release are ``TAG_FILTERS_1_0`` and ``CLOUDFORMATION_STACK_1_0`` . * ``TAG_FILTERS_1_0:`` * A JSON syntax that lets you specify a collection of simple tag filters for resource types and tags, as supported by the AWS Tagging API `GetResources <https://docs.aws.amazon.com/resourcegroupstagging/latest/APIReference/API_GetResources.html>`__ operation. If you specify more than one tag key, only resources that match all tag keys, and at least one value of each specified tag key, are returned in your query. If you specify more than one value for a tag key, a resource matches the filter if it has a tag key value that matches *any* of the specified values. For example, consider the following sample query for resources that have two tags, ``Stage`` and ``Version`` , with two values each. (``[{\"Key\":\"Stage\",\"Values\":[\"Test\",\"Deploy\"]},{\"Key\":\"Version\",\"Values\":[\"1\",\"2\"]}]`` ) The results of this query might include the following. * An EC2 instance that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"2\"}`` * An S3 bucket that has the following two tags: {\"Key\":\"Stage\",\"Value\":\"Test\"}, and {\"Key\":\"Version\",\"Value\":\"1\"} The query would not return the following results, however. The following EC2 instance does not have all tag keys specified in the filter, so it is rejected. The RDS database has all of the tag keys, but no values that match at least one of the specified tag key values in the filter. * An EC2 instance that has only the following tag: ``{\"Key\":\"Stage\",\"Value\":\"Deploy\"}`` . * An RDS database that has the following two tags: ``{\"Key\":\"Stage\",\"Value\":\"Archived\"}`` , and ``{\"Key\":\"Version\",\"Value\":\"4\"}`` * ``CLOUDFORMATION_STACK_1_0:`` * A JSON syntax that lets you specify a CloudFormation stack ARN. - **Query** *(string) --* **[REQUIRED]** The query that defines a group or a search. :rtype: dict :returns: """<line_sep><pass><block_end><block_end>
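# Hedged usage sketch for the stubbed Resource Groups client above, assuming boto3 is installed and AWS credentials are configured; the group name and the tag query below are illustrative only, not part of the generated stubs. <import_stmt>boto3<if_stmt>__name__<eq>'__main__'<block_start>client=boto3.client('resource-groups')<line_sep>created=client.create_group(Name='example-group' Description='EC2 instances tagged Stage=Test' ResourceQuery={'Type':'TAG_FILTERS_1_0' 'Query':'{"ResourceTypeFilters":["AWS::EC2::Instance"],"TagFilters":[{"Key":"Stage","Values":["Test"]}]}'})<line_sep>print(created['Group']['GroupArn'])<line_sep># List up to ten member ARNs of the group that was just created. resources=client.list_group_resources(GroupName='example-group' MaxResults=10)<for_stmt>identifier resources['ResourceIdentifiers']<block_start>print(identifier['ResourceArn'] identifier['ResourceType'])<block_end><block_end>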
# Graph Traversing # <import_from_stmt>tkinter *<import_stmt>time<class_stmt>GraphTraversal<block_start><def_stmt>__init__ self root<block_start>self.window=root<line_sep>self.make_canvas=Canvas(self.window bg="chocolate" relief=RAISED bd=7 width=500 height=500)<line_sep>self.make_canvas.pack()<line_sep># Status label initialization self.status=<none><line_sep># Some list initialization bt default self.vertex_store=[]<line_sep>self.total_circle=[]<line_sep>self.queue_bfs=[]<line_sep>self.stack_dfs=[]<line_sep># Some default function call self.basic_set_up()<line_sep>self.make_vertex()<block_end><def_stmt>basic_set_up self<block_start>heading=Label(self.make_canvas text="Graph Traversing Visualization" bg="chocolate" fg="yellow" font=("Arial" 20 "bold" "italic"))<line_sep>heading.place(x=50 y=10)<line_sep>bfs_btn=Button(self.window text="BFS" font=("Arial" 15 "bold") bg="black" fg="green" relief=RAISED bd=8 command=self.bfs_traversing)<line_sep>bfs_btn.place(x=20 y=530)<line_sep>dfs_btn=Button(self.window text="DFS" font=("Arial" 15 "bold") bg="black" fg="green" relief=RAISED bd=8 command=self.dfs_traversing)<line_sep>dfs_btn.place(x=400 y=530)<line_sep>self.status=Label(self.make_canvas text="Not Visited" bg="chocolate" fg="brown" font=("Arial" 20 "bold" "italic"))<line_sep>self.status.place(x=50 y=450)<block_end><def_stmt>make_vertex self# Vertex with connection make <block_start><for_stmt>i range(15)<block_start>self.total_circle.append(i)<block_end>self.total_circle[0]=self.make_canvas.create_oval(80 250 110 280 width=3)<line_sep>self.total_circle[1]=self.make_canvas.create_oval(160 180 190 210 width=3)<line_sep>self.total_circle[2]=self.make_canvas.create_oval(160 320 190 350 width=3)<line_sep>self.total_circle[3]=self.make_canvas.create_oval(230 130 260 160 width=3)<line_sep>self.total_circle[4]=self.make_canvas.create_oval(230 230 260 260 width=3)<line_sep>self.total_circle[5]=self.make_canvas.create_oval(230 270 260 300 width=3)<line_sep>self.total_circle[6]=self.make_canvas.create_oval(230 370 260 400 width=3)<line_sep>self.total_circle[7]=self.make_canvas.create_oval(280 80 310 110 width=3)<line_sep>self.total_circle[8]=self.make_canvas.create_oval(280 180 310 210 width=3)<line_sep>self.total_circle[9]=self.make_canvas.create_oval(280 250 310 280 width=3)<line_sep>self.total_circle[10]=self.make_canvas.create_oval(280 320 310 350 width=3)<line_sep>self.total_circle[11]=self.make_canvas.create_oval(280 420 310 450 width=3)<line_sep>self.total_circle[12]=self.make_canvas.create_oval(350 130 380 160 width=3)<line_sep>self.total_circle[13]=self.make_canvas.create_oval(350 220 380 250 width=3)<line_sep>self.total_circle[14]=self.make_canvas.create_oval(350 360 380 390 width=3)<line_sep>self.make_connector_up(0 1)<line_sep>self.make_connector_down(0 2)<line_sep>self.collector_connector(0 1 2)<line_sep>self.make_connector_up(1 3)<line_sep>self.make_connector_down(1 4)<line_sep>self.collector_connector(1 3 4)<line_sep>self.make_connector_up(2 5)<line_sep>self.make_connector_down(2 6)<line_sep>self.collector_connector(2 5 6)<line_sep>self.make_connector_up(3 7)<line_sep>self.make_connector_down(3 8)<line_sep>self.collector_connector(3 7 8)<line_sep>self.make_connector_down(4 9)<line_sep>self.collector_connector(4 <none> 9)<line_sep>self.make_connector_down(5 10)<line_sep>self.collector_connector(5 <none> 10)<line_sep>self.make_connector_down(6 11)<line_sep>self.collector_connector(6 <none> 11)<line_sep>self.make_connector_up(8 12)<line_sep>self.collector_connector(8 12 
<none>)<line_sep>self.make_connector_up(9 13)<line_sep>self.collector_connector(9 13 <none>)<line_sep>self.make_connector_down(10 14)<line_sep>self.collector_connector(10 <none> 14)<line_sep>print(self.vertex_store)<block_end><def_stmt>make_connector_up self index1 index2# Up node connection make <block_start>first_coord=self.make_canvas.coords(self.total_circle[index1])# Source node coordinates second_coord=self.make_canvas.coords(self.total_circle[index2])# Destination node coordinates line_start_x=(first_coord[0]+first_coord[2])/2# Connector line start_x line_end_x=(second_coord[0]+second_coord[2])/2# Connector line end_x line_start_y=(first_coord[1]+first_coord[3])/2# Connector line start_y line_end_y=(second_coord[1]+second_coord[3])/2# Connector line end_y self.make_canvas.create_line(line_start_x+10 line_start_y-10 line_end_x-10 line_end_y+10 width=3)<block_end><def_stmt>make_connector_down self index1 index2# Down node connection make <block_start>first_coord=self.make_canvas.coords(self.total_circle[index1])# Source node coordinates second_coord=self.make_canvas.coords(self.total_circle[index2])# Destination node coordinates line_start_x=(first_coord[0]+first_coord[2])/2# Connector line start_x line_end_x=(second_coord[0]+second_coord[2])/2# Connector line end_x line_start_y=(first_coord[1]+first_coord[3])/2# Connector line start_y line_end_y=(second_coord[1]+second_coord[3])/2# Connector line end_y self.make_canvas.create_line(line_start_x+12 line_start_y+5 line_end_x-12 line_end_y-5 width=3)<block_end><def_stmt>collector_connector self source connector1 connector2# All about node data collect and store <block_start>temp=[]<line_sep>temp.append(self.total_circle[source])<if_stmt>connector1<block_start>temp.append(self.total_circle[connector1])<block_end><else_stmt><block_start>temp.append(<none>)<block_end><if_stmt>connector2<block_start>temp.append(self.total_circle[connector2])<block_end><else_stmt><block_start>temp.append(<none>)<block_end>self.vertex_store.append(temp)<block_end><def_stmt>binary_search self start end find_it_as_source# Binary search algorithm use here <block_start><while_stmt>start<le>end<block_start>mid=int((start+end)/2)<if_stmt>self.vertex_store[mid][0]<eq>find_it_as_source<block_start><return>self.vertex_store[mid]<block_end><elif_stmt>self.vertex_store[mid][0]<l>find_it_as_source<block_start>start=mid+1<block_end><else_stmt><block_start>end=mid-1<block_end><block_end><return>-1<block_end><def_stmt>bfs_traversing self<block_start><try_stmt><block_start>self.status['text']="Red: Visited"<line_sep>self.queue_bfs.append(self.vertex_store[0][0])<while_stmt>self.queue_bfs<block_start>temp=self.binary_search(0 9 self.queue_bfs[0])<if_stmt>temp<ne>-1<block_start><if_stmt>temp[1]<block_start>self.queue_bfs.append(temp[1])<block_end><if_stmt>temp[2]<block_start>self.queue_bfs.append(temp[2])<block_end><block_end>take_vertex=self.queue_bfs.pop(0)<line_sep>print(take_vertex)<line_sep>self.make_canvas.itemconfig(take_vertex fill="red")<line_sep>self.window.update()<line_sep>time.sleep(0.3)<block_end>self.status['text']="All node Visited"<block_end><except_stmt><block_start>print("Force stop error")<block_end><block_end><def_stmt>dfs_traversing self<block_start><try_stmt><block_start>self.status['text']="Blue: Visited"<line_sep>self.stack_dfs.append(self.vertex_store[0][0])<while_stmt>self.stack_dfs<block_start>take_vertex=self.stack_dfs.pop()<line_sep>print(take_vertex)<line_sep>self.make_canvas.itemconfig(take_vertex 
fill="blue")<line_sep>self.window.update()<line_sep>time.sleep(0.3)<line_sep>temp=self.binary_search(0 9 take_vertex)<if_stmt>temp<ne>-1<block_start><if_stmt>temp[1]<block_start>self.stack_dfs.append(temp[1])<block_end><if_stmt>temp[2]<block_start>self.stack_dfs.append(temp[2])<block_end><block_end><block_end>self.status['text']="All node Visited"<block_end><except_stmt><block_start>print("Force stop error")<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>window=Tk()<line_sep>window.title("Graph Traversal Visualizer")<line_sep>window.geometry("400x600")<line_sep>window.maxsize(500 600)<line_sep>window.minsize(500 600)<line_sep>window.config(bg="orange")<line_sep>GraphTraversal(window)<line_sep>window.mainloop()<block_end>
#https://www.hackerrank.com/challenges/interchange-two-numbers <import_stmt>fileinput<line_sep>#Input: read both lines and drop trailing newlines a,b=[line.strip()<for>line fileinput.input()]<line_sep>#Solve: swap with tuple unpacking a,b=(b a)<line_sep>#Output print(a)<line_sep>print(b)<line_sep>
<import_from_stmt>polygraphy.mod.importer *<import_from_stmt>polygraphy.mod.exporter *<import_from_stmt>polygraphy.mod.util version<line_sep>
<import_stmt>numpy<as>np<import_stmt>cv2<import_stmt>logging<import_from_stmt>.utils.localization LocResult<class_stmt>CppLocalization<block_start><def_stmt>__init__ self db_ids local_db global_descriptors images points<block_start><import_stmt>_hloc_cpp<line_sep>self.hloc=_hloc_cpp.HLoc()<line_sep>id_to_idx={}<line_sep>old_to_new_kpt={}<for_stmt>idx,i enumerate(db_ids)<block_start>keypoints=local_db[i].keypoints.T.astype(np.float32).copy()<line_sep>local_desc=local_db[i].descriptors.T.astype(np.float32).copy()<line_sep>global_desc=global_descriptors[idx].astype(np.float32).copy()<line_sep># keypoints are NOT undistorted or normalized new_idx=self.hloc.addImage(global_desc keypoints local_desc)<line_sep>id_to_idx[i]=new_idx<line_sep>old_to_new_kpt[i]={k:j<for>j,k enumerate(np.where(images[i].point3D_ids<ge>0)[0])}<block_end><for_stmt>i,pt points.items()<block_start>observations=np.array([[id_to_idx[im_id] old_to_new_kpt[im_id][kpt_id]]<for>im_id,kpt_id zip(pt.image_ids pt.point2D_idxs)] dtype=np.int32)<line_sep>self.hloc.add3dPoint(pt.xyz.astype(np.float32).copy() observations.copy())<block_end>self.hloc.buildIndex()<block_end><def_stmt>localize self query_info query_item global_transf local_transf<block_start>global_desc=global_transf(query_item.global_desc[np.newaxis])[0]<line_sep>local_desc=local_transf(query_item.local_desc)<line_sep># Query keypoints ARE normalized to the image plane before being passed to the C++ backend keypoints=cv2.undistortPoints(query_item.keypoints[np.newaxis] query_info.K np.array([query_info.dist 0 0 0]))[0]<line_sep>logging.info('Localizing image %s' query_info.name)<line_sep>ret=self.hloc.localize(global_desc.astype(np.float32) keypoints.astype(np.float32).T.copy() local_desc.astype(np.float32).T.copy())<line_sep>(success num_components_total num_components_tested last_component_size num_db_landmarks num_matches num_inliers num_iters global_ms covis_ms local_ms pnp_ms)=ret<line_sep>result=LocResult(success num_inliers 0 np.eye(4))<line_sep>stats={'success':success 'num_components_total':num_components_total 'num_components_tested':num_components_tested 'last_component_size':last_component_size 'num_db_landmarks':num_db_landmarks 'num_matches':num_matches 'num_inliers':num_inliers 'num_ransac_iters':num_iters 'timings':{'global':global_ms 'covis':covis_ms 'local':local_ms 'pnp':pnp_ms }}<line_sep><return>(result stats)<block_end><block_end>
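# Hedged sketch of the keypoint-normalization step performed in localize() above, reusing the numpy/cv2 imports at the top of this file; the pinhole intrinsics and pixel coordinates below are made-up illustration values. <if_stmt>__name__<eq>'__main__'<block_start>demo_K=np.array([[600.0 0.0 320.0] [0.0 600.0 240.0] [0.0 0.0 1.0]])<line_sep>demo_keypoints=np.array([[320.0 240.0] [400.0 300.0]] dtype=np.float32)<line_sep>demo_normalized=cv2.undistortPoints(demo_keypoints[np.newaxis] demo_K np.zeros(4))[0]<line_sep># With zero distortion the principal point maps to (0, 0) and other pixels to metric image-plane coordinates. print(demo_normalized)<block_end>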
<import_stmt>collections<import_from_stmt>supriya CalculationRate<import_from_stmt>supriya.synthdefs UGen<class_stmt>Gendy1(UGen)<block_start>""" A dynamic stochastic synthesis generator. :: >>> gendy_1 = supriya.ugens.Gendy1.ar( ... adparam=1, ... ampdist=1, ... ampscale=0.5, ... ddparam=1, ... durdist=1, ... durscale=0.5, ... init_cps=12, ... knum=10, ... maxfrequency=660, ... minfrequency=440, ... ) >>> gendy_1 Gendy1.ar() """<line_sep>### CLASS VARIABLES ### _ordered_input_names=collections.OrderedDict([("ampdist" 1) ("durdist" 1) ("adparam" 1) ("ddparam" 1) ("minfrequency" 440) ("maxfrequency" 660) ("ampscale" 0.5) ("durscale" 0.5) ("init_cps" 12) ("knum" <none>) ])<line_sep>_valid_calculation_rates=(CalculationRate.AUDIO CalculationRate.CONTROL)<line_sep>### INITIALIZER ### <def_stmt>__init__ self calculation_rate=<none> adparam=1 ampdist=1 ampscale=0.5 ddparam=1 durdist=1 durscale=0.5 init_cps=12 knum=<none> maxfrequency=660 minfrequency=440 <block_start><if_stmt>knum<is><none><block_start>knum=init_cps<block_end>UGen.__init__(self calculation_rate=calculation_rate adparam=adparam ampdist=ampdist ampscale=ampscale ddparam=ddparam durdist=durdist durscale=durscale init_cps=init_cps knum=knum maxfrequency=maxfrequency minfrequency=minfrequency )<block_end><block_end><class_stmt>Gendy2(UGen)<block_start>""" A dynamic stochastic synthesis generator. :: >>> gendy_2 = supriya.ugens.Gendy2.ar( ... a=1.17, ... adparam=1, ... ampdist=1, ... ampscale=0.5, ... c=0.31, ... ddparam=1, ... durdist=1, ... durscale=0.5, ... init_cps=12, ... knum=10, ... maxfrequency=660, ... minfrequency=440, ... ) >>> gendy_2 Gendy2.ar() """<line_sep>_ordered_input_names=collections.OrderedDict([("ampdist" 1) ("durdist" 1) ("adparam" 1) ("ddparam" 1) ("minfrequency" 440) ("maxfrequency" 660) ("ampscale" 0.5) ("durscale" 0.5) ("init_cps" 12) ("knum" <none>) ("a" 1.17) ("c" 0.31) ])<line_sep>_valid_calculation_rates=(CalculationRate.AUDIO CalculationRate.CONTROL)<block_end><class_stmt>Gendy3(UGen)<block_start>""" A dynamic stochastic synthesis generator. :: >>> gendy_3 = supriya.ugens.Gendy3.ar( ... adparam=1, ... ampdist=1, ... ampscale=0.5, ... ddparam=1, ... durdist=1, ... durscale=0.5, ... frequency=440, ... init_cps=12, ... knum=10, ... ) >>> gendy_3 Gendy3.ar() """<line_sep>_ordered_input_names=collections.OrderedDict([("ampdist" 1) ("durdist" 1) ("adparam" 1) ("ddparam" 1) ("frequency" 440) ("ampscale" 0.5) ("durscale" 0.5) ("init_cps" 12) ("knum" <none>) ])<line_sep>_valid_calculation_rates=(CalculationRate.AUDIO CalculationRate.CONTROL)<block_end>
''' @author: <NAME> '''<import_stmt>pandas<as>pd<import_stmt>sys<import_stmt>numpy<as>np<import_stmt>gzip<line_sep>df=pd.read_csv(sys.stdin)<line_sep># Weighted blend of the four model outputs in logit space, squashed back through a sigmoid p=0.55<times>df.p1+0.15<times>df.p2+0.15<times>df.p3+0.15<times>df.p4<line_sep>df['Predicted']=1.0/(1.0+np.exp(-p))<line_sep>submission='submission.csv.gz'<line_sep>print('saving to' submission '...')<with_stmt>gzip.open(submission 'wt')<as>f<block_start>df[['Id' 'Predicted']].to_csv(f index=<false>)<block_end>
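# Quick self-contained check of the same weighted-logit blend on made-up values, reusing the pandas/numpy imports above; the numbers below are illustrative only. demo=pd.DataFrame({'p1':[0.0 2.0] 'p2':[0.0 1.0] 'p3':[0.0 1.0] 'p4':[0.0 1.0]})<line_sep>demo_logit=0.55<times>demo.p1+0.15<times>demo.p2+0.15<times>demo.p3+0.15<times>demo.p4<line_sep># The first row blends to 0.5 and the second to roughly 0.82. print(1.0/(1.0+np.exp(-demo_logit)))<line_sep>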
<import_from_future_stmt> print_function unicode_literals<import_from_stmt>aspen json<import_from_stmt>gratipay.testing Harness<class_stmt>Tests(Harness)<block_start><def_stmt>setUp self<block_start>Harness.setUp(self)<line_sep>self.make_participant('alice' claimed_time='now')<block_end><def_stmt>hit_privacy self method='GET' expected_code=200 **kw<block_start>response=self.client.hit(method "/~alice/privacy.json" auth_as='alice' **kw)<if_stmt>response.code<ne>expected_code<block_start>print(response.body)<block_end><return>response<block_end><def_stmt>test_participant_can_get_their_privacy_settings self<block_start>response=self.hit_privacy('GET')<line_sep>actual=json.loads(response.body)<assert_stmt>actual<eq>{'is_searchable':<true> 'anonymous_giving':<false> }<block_end><def_stmt>test_participant_can_toggle_is_searchable self<block_start>response=self.hit_privacy('POST' data={'toggle':'is_searchable'})<line_sep>actual=json.loads(response.body)<assert_stmt>actual['is_searchable']<is><false><block_end><def_stmt>test_participant_can_toggle_is_searchable_back self<block_start>response=self.hit_privacy('POST' data={'toggle':'is_searchable'})<line_sep>response=self.hit_privacy('POST' data={'toggle':'is_searchable'})<line_sep>actual=json.loads(response.body)<assert_stmt>actual['is_searchable']<is><true><block_end><def_stmt>test_participant_can_toggle_anonymous_giving self<block_start>response=self.hit_privacy('POST' data={'toggle':'anonymous_giving'})<line_sep>actual=json.loads(response.body)<assert_stmt>actual['anonymous_giving']<is><true><block_end><def_stmt>test_participant_can_toggle_anonymous_giving_back self<block_start>response=self.hit_privacy('POST' data={'toggle':'anonymous_giving'})<line_sep>response=self.hit_privacy('POST' data={'toggle':'anonymous_giving'})<line_sep>actual=json.loads(response.body)['anonymous_giving']<assert_stmt>actual<is><false><block_end># Related to is-searchable <def_stmt>test_meta_robots_tag_added_on_opt_out self<block_start>self.hit_privacy('POST' data={'toggle':'is_searchable'})<line_sep>expected='<meta name="robots" content="noindex,nofollow" />'<assert_stmt>expected<in>self.client.GET("/~alice/").body<block_end><def_stmt>test_participant_does_show_up_on_search self<block_start><assert_stmt>'alice'<in>self.client.GET("/search?q=alice").body<block_end><def_stmt>test_participant_doesnt_show_up_on_search self<block_start>self.hit_privacy('POST' data={'toggle':'is_searchable'})<assert_stmt>'alice'<not><in>self.client.GET("/search.json?q=alice").body<block_end># Related to anonymous_giving <def_stmt>test_anon_can_see_giving_for_non_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<false>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/').body<assert_stmt>'342'<in>self.client.GET('/~bob/').body<block_end><def_stmt>test_auth_can_see_giving_for_non_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<false>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/' auth_as='alice').body<assert_stmt>'342'<in>self.client.GET('/~bob/' auth_as='alice').body<block_end><def_stmt>test_admin_can_see_giving_for_non_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<false>)<line_sep>self.make_participant('admin' is_admin=<true>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/' auth_as='admin').body<assert_stmt>'342'<in>self.client.GET('/~bob/' 
auth_as='admin').body<assert_stmt>'[342]'<not><in>self.client.GET('/~bob/' auth_as='admin').body<block_end><def_stmt>test_self_can_see_giving_for_non_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<false>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/' auth_as='bob').body.decode('utf8')<assert_stmt>'342'<in>self.client.GET('/~bob/' auth_as='bob').body.decode('utf8')<assert_stmt>'[342]'<not><in>self.client.GET('/~bob/' auth_as='bob').body.decode('utf8')<block_end><def_stmt>test_anon_cannot_see_giving_for_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<true>)<assert_stmt>'10.79'<not><in>self.client.GET('/~bob/').body<assert_stmt>'342'<not><in>self.client.GET('/~bob/').body<block_end><def_stmt>test_auth_cannot_see_giving_for_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<true>)<assert_stmt>'10.79'<not><in>self.client.GET('/~bob/' auth_as='alice').body<assert_stmt>'342'<not><in>self.client.GET('/~bob/' auth_as='alice').body<block_end><def_stmt>test_admin_can_see_giving_for_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<true>)<line_sep>self.make_participant('admin' is_admin=<true>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/' auth_as='admin').body<assert_stmt>'[342]'<in>self.client.GET('/~bob/' auth_as='admin').body<block_end><def_stmt>test_self_can_see_giving_for_anonymous_giving self<block_start>self.make_participant('bob' claimed_time='now' giving=10.79 ngiving_to=342 anonymous_giving=<true>)<assert_stmt>'10.79'<in>self.client.GET('/~bob/' auth_as='bob').body.decode('utf8')<assert_stmt>'[342]'<in>self.client.GET('/~bob/' auth_as='bob').body.decode('utf8')<block_end><block_end>
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>process=cms.Process("SKIM")<line_sep>process.configurationMetadata=cms.untracked.PSet(version=cms.untracked.string('$Revision: 1.4 $') name=cms.untracked.string('$Source: /cvs/CMSSW/CMSSW/DPGAnalysis/Skims/python/EGPDSkim_cfg.py,v $') annotation=cms.untracked.string('EGamma skim'))<line_sep># # # This is for testing purposes. # # ##run143960 process.source=cms.Source("PoolSource" fileNames=cms.untracked.vstring('/store/data/Run2010A/EG/RECO/v4/000/143/960/84DEE17A-44B1-DF11-B844-001D09F29849.root') secondaryFileNames=cms.untracked.vstring('/store/data/Run2010A/EG/RAW/v1/000/143/960/C40C9318-0FB1-DF11-A974-0030487CBD0A.root'))<line_sep>process.source.inputCommands=cms.untracked.vstring("keep *" "drop *_MEtoEDMConverter_*_*")<line_sep>process.maxEvents=cms.untracked.PSet(input=cms.untracked.int32(1000))<line_sep>#------------------------------------------ # Load standard sequences. #------------------------------------------ process.load('Configuration/StandardSequences/MagneticField_AutoFromDBCurrent_cff')<line_sep>process.load('Configuration/StandardSequences/GeometryIdeal_cff')<line_sep>process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")<line_sep>process.GlobalTag.globaltag='GR10_P_V8::All'<line_sep>process.load("Configuration/StandardSequences/RawToDigi_Data_cff")<line_sep>process.load("Configuration/StandardSequences/Reconstruction_cff")<line_sep>process.load('Configuration/EventContent/EventContent_cff')<line_sep>#drop collections created on the fly process.FEVTEventContent.outputCommands.append("drop *_MEtoEDMConverter_*_*")<line_sep>process.FEVTEventContent.outputCommands.append("drop *_*_*_SKIM")<line_sep># # Load common sequences # process.load('L1TriggerConfig.L1GtConfigProducers.L1GtTriggerMaskAlgoTrigConfig_cff')<line_sep>process.load('L1TriggerConfig.L1GtConfigProducers.L1GtTriggerMaskTechTrigConfig_cff')<line_sep>process.load('HLTrigger/HLTfilters/hltLevel1GTSeed_cfi')<line_sep>#################################WZFilter############################################ process.hltFilter=cms.EDFilter("HLTHighLevel" TriggerResultsTag=cms.InputTag("TriggerResults" "" "HLT") HLTPaths=cms.vstring(# "HLT_Photon15_L1R", # "HLT_Photon15_Cleaned_L1R", # "HLT_Photon20_Cleaned_L1R", "HLT_Ele15_LW_L1R" "HLT_Ele15_SW_L1R" "HLT_Ele15_SW_CaloEleId_L1R" "HLT_Ele17_SW_CaloEleId_L1R" "HLT_Ele17_SW_L1R" "HLT_Ele17_SW_TightEleId_L1R" "HLT_Ele17_SW_TightCaloEleId_SC8HE_L1R") eventSetupPathsKey=cms.string('') andOr=cms.bool(<true>) throw=cms.bool(<false>) saveTags=cms.bool(<false>))<line_sep>process.load("DPGAnalysis/Skims/WZinterestingEventFilter_cfi")<line_sep>process.WZfilter=cms.Path(process.hltFilter<times>process.WZInterestingEventSelector)<line_sep># Output definition process.outWZfilter=cms.OutputModule("PoolOutputModule" # splitLevel = cms.untracked.int32(0), outputCommands=process.FEVTEventContent.outputCommands fileName=cms.untracked.string('/tmp/azzi/EGMWZ_filter.root') dataset=cms.untracked.PSet(dataTier=cms.untracked.string('RAW-RECO') filterName=cms.untracked.string('EGMWZFilter')) SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring('WZfilter')))<line_sep>#################################logerrorharvester############################################ process.load("FWCore.Modules.logErrorFilter_cfi")<import_from_stmt>Configuration.StandardSequences.RawToDigi_Data_cff gtEvmDigis<line_sep>process.gtEvmDigis=gtEvmDigis.clone()<line_sep>process.stableBeam=cms.EDFilter("HLTBeamModeFilter" 
L1GtEvmReadoutRecordTag=cms.InputTag("gtEvmDigis") AllowedBeamMode=cms.vuint32(11) saveTags=cms.bool(<false>))<line_sep>process.logerrorpath=cms.Path(process.gtEvmDigis+process.stableBeam+process.logErrorFilter)<line_sep>process.outlogerr=cms.OutputModule("PoolOutputModule" outputCommands=process.FEVTEventContent.outputCommands fileName=cms.untracked.string('/tmp/azzi/logerror_filter.root') dataset=cms.untracked.PSet(dataTier=cms.untracked.string('RAW-RECO') filterName=cms.untracked.string('Skim_logerror')) SelectEvents=cms.untracked.PSet(SelectEvents=cms.vstring("logerrorpath")))<line_sep>#====================== process.options=cms.untracked.PSet(wantSummary=cms.untracked.bool(<true>))<line_sep>process.outpath=cms.EndPath(process.outlogerr+process.outWZfilter)<line_sep>
""" Modify header or status in response """<import_from_stmt>sanic Sanic response<line_sep>app=Sanic("Example")<line_sep>@app.route("/")<def_stmt>handle_request request<block_start><return>response.json({"message":"Hello world!"} headers={"X-Served-By":"sanic"} status=200 )<block_end>@app.route("/unauthorized")<def_stmt>handle_request request<block_start><return>response.json({"message":"You are not authorized"} headers={"X-Served-By":"sanic"} status=404 )<block_end><if_stmt>__name__<eq>"__main__"<block_start>app.run(host="0.0.0.0" port=8000 debug=<true>)<block_end>
_base_='../_base_/default_runtime.py'<line_sep># dataset settings dataset_type='CocoPanopticDataset'<line_sep>data_root='data/coco/'<line_sep>img_norm_cfg=dict(mean=[123.675 116.28 103.53] std=[58.395 57.12 57.375] to_rgb=<true>)<line_sep># file_client_args = dict(backend='disk',) # file_client_args = dict( # backend='petrel', # path_mapping=dict({ # './data/': 's3://openmmlab/datasets/detection/', # 'data/': 's3://openmmlab/datasets/detection/' # })) file_client_args=dict(backend='memcached' server_list_cfg='/mnt/lustre/share/memcached_client/server_list.conf' client_cfg='/mnt/lustre/share/memcached_client/client.conf' sys_path='/mnt/lustre/share/pymc/py3' )<line_sep># In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], # multiscale_mode='range' train_pipeline=[dict(type='LoadImageFromFile' file_client_args=file_client_args) dict(type='LoadPanopticAnnotations' with_bbox=<true> with_mask=<true> with_seg=<true> file_client_args=file_client_args) dict(type='Resize' img_scale=[(1333 640) (1333 800)] multiscale_mode='range' keep_ratio=<true>) dict(type='RandomFlip' flip_ratio=0.5) dict(type='Normalize' **img_norm_cfg) dict(type='Pad' size_divisor=32) dict(type='DefaultFormatBundle') dict(type='Collect' keys=['img' 'gt_bboxes' 'gt_labels' 'gt_masks' 'gt_semantic_seg']) ]<line_sep>test_pipeline=[dict(type='LoadImageFromFile' file_client_args=file_client_args) dict(type='MultiScaleFlipAug' img_scale=(1333 800) flip=<false> transforms=[dict(type='Resize' keep_ratio=<true>) dict(type='RandomFlip') dict(type='Normalize' **img_norm_cfg) dict(type='Pad' size_divisor=32) dict(type='ImageToTensor' keys=['img']) dict(type='Collect' keys=['img']) ])]<line_sep># Use RepeatDataset to speed up training data=dict(samples_per_gpu=2 workers_per_gpu=2 train=dict(type='RepeatDataset' times=3 dataset=dict(type=dataset_type ann_file=data_root+'annotations/panoptic_train2017.json' img_prefix=data_root+'train2017/' seg_prefix=data_root+'annotations/panoptic_train2017/' pipeline=train_pipeline)) val=dict(type=dataset_type ann_file=data_root+'annotations/panoptic_val2017.json' img_prefix=data_root+'val2017/' seg_prefix=data_root+'annotations/panoptic_val2017/' pipeline=test_pipeline) test=dict(type=dataset_type ann_file=data_root+'annotations/panoptic_val2017.json' img_prefix=data_root+'val2017/' seg_prefix=data_root+'annotations/panoptic_val2017/' pipeline=test_pipeline))<line_sep>evaluation=dict(interval=1 metric=['pq'])<line_sep># optimizer # this is different from the original 1x schedule that use SGD optimizer=dict(type='AdamW' lr=0.0001 weight_decay=0.05 paramwise_cfg=dict(custom_keys={'backbone':dict(lr_mult=0.25)}))<line_sep>optimizer_config=dict(grad_clip=dict(max_norm=1 norm_type=2))<line_sep># learning policy # Experiments show that using step=[9, 11] has higher performance lr_config=dict(policy='step' warmup='linear' warmup_iters=1000 warmup_ratio=0.001 step=[9 11])<line_sep>runner=dict(type='EpochBasedRunner' max_epochs=12)<line_sep>
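# Hedged sketch of how a config like the one above is typically consumed; it assumes mmcv is installed, belongs in a separate script (config files are plain Python read by Config.fromfile), and the file path below is illustrative only. <import_from_stmt>mmcv Config<line_sep>cfg=Config.fromfile('configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py')<line_sep>print(cfg.optimizer cfg.lr_config cfg.runner)<line_sep>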
""" pytorch (0.3.1) miss some transforms, will be removed after official support. """<import_stmt>torch<import_stmt>numpy<as>np<import_from_stmt>PIL Image<import_stmt>torchvision.transforms.functional<as>F<import_stmt>torch.nn.functional<as>Func<import_stmt>random<line_sep>imagenet_pca={'eigval':np.asarray([0.2175 0.0188 0.0045]) 'eigvec':np.asarray([[-0.5675 0.7192 0.4009] [-0.5808 -0.0045 -0.8140] [-0.5836 -0.6948 0.4203] ])}<class_stmt>Lighting(object)<block_start><def_stmt>__init__ self alphastd eigval=imagenet_pca['eigval'] eigvec=imagenet_pca['eigvec']<block_start>self.alphastd=alphastd<assert_stmt>eigval.shape<eq>(3 )<assert_stmt>eigvec.shape<eq>(3 3)<line_sep>self.eigval=eigval<line_sep>self.eigvec=eigvec<block_end><def_stmt>__call__ self img<block_start><if_stmt>self.alphastd<eq>0.<block_start><return>img<block_end>rnd=np.random.randn(3)<times>self.alphastd<line_sep>rnd=rnd.astype('float32')<line_sep>v=rnd<line_sep>old_dtype=np.asarray(img).dtype<line_sep>v=v<times>self.eigval<line_sep>v=v.reshape((3 1))<line_sep>inc=np.dot(self.eigvec v).reshape((3 ))<line_sep>img=np.add(img inc)<if_stmt>old_dtype<eq>np.uint8<block_start>img=np.clip(img 0 255)<block_end>img=Image.fromarray(img.astype(old_dtype) 'RGB')<line_sep><return>img<block_end><def_stmt>__repr__ self<block_start><return>self.__class__.__name__+'()'<block_end><block_end><class_stmt>InputList(object)<block_start><def_stmt>__init__ self scales<block_start>self.scales=scales<block_end><def_stmt>__call__ self img# assert img.size[0] == self.scales[0], 'image shape should be equal to max scale' # input_list = [] # for i in range(len(self.scales)): # input_list.append(F.resize(img, self.scales[i])) <block_start><assert_stmt>img.size()[1]<eq>self.scales[0] 'image shape should be equal to max scale'<line_sep>input_list=[]<line_sep>img=img[np.newaxis :]<for_stmt>i range(len(self.scales))<block_start>resized_img=Func.interpolate(img (self.scales[i] self.scales[i]) mode='bilinear' align_corners=<true>)<line_sep>resized_img=torch.squeeze(resized_img)<line_sep>input_list.append(resized_img)<block_end><return>input_list<block_end><block_end><class_stmt>ListToTensor(object)<block_start><def_stmt>__call__ self input_list<block_start>tensor_list=[]<for_stmt>i range(len(input_list))<block_start>pic=input_list[i]<line_sep>tensor_list.append(F.to_tensor(pic).detach())<block_end><return>tensor_list<block_end><block_end><class_stmt>ListNormalize(object)<block_start><def_stmt>__init__ self mean std inplace=<false><block_start>self.mean=mean<line_sep>self.std=std<line_sep>self.inplace=inplace<block_end><def_stmt>__call__ self tensor_list<block_start>norm_list=[]<for_stmt>i range(len(tensor_list))<block_start>norm_list.append(F.normalize(tensor_list[i] self.mean self.std self.inplace))<block_end><return>norm_list<block_end><block_end>
<import_from_stmt>. bp<import_from_stmt>flask_login current_user login_required<import_from_stmt>flask render_template redirect url_for flash request send_file<import_from_stmt>app.lib.base.provider Provider<line_sep>@bp.route('/' methods=['GET'])@login_required<def_stmt>index <block_start>results_per_page=20<line_sep>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>tags=provider.tags()<line_sep>search=request.args.get('search' '').strip()<line_sep>search_tags=request.args.getlist('tags')<line_sep>page=int(request.args.get('page' 1))<if_stmt>page<le>0<block_start>page=1<block_end>user_id=<none><if>current_user.admin<else>current_user.id<line_sep>page_url='tags='+'&tags='.join(search_tags)<line_sep>page_url<augadd>"&search={0}&page=".format(search)<line_sep><return>render_template('dns/zones/index.html' zones=zones.get_user_zones_paginated(user_id order_by='domain' page=page per_page=results_per_page search=search tags=search_tags) page=page per_page=results_per_page page_url=page_url search=search search_tags=search_tags tags=tags.all(user_id=user_id order_by='asc' order_column='name'))<block_end>@bp.route('/<int:dns_zone_id>/view' methods=['GET'])@login_required<def_stmt>zone_view dns_zone_id<block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>records=provider.dns_records()<if_stmt><not>zones.can_access(dns_zone_id current_user.id)<block_start>flash('Access Denied' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end>zone=zones.get(dns_zone_id)<if_stmt><not>zone<block_start>flash('Zone not found' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end><return>render_template('dns/zones/view.html' zone=zone records=records.get_zone_records(dns_zone_id order_column='type') section='records' tab='records')<block_end>@bp.route('/<int:dns_zone_id>/edit' methods=['GET'])@login_required<def_stmt>zone_edit dns_zone_id<block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>tags=provider.tags()<line_sep>zone=<none><line_sep>dns_zone_id=0<if>dns_zone_id<l>0<else>dns_zone_id<if_stmt>dns_zone_id<g>0<block_start><if_stmt><not>zones.can_access(dns_zone_id current_user.id)<block_start>flash('Access Denied' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end>zone=zones.get(dns_zone_id)<if_stmt><not>zone<block_start>flash('Zone not found' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end><block_end>username=current_user.username<if>zone<is><none><else>zone.username<line_sep>user_id=zone.user_id<if>dns_zone_id<g>0<else>current_user.id<line_sep><return>render_template('dns/zones/edit.html' dns_zone_id=dns_zone_id user_domain=zones.get_user_base_domain(username) zone=zone tags=tags.all(user_id=user_id order_column='name' order_by='asc'))<block_end>@bp.route('/<int:dns_zone_id>/edit/save' methods=['POST'])@login_required<def_stmt>zone_edit_save dns_zone_id<block_start>dns_zone_id=0<if>dns_zone_id<l>0<else>dns_zone_id<line_sep><return>__zone_create()<if>dns_zone_id<eq>0<else>__zone_update(dns_zone_id)<block_end>@bp.route('/<int:dns_zone_id>/delete' methods=['POST'])@login_required<def_stmt>zone_delete dns_zone_id<block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<if_stmt><not>zones.can_access(dns_zone_id current_user.id)<block_start>flash('Access Denied' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end>zone=zones.get(dns_zone_id)<if_stmt><not>zone<block_start>flash('Could not get zone' 
'error')<line_sep><return>redirect(url_for('dns.index'))<block_end><elif_stmt>zone.master<block_start>flash('You cannot delete a master zone' 'error')<line_sep><return>redirect(url_for('dns.index'))<block_end># Not using the instance's .delete() attribute because we first need to delete all child records. <if_stmt><not>zones.delete(dns_zone_id)<block_start>flash('Could not delete zone' 'error')<line_sep><return>redirect(url_for('dns.index'))<block_end>flash('Zone deleted' 'success')<line_sep><return>redirect(url_for('dns.index'))<block_end>@bp.route('/delete' methods=['POST'])@login_required<def_stmt>zone_group_delete <block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>search=request.form['search'].strip()<line_sep>search_tags=request.form['tags'].strip().split(',')<line_sep>zones.group_delete(current_user.id search=search tags=search_tags)<line_sep>flash('Zone(s) deleted' 'success')<line_sep><return>redirect(url_for('dns.index'))<block_end><def_stmt>__zone_create <block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>dns_zone_id=0<line_sep>domain=request.form['domain'].strip().lower()<line_sep>active=<true><if>int(request.form.get('active' 0))<eq>1<else><false><line_sep>catch_all=<true><if>int(request.form.get('catch_all' 0))<eq>1<else><false><line_sep>forwarding=<true><if>int(request.form.get('forwarding' 0))<eq>1<else><false><line_sep>regex=<true><if>int(request.form.get('regex' 0))<eq>1<else><false><line_sep>tags=request.form.getlist('tags')<line_sep>zone=zones.new(domain active catch_all forwarding regex current_user.id update_old_logs=<true>)<if_stmt>isinstance(zone list)<block_start><for_stmt>error zone<block_start>flash(error 'error')<block_end><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>zone=zones.save_tags(zone tags)<if_stmt><not>zone<block_start>flash('Could not save zone tags' 'error')<line_sep><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>flash('Zone created' 'success')<line_sep><return>redirect(url_for('dns.zone_view' dns_zone_id=zone.id))<block_end><def_stmt>__zone_update dns_zone_id<block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<if_stmt><not>zones.can_access(dns_zone_id current_user.id)<block_start>flash('Access Denied' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end>zone=zones.get(dns_zone_id)<if_stmt><not>zone<block_start>flash('Could not get zone' 'error')<line_sep><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>domain=request.form['domain'].strip().lower()<if><not>zone.master<else>zone.domain<line_sep>active=<true><if>int(request.form.get('active' 0))<eq>1<else><false><line_sep>catch_all=<true><if>int(request.form.get('catch_all' 0))<eq>1<else><false><line_sep>forwarding=<true><if>int(request.form.get('forwarding' 0))<eq>1<else><false><line_sep>regex=<true><if>int(request.form.get('regex' 0))<eq>1<else><false><line_sep>tags=request.form.getlist('tags')<if_stmt>len(domain)<eq>0<block_start>flash('Invalid domain' 'error')<line_sep><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end><if_stmt>zones.has_duplicate(dns_zone_id domain)<block_start>flash('This domain already exists.' 
'error')<line_sep><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>zone=zones.update(zone.id domain active catch_all forwarding regex zone.user_id master=zone.master update_old_logs=<true>)<if_stmt>isinstance(zone list)<block_start><for_stmt>error zone<block_start>flash(error 'error')<block_end><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>zone=zones.save_tags(zone tags)<if_stmt><not>zone<block_start>flash('Could not save zone tags' 'error')<line_sep><return>redirect(url_for('dns.zone_edit' dns_zone_id=dns_zone_id))<block_end>flash('Zone saved' 'success')<line_sep><return>redirect(url_for('dns.zone_view' dns_zone_id=zone.id))<block_end>@bp.route('/create/log/<int:query_log_id>' methods=['POST'])@login_required<def_stmt>zone_create_from_log query_log_id<block_start>provider=Provider()<line_sep>logging=provider.dns_logs()<line_sep>zones=provider.dns_zones()<line_sep>log=logging.get(query_log_id)<if_stmt><not>log<block_start>flash('Could not retrieve log record' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end><if_stmt>log.dns_zone_id<g>0# This means that the zone exists. <block_start><if_stmt><not>zones.can_access(log.dns_zone_id current_user.id)# This error is misleading on purpose to prevent zone enumeration. Not that it's important by meh. <block_start>flash('Could not retrieve log record' 'error')<line_sep><return>redirect(url_for('home.index'))<block_end>flash('Zone already exists' 'error')<line_sep><return>redirect(url_for('dns.zone_view' dns_zone_id=log.dns_zone_id))<block_end>zone=zones.new(log.domain <true> <false> <false> <false> current_user.id update_old_logs=<true>)<if_stmt>isinstance(zone list)<block_start><for_stmt>error zone<block_start>flash(error 'error')<block_end><return>redirect(url_for('dns.zone_edit' dns_zone_id=0))<block_end>flash('Zone created' 'success')<line_sep><return>redirect(url_for('dns.zone_view' dns_zone_id=zone.id))<block_end>@bp.route('/export' methods=['POST'])@login_required<def_stmt>zones_export <block_start>provider=Provider()<line_sep>zones=provider.dns_zones()<line_sep>search=request.form['search'].strip()<line_sep>search_tags=request.form['tags'].strip().split(',')<line_sep>result=zones.export(user_id=current_user.id export_zones=<true> export_records=<true> compress_export=<true> search=search tags=search_tags)<if_stmt><not>result<block_start>flash('Could not generate export file.' 'error')<line_sep><return>redirect(url_for('dns.index'))<block_end># And download. <return>send_file(result['zip'] attachment_filename='snitch_export.zip' as_attachment=<true>)<block_end>
<import_stmt>math<import_from_stmt>typing Dict Tuple Union<import_stmt>torch<import_stmt>torch.nn<as>nn<import_from_stmt>flownmt.flows.nmt NMTFlow<import_from_stmt>flownmt.modules.priors.length_predictors LengthPredictor<class_stmt>Prior(nn.Module)<block_start>""" class for Prior with a NMTFlow inside """<line_sep>_registry=dict()<def_stmt>__init__ self flow:NMTFlow length_predictor:LengthPredictor<block_start>super(Prior self).__init__()<assert_stmt>flow.inverse 'prior flow should have inverse mode'<line_sep>self.flow=flow<line_sep>self.length_unit=max(2 2<power>(self.flow.levels-1))<line_sep>self.features=self.flow.features<line_sep>self._length_predictor=length_predictor<line_sep>self._length_predictor.set_length_unit(self.length_unit)<block_end><def_stmt>sync self<block_start>self.flow.sync()<block_end><def_stmt>predict_length self ctx:torch.Tensor src_mask:torch.Tensor topk:int=1<arrow>Tuple[torch.LongTensor torch.Tensor]<block_start>""" Args: ctx: Tensor tensor [batch, features] src_mask: Tensor tensor for source mask [batch, src_length] topk: int (default 1) return top k length candidates for each src sentence Returns: LongTensor1, Tensor2 LongTensor1: tensor for lengths [batch, topk] Tensor2: log probs for each length [batch, topk] """<line_sep><return>self._length_predictor.predict(ctx src_mask topk=topk)<block_end><def_stmt>length_loss self ctx:torch.Tensor src_mask:torch.Tensor tgt_mask:torch.Tensor<arrow>torch.Tensor<block_start>""" Args: ctx: Tensor tensor [batch, features] src_mask: Tensor tensor for source mask [batch, src_length] tgt_mask: Tensor tensor for target mask [batch, tgt_length] Returns: Tensor tensor for loss [batch] """<line_sep><return>self._length_predictor.loss(ctx src_mask tgt_mask)<block_end><def_stmt>decode self epsilon:torch.Tensor tgt_mask:torch.Tensor src:torch.Tensor src_mask:torch.Tensor<arrow>Tuple[torch.Tensor torch.Tensor]<block_start>""" Args: epsilon: Tensor epslion [batch, tgt_length, nz] tgt_mask: Tensor tensor of target masks [batch, tgt_length] src: Tensor source encoding [batch, src_length, hidden_size] src_mask: Tensor tensor of source masks [batch, src_length] Returns: Tensor1, Tensor2 Tensor1: decoded latent code z [batch, tgt_length, nz] Tensor2: log probabilities [batch] """<line_sep># [batch, tgt_length, nz] z,logdet=self.flow.fwdpass(epsilon tgt_mask src src_mask)<line_sep># [batch, tgt_length, nz] log_probs=epsilon.mul(epsilon)+math.log(math.pi<times>2.0)<line_sep># apply mask log_probs=log_probs.mul(tgt_mask.unsqueeze(2))<line_sep># [batch] log_probs=log_probs.view(z.size(0) -1).sum(dim=1).mul(-0.5)+logdet<line_sep><return>z log_probs<block_end><def_stmt>sample self nlengths:int nsamples:int src:torch.Tensor ctx:torch.Tensor src_mask:torch.Tensor tau=0.0 include_zero=<false><arrow>Tuple[Tuple[torch.Tensor torch.Tensor torch.Tensor] Tuple[torch.Tensor torch.Tensor] Tuple[torch.Tensor torch.Tensor torch.Tensor]]<block_start>""" Args: nlengths: int number of lengths per sentence nsamples: int number of samples per sentence per length src: Tensor source encoding [batch, src_length, hidden_size] ctx: Tensor tensor for global state [batch, hidden_size] src_mask: Tensor tensor of masks [batch, src_length] tau: float (default 0.0) temperature of density include_zero: bool (default False) include zero sample Returns: (Tensor1, Tensor2, Tensor3), (Tensor4, Tensor5), (Tensor6, Tensor7, Tensor8) Tensor1: samples from the prior [batch * nlengths * nsamples, tgt_length, nz] Tensor2: log probabilities [batch * nlengths * nsamples] Tensor3: target 
masks [batch * nlengths * nsamples, tgt_length] Tensor4: lengths [batch * nlengths] Tensor5: log probabilities of lengths [batch * nlengths] Tensor6: source encoding with shape [batch * nlengths * nsamples, src_length, hidden_size] Tensor7: tensor for global state [batch * nlengths * nsamples, hidden_size] Tensor8: source masks with shape [batch * nlengths * nsamples, src_length] """<line_sep>batch=src.size(0)<line_sep>batch_nlen=batch<times>nlengths<line_sep># [batch, nlenths] lengths,log_probs_length=self.predict_length(ctx src_mask topk=nlengths)<line_sep># [batch * nlengths] log_probs_length=log_probs_length.view(-1)<line_sep>lengths=lengths.view(-1)<line_sep>max_length=lengths.max().item()<line_sep># [batch * nlengths, max_length] tgt_mask=torch.arange(max_length).to(src.device).unsqueeze(0).expand(batch_nlen max_length).lt(lengths.unsqueeze(1)).float()<line_sep># [batch * nlengths, nsamples, tgt_length, nz] epsilon=src.new_empty(batch_nlen nsamples max_length self.features).normal_()<line_sep>epsilon=epsilon.mul(tgt_mask.view(batch_nlen 1 max_length 1))<times>tau<if_stmt>include_zero<block_start>epsilon[: 0].zero_()<block_end># [batch * nlengths * nsamples, tgt_length, nz] epsilon=epsilon.view(-1 max_length self.features)<if_stmt>nsamples<times>nlengths<g>1# [batch, nlengths * nsamples, src_length, hidden_size] <block_start>src=src.unsqueeze(1)+src.new_zeros(batch nlengths<times>nsamples *src.size()[1:])<line_sep># [batch * nlengths * nsamples, src_length, hidden_size] src=src.view(batch_nlen<times>nsamples *src.size()[2:])<line_sep># [batch, nlengths * nsamples, hidden_size] ctx=ctx.unsqueeze(1)+ctx.new_zeros(batch nlengths<times>nsamples ctx.size(1))<line_sep># [batch * nlengths * nsamples, hidden_size] ctx=ctx.view(batch_nlen<times>nsamples ctx.size(2))<line_sep># [batch, nlengths * nsamples, src_length] src_mask=src_mask.unsqueeze(1)+src_mask.new_zeros(batch nlengths<times>nsamples src_mask.size(1))<line_sep># [batch * nlengths * nsamples, src_length] src_mask=src_mask.view(batch_nlen<times>nsamples src_mask.size(2))<line_sep># [batch * nlengths, nsamples, tgt_length] tgt_mask=tgt_mask.unsqueeze(1)+tgt_mask.new_zeros(batch_nlen nsamples tgt_mask.size(1))<line_sep># [batch * nlengths * nsamples, tgt_length] tgt_mask=tgt_mask.view(batch_nlen<times>nsamples tgt_mask.size(2))<block_end># [batch * nlength * nsamples, tgt_length, nz] z,log_probs=self.decode(epsilon tgt_mask src src_mask)<line_sep><return>(z log_probs tgt_mask) (lengths log_probs_length) (src ctx src_mask)<block_end><def_stmt>log_probability self z:torch.Tensor tgt_mask:torch.Tensor src:torch.Tensor ctx:torch.Tensor src_mask:torch.Tensor length_loss:bool=<true><arrow>Tuple[torch.Tensor Union[torch.Tensor <none>]]<block_start>""" Args: z: Tensor tensor of latent code [batch, length, nz] tgt_mask: Tensor tensor of target masks [batch, length] src: Tensor source encoding [batch, src_length, hidden_size] ctx: Tensor tensor for global state [batch, hidden_size] src_mask: Tensor tensor of source masks [batch, src_length] length_loss: bool (default True) compute loss of length Returns: Tensor1, Tensor2 Tensor1: log probabilities of z [batch] Tensor2: length loss [batch] """<line_sep># [batch] loss_length=self.length_loss(ctx src_mask tgt_mask)<if>length_loss<else><none><line_sep># [batch, length, nz] epsilon,logdet=self.flow.bwdpass(z tgt_mask src src_mask)<line_sep># [batch, tgt_length, nz] log_probs=epsilon.mul(epsilon)+math.log(math.pi<times>2.0)<line_sep># apply mask 
log_probs=log_probs.mul(tgt_mask.unsqueeze(2))<line_sep>log_probs=log_probs.view(z.size(0) -1).sum(dim=1).mul(-0.5)+logdet<line_sep><return>log_probs loss_length<block_end><def_stmt>init self z tgt_mask src src_mask init_scale=1.0<block_start><return>self.flow.bwdpass(z tgt_mask src src_mask init=<true> init_scale=init_scale)<block_end>@classmethod<def_stmt>register cls name:str<block_start>Prior._registry[name]=cls<block_end>@classmethod<def_stmt>by_name cls name:str<block_start><return>Prior._registry[name]<block_end>@classmethod<def_stmt>from_params cls params:Dict<arrow>"Prior"<block_start>flow_params=params.pop('flow')<line_sep>flow=NMTFlow.from_params(flow_params)<line_sep>predictor_params=params.pop('length_predictor')<line_sep>length_predictor=LengthPredictor.by_name(predictor_params.pop('type')).from_params(predictor_params)<line_sep><return>Prior(flow length_predictor)<block_end><block_end>Prior.register('normal')<line_sep>
"""Create a new secret scope in Databricks."""<import_stmt>kfp.dsl<as>dsl<import_stmt>kfp.compiler<as>compiler<import_stmt>databricks<def_stmt>create_secretscope scope_name string_secret byte_secret ref_secret_name ref_secret_key principal_name<block_start><return>databricks.CreateSecretScopeOp(name="createsecretscope" scope_name=scope_name initial_manage_principal="users" secrets=[{"key":"string-secret" "string_value":string_secret} {"key":"byte-secret" "byte_value":byte_secret} {"key":"ref-secret" "value_from":{"secret_key_ref":{"name":ref_secret_name "key":ref_secret_key}}}] acls=[{"principal":principal_name "permission":"READ"}])<block_end><def_stmt>delete_secretscope scope_name<block_start><return>databricks.DeleteSecretScopeOp(name="deletesecretscope" scope_name=scope_name)<block_end>@dsl.pipeline(name="DatabricksSecretScope" description="A toy pipeline that sets some secrets and acls in an Azure Databricks Secret Scope.")<def_stmt>calc_pipeline scope_name="test-secretscope" string_secret="helloworld" byte_secret="aGVsbG93b3JsZA==" ref_secret_name="mysecret" ref_secret_key="username" principal_name="<EMAIL>"<block_start>create_secretscope_task=create_secretscope(scope_name string_secret byte_secret ref_secret_name ref_secret_key principal_name)<line_sep>delete_secretscope_task=delete_secretscope(scope_name)<line_sep>delete_secretscope_task.after(create_secretscope_task)<block_end><if_stmt>__name__<eq>"__main__"<block_start>compiler.Compiler()._create_and_write_workflow(pipeline_func=calc_pipeline package_path=__file__+".tar.gz")<block_end>
######### # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. <import_from_stmt>flask_restful fields<import_from_stmt>flask_restful_swagger swagger<line_sep>@swagger.model<class_stmt>ExecutionRequest(object)<block_start>resource_fields={'workflow_id':fields.String 'parameters':fields.Raw 'allow_custom_parameters':fields.Boolean 'force':fields.Boolean}<block_end>@swagger.model<class_stmt>DeploymentRequest(object)<block_start>resource_fields={'blueprint_id':fields.String }<block_end>@swagger.model<class_stmt>DeploymentModificationRequest(object)<block_start>resource_fields={'stage':fields.String 'nodes':fields.Raw }<block_end>@swagger.model<class_stmt>ModifyExecutionRequest(object)<block_start>resource_fields={'action':fields.String}<block_end>@swagger.model<class_stmt>PostProviderContextRequest(object)<block_start>resource_fields={'name':fields.String 'context':fields.Raw}<block_end>@swagger.model<class_stmt>EvaluateFunctionsRequest(object)<block_start>resource_fields={'deployment_id':fields.String 'context':fields.Raw 'payload':fields.Raw}<block_end>
<import_stmt>hearthbreaker.cards<import_from_stmt>hearthbreaker.cards.base MinionCard<import_from_stmt>hearthbreaker.constants CHARACTER_CLASS CARD_RARITY MINION_TYPE<import_from_stmt>hearthbreaker.game_objects Minion<import_from_stmt>hearthbreaker.tags.action AddCard Give GiveAura Damage<import_from_stmt>hearthbreaker.tags.base Effect Aura Battlecry AuraUntil ActionTag<import_from_stmt>hearthbreaker.tags.condition HasSecret GreaterThan IsType Adjacent IsSecret IsSpell<import_from_stmt>hearthbreaker.tags.event SpellCast DidDamage TurnEnded CardPlayed Drawn CardUsed<import_from_stmt>hearthbreaker.tags.selector SelfSelector PlayerSelector TargetSelector CharacterSelector EnemyPlayer RandomPicker MinionSelector Count BothPlayer CardSelector<import_from_stmt>hearthbreaker.tags.status ChangeAttack ChangeHealth Frozen NoSpellTarget ManaChange<class_stmt>ManaWyrm(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Mana Wyrm" 1 CHARACTER_CLASS.MAGE CARD_RARITY.COMMON)<block_end><def_stmt>create_minion self player<block_start><return>Minion(1 3 effects=[Effect(SpellCast() ActionTag(Give(ChangeAttack(1)) SelfSelector()))])<block_end><block_end><class_stmt>SorcerersApprentice(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Sorcerer's Apprentice" 2 CHARACTER_CLASS.MAGE CARD_RARITY.COMMON)<block_end><def_stmt>create_minion self player<block_start><return>Minion(3 2 auras=[Aura(ManaChange(-1) CardSelector(condition=IsSpell()))])<block_end><block_end><class_stmt>KirinTorMage(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Kirin Tor Mage" 3 CHARACTER_CLASS.MAGE CARD_RARITY.RARE battlecry=Battlecry(GiveAura([AuraUntil(ManaChange(-100) CardSelector(condition=IsSecret()) CardPlayed(IsSecret()))]) PlayerSelector()))<block_end><def_stmt>create_minion self player<block_start><return>Minion(4 3)<block_end><block_end><class_stmt>EtherealArcanist(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Ethereal Arcanist" 4 CHARACTER_CLASS.MAGE CARD_RARITY.RARE)<block_end><def_stmt>create_minion self player<block_start><return>Minion(3 3 effects=[Effect(TurnEnded(HasSecret()) ActionTag(Give(ChangeAttack(2)) SelfSelector())) Effect(TurnEnded(HasSecret()) ActionTag(Give(ChangeHealth(2)) SelfSelector()))])<block_end><block_end><class_stmt>Sheep(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Sheep" 0 CHARACTER_CLASS.ALL CARD_RARITY.COMMON <false> MINION_TYPE.BEAST)<block_end><def_stmt>create_minion self p<block_start><return>Minion(1 1)<block_end><block_end><class_stmt>WaterElemental(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Water Elemental" 4 CHARACTER_CLASS.MAGE CARD_RARITY.COMMON)<block_end><def_stmt>create_minion self player<block_start><return>Minion(3 6 effects=[Effect(DidDamage() ActionTag(Give(Frozen()) TargetSelector()))])<block_end><block_end><class_stmt>ArchmageAntonidas(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Archmage Antonidas" 7 CHARACTER_CLASS.MAGE CARD_RARITY.LEGENDARY)<block_end><def_stmt>create_minion self player<block_start><return>Minion(5 7 effects=[Effect(SpellCast() ActionTag(AddCard(hearthbreaker.cards.Fireball()) PlayerSelector()))])<block_end><block_end><class_stmt>Snowchugger(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Snowchugger" 2 CHARACTER_CLASS.MAGE CARD_RARITY.COMMON minion_type=MINION_TYPE.MECH)<block_end><def_stmt>create_minion self 
player<block_start><return>Minion(2 3 effects=[Effect(DidDamage() ActionTag(Give(Frozen()) TargetSelector()))])<block_end><block_end><class_stmt>SpellbenderMinion(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Spellbender" 0 CHARACTER_CLASS.MAGE CARD_RARITY.EPIC <false> ref_name="Spellbender (minion)")<block_end><def_stmt>create_minion self p<block_start><return>Minion(1 3)<block_end><block_end><class_stmt>MirrorImageMinion(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Mirror Image" 0 CHARACTER_CLASS.MAGE CARD_RARITY.COMMON <false> ref_name="Mirror Image (minion)")<block_end><def_stmt>create_minion self p<block_start><return>Minion(0 2 taunt=<true>)<block_end><block_end><class_stmt>GoblinBlastmage(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Goblin Blastmage" 4 CHARACTER_CLASS.MAGE CARD_RARITY.RARE battlecry=Battlecry(Damage(1) CharacterSelector(<none> EnemyPlayer() RandomPicker(4)) GreaterThan(Count(MinionSelector(IsType(MINION_TYPE.MECH))) value=0)))<block_end><def_stmt>create_minion self player<block_start><return>Minion(5 4)<block_end><block_end><class_stmt>SootSpewer(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Soot Spewer" 3 CHARACTER_CLASS.MAGE CARD_RARITY.RARE minion_type=MINION_TYPE.MECH)<block_end><def_stmt>create_minion self player<block_start><return>Minion(3 3 spell_damage=1)<block_end><block_end><class_stmt>WeeSpellstopper(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Wee Spellstopper" 4 CHARACTER_CLASS.MAGE CARD_RARITY.EPIC)<block_end><def_stmt>create_minion self player<block_start><return>Minion(2 5 auras=[Aura(NoSpellTarget() MinionSelector(Adjacent()))])<block_end><block_end><class_stmt>FlameLeviathan(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Flame Leviathan" 7 CHARACTER_CLASS.MAGE CARD_RARITY.LEGENDARY minion_type=MINION_TYPE.MECH effects=[Effect(Drawn() ActionTag(Damage(2) CharacterSelector(<none> BothPlayer())))])<block_end><def_stmt>create_minion self player<block_start><return>Minion(7 7)<block_end><block_end><class_stmt>Flamewaker(MinionCard)<block_start><def_stmt>__init__ self<block_start>super().__init__("Flamewaker" 3 CHARACTER_CLASS.MAGE CARD_RARITY.RARE)<block_end><def_stmt>create_minion self player<block_start><return>Minion(2 4 effects=[Effect(CardUsed(IsSpell()) ActionTag(Damage(1) CharacterSelector(<none> EnemyPlayer() RandomPicker(2))))])<block_end><block_end>
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. # -------------------------------------------------------------------------------------------- # - Generated by tools/entrypoint_compiler.py: do not edit by hand """ MinMaxScaler """<line_sep>__all__=["MinMaxScaler"]<import_from_stmt>sklearn.base TransformerMixin<import_from_stmt>...base_transform BaseTransform<import_from_stmt>...internal.core.preprocessing.normalization.minmaxscaler MinMaxScaler<as>core<import_from_stmt>...internal.utils.utils trace<class_stmt>MinMaxScaler(core BaseTransform TransformerMixin)<block_start>""" Normalizes columns as specified below. .. remarks:: In linear classification algorithms instances are viewed as vectors in multi-dimensional space. Since the range of values of raw data varies widely, some objective functions do not work properly without normalization. For example, if one of the features has a broad range of values, the distances between points is governed by this particular feature. Therefore, the range of all features should be normalized so that each feature contributes approximately proportionately to the final distance. This can provide significant speedup and accuracy benefits. In all the linear algorithms in nimbusml (:py:class:`Logistic Regression <nimbusml.linear_model.LogisticRegressionClassifier>`, :py:class:`Averaged Perceptron <nimbusml.linear_model.AveragedPerceptronBinaryClassifier>`, etc.), the default is to normalize features before training. ``MinMaxScaler`` is the default normalizer for many `nimbusml` algorithms and linearly rescales every feature to the [0,1] or the [-1,1] interval. Rescaling to the [0,1] interval is done by shifting the values of each feature so that the minimal value is 0, and then dividing by the new maximal value (which is the difference between the original maximal and minimal values). Rescaling to the [-1,1] interval is done by dividing the values of each feature by the maximal absolute value of the feature. This method is useful for preserving the sparsity of a dataset, since 0 values do not change. The scaling method can be specified by setting the `fix_zero` to `False` for the first method, or setting it to `True` for the second method. :param columns: a dictionary of key-value pairs, where key is the output column name and value is the input column name. * Multiple key-value pairs are allowed. * Input column type: float or double or `Vector Type </nimbusml/concepts/types#vectortype-column>`_ of floats or doubles. * Output column type: `Vector Type </nimbusml/concepts/types#vectortype-column>`_. * If the output column names are same as the input column names, then simply specify ``columns`` as a list of strings. The << operator can be used to set this value (see `Column Operator </nimbusml/concepts/columns>`_) For example * MinMaxScaler(columns={'out1':'input1', 'out2':'input2'}) * MinMaxScaler() << {'out1':'input1', 'out2':'input2'} For more details see `Columns </nimbusml/concepts/columns>`_. :param fix_zero: Whether to map zero to zero, preserving sparsity. :param max_training_examples: Max number of examples used to train the normalizer. :param params: Additional arguments sent to compute engine. .. note:: *MinMaxScaler* as many other transforms requires input to be of numeric type. It will fail for other types. Most of the times, features are float but a column could be unexpectedly of type string. 
That explains why the following code raises an exception. :: in_df = pandas.DataFrame(data=dict(Sepal_Length=["2,2", 1, 2, 1])) normed = MinMaxScaler() << [Sepal_Length'] normed.fit_transform(in_df) The displayed message is:: 'Source column 'Petal_Length' has invalid type ('TX'): Expected R4 or R8 item type. The input column must be converted into float or double in the dataframe before running the pipeline or inside the pipeline with transform :py:class:`TypeConverter <nimbusml.preprocessing.schema.TypeConverter>`. This transform is automatically added in case of integers. .. seealso:: :py:class:`Binner <nimbusml.preprocessing.normalization.Binner>`, :py:class:`MeanVarianceScaler <nimbusml.preprocessing.normalization.MeanVarianceScaler>`, :py:class:`LogMeanVarianceScaler <nimbusml.preprocessing.normalization.LogMeanVarianceScaler>`, :py:class:`GlobalContrastRowScaler <nimbusml.preprocessing.normalization.GlobalContrastRowScaler>`. .. index:: normalize, preprocessing Example: .. literalinclude:: /../nimbusml/examples/MinMaxScaler.py :language: python """<line_sep>@trace<def_stmt>__init__ self fix_zero=<true> max_training_examples=1000000000 columns=<none> **params<block_start><if_stmt>columns<block_start>params['columns']=columns<block_end>BaseTransform.__init__(self **params)<line_sep>core.__init__(self fix_zero=fix_zero max_training_examples=max_training_examples **params)<line_sep>self._columns=columns<block_end><def_stmt>get_params self deep=<false><block_start>""" Get the parameters for this operator. """<line_sep><return>core.get_params(self)<block_end><def_stmt>_nodes_with_presteps self<block_start>""" Inserts preprocessing before this one. """<import_from_stmt>..schema TypeConverter<line_sep><return>[TypeConverter(result_type='R4')._steal_io(self) self]<block_end><block_end>
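# A hedged counterpart to the failing snippet in the docstring above: with the column
# already numeric the transform runs directly; the column name and values are illustrative.
<import_stmt>pandas<line_sep>in_df=pandas.DataFrame(data=dict(Sepal_Length=[2.2 1.0 2.0 1.0]))<line_sep>normed=MinMaxScaler(columns=['Sepal_Length'])<line_sep>out_df=normed.fit_transform(in_df)<line_sep># every value is rescaled into the [0, 1] interval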
<import_from_stmt>core.redis rds<import_from_stmt>core.triage Triage<import_from_stmt>core.parser ScanParser<class_stmt>Rule<block_start><def_stmt>__init__ self<block_start>self.rule='VLN_65C8'<line_sep>self.rule_severity=2<line_sep>self.rule_description='This rule checks for FrontPage configuration information disclosure'<line_sep>self.rule_confirm='FrontPage misconfiguration'<line_sep>self.rule_details=''<line_sep>self.rule_mitigation='''Ensure SharePoint is not anonymously accessible'''<line_sep>self.intensity=1<block_end><def_stmt>check_rule self ip port values conf<block_start>t=Triage()<line_sep>p=ScanParser(port values)<line_sep>domain=p.get_domain()<line_sep>module=p.get_module()<if_stmt>'http'<not><in>module<block_start><return><block_end>resp=t.http_request(ip port uri='/_vti_inf.html')<if_stmt><not>resp<block_start><return><block_end><if_stmt>'Content-Length'<in>resp.headers<and>resp.headers['Content-Length']<eq>'247'<block_start>self.rule_details='Exposed FrontPage at {}'.format(resp.url)<line_sep>rds.store_vuln({'ip':ip 'port':port 'domain':domain 'rule_id':self.rule 'rule_sev':self.rule_severity 'rule_desc':self.rule_description 'rule_confirm':self.rule_confirm 'rule_details':self.rule_details 'rule_mitigation':self.rule_mitigation})<block_end><return><block_end><block_end>
<import_stmt>pandas<as>pd<import_from_stmt>calendar isleap<def_stmt>get_date_range_hours_from_year year<block_start>""" creates date range in hours for the year excluding leap day :param year: year of date range :type year: int :return: pd.date_range with 8760 values :rtype: pandas.data_range """<line_sep>date_range=pd.date_range(start=str(year) end=str(year+1) freq='H' closed='left')<line_sep># Check if leap year and remove extra day <if_stmt>isleap(year)<block_start>date_range=date_range[~((date_range.month<eq>2)&(date_range.day<eq>29))]<block_end><return>date_range<block_end>
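# Quick hedged sanity check of the helper above: a leap year (2020) and a non-leap
# year (2021) both yield exactly 8760 hourly timestamps, since Feb 29 is dropped.
hours_leap=get_date_range_hours_from_year(2020)<line_sep>hours_regular=get_date_range_hours_from_year(2021)<line_sep><assert_stmt>len(hours_leap)<eq>8760<line_sep><assert_stmt>len(hours_regular)<eq>8760<line_sep>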
<import_from_stmt>.wav2vec *<line_sep>
""" Copyright (c) Facebook, Inc. and its affiliates. This source code is licensed under the MIT license found in the LICENSE file in the root directory of this source tree. """<import_stmt>math<import_stmt>torch<import_from_stmt>.atom_update_block AtomUpdateBlock<import_from_stmt>.base_layers Dense ResidualLayer<import_from_stmt>.efficient EfficientInteractionBilinear <import_from_stmt>.embedding_block EdgeEmbedding<import_from_stmt>.scaling ScalingFactor<class_stmt>InteractionBlockTripletsOnly(torch.nn.Module)<block_start>""" Interaction block for GemNet-T/dT. Parameters ---------- emb_size_atom: int Embedding size of the atoms. emb_size_edge: int Embedding size of the edges. emb_size_trip: int (Down-projected) Embedding size in the triplet message passing block. emb_size_rbf: int Embedding size of the radial basis transformation. emb_size_cbf: int Embedding size of the circular basis transformation (one angle). emb_size_bil_trip: int Embedding size of the edge embeddings in the triplet-based message passing block after the bilinear layer. num_before_skip: int Number of residual blocks before the first skip connection. num_after_skip: int Number of residual blocks after the first skip connection. num_concat: int Number of residual blocks after the concatenation. num_atom: int Number of residual blocks in the atom embedding blocks. activation: str Name of the activation function to use in the dense layers except for the final dense layer. scale_file: str Path to the json file containing the scaling factors. """<def_stmt>__init__ self emb_size_atom emb_size_edge emb_size_trip emb_size_rbf emb_size_cbf emb_size_bil_trip num_before_skip num_after_skip num_concat num_atom activation=<none> scale_file=<none> name="Interaction" <block_start>super().__init__()<line_sep>self.name=name<line_sep>block_nr=name.split("_")[-1]<line_sep>## -------------------------------------------- Message Passing ------------------------------------------- ## # Dense transformation of skip connection self.dense_ca=Dense(emb_size_edge emb_size_edge activation=activation bias=<false> )<line_sep># Triplet Interaction self.trip_interaction=TripletInteraction(emb_size_edge=emb_size_edge emb_size_trip=emb_size_trip emb_size_bilinear=emb_size_bil_trip emb_size_rbf=emb_size_rbf emb_size_cbf=emb_size_cbf activation=activation scale_file=scale_file name=f"TripInteraction_{block_nr}" )<line_sep>## ---------------------------------------- Update Edge Embeddings ---------------------------------------- ## # Residual layers before skip connection self.layers_before_skip=torch.nn.ModuleList([ResidualLayer(emb_size_edge activation=activation )<for>i range(num_before_skip)])<line_sep># Residual layers after skip connection self.layers_after_skip=torch.nn.ModuleList([ResidualLayer(emb_size_edge activation=activation )<for>i range(num_after_skip)])<line_sep>## ---------------------------------------- Update Atom Embeddings ---------------------------------------- ## self.atom_update=AtomUpdateBlock(emb_size_atom=emb_size_atom emb_size_edge=emb_size_edge emb_size_rbf=emb_size_rbf nHidden=num_atom activation=activation scale_file=scale_file name=f"AtomUpdate_{block_nr}" )<line_sep>## ------------------------------ Update Edge Embeddings with Atom Embeddings ----------------------------- ## self.concat_layer=EdgeEmbedding(emb_size_atom emb_size_edge emb_size_edge activation=activation )<line_sep>self.residual_m=torch.nn.ModuleList([ResidualLayer(emb_size_edge activation=activation)<for>_ 
range(num_concat)])<line_sep>self.inv_sqrt_2=1/math.sqrt(2.0)<block_end><def_stmt>forward self h m rbf3 cbf3 id3_ragged_idx id_swap id3_ba id3_ca rbf_h idx_s idx_t <block_start>""" Returns ------- h: torch.Tensor, shape=(nEdges, emb_size_atom) Atom embeddings. m: torch.Tensor, shape=(nEdges, emb_size_edge) Edge embeddings (c->a). """<line_sep># Initial transformation x_ca_skip=self.dense_ca(m)# (nEdges, emb_size_edge) x3=self.trip_interaction(m rbf3 cbf3 id3_ragged_idx id_swap id3_ba id3_ca )<line_sep>## ----------------------------- Merge Embeddings after Triplet Interaction ------------------------------ ## x=x_ca_skip+x3# (nEdges, emb_size_edge) x=x<times>self.inv_sqrt_2<line_sep>## ---------------------------------------- Update Edge Embeddings --------------------------------------- ## # Transformations before skip connection <for_stmt>i,layer enumerate(self.layers_before_skip)<block_start>x=layer(x)<block_end># (nEdges, emb_size_edge) # Skip connection m=m+x# (nEdges, emb_size_edge) m=m<times>self.inv_sqrt_2<line_sep># Transformations after skip connection <for_stmt>i,layer enumerate(self.layers_after_skip)<block_start>m=layer(m)<block_end># (nEdges, emb_size_edge) ## ---------------------------------------- Update Atom Embeddings --------------------------------------- ## h2=self.atom_update(h m rbf_h idx_t)<line_sep># Skip connection h=h+h2# (nAtoms, emb_size_atom) h=h<times>self.inv_sqrt_2<line_sep>## ----------------------------- Update Edge Embeddings with Atom Embeddings ----------------------------- ## m2=self.concat_layer(h m idx_s idx_t)# (nEdges, emb_size_edge) <for_stmt>i,layer enumerate(self.residual_m)<block_start>m2=layer(m2)<block_end># (nEdges, emb_size_edge) # Skip connection m=m+m2# (nEdges, emb_size_edge) m=m<times>self.inv_sqrt_2<line_sep><return>h m<block_end><block_end><class_stmt>TripletInteraction(torch.nn.Module)<block_start>""" Triplet-based message passing block. Parameters ---------- emb_size_edge: int Embedding size of the edges. emb_size_trip: int (Down-projected) Embedding size of the edge embeddings after the hadamard product with rbf. emb_size_bilinear: int Embedding size of the edge embeddings after the bilinear layer. emb_size_rbf: int Embedding size of the radial basis transformation. emb_size_cbf: int Embedding size of the circular basis transformation (one angle). activation: str Name of the activation function to use in the dense layers except for the final dense layer. scale_file: str Path to the json file containing the scaling factors. 
"""<def_stmt>__init__ self emb_size_edge emb_size_trip emb_size_bilinear emb_size_rbf emb_size_cbf activation=<none> scale_file=<none> name="TripletInteraction" **kwargs <block_start>super().__init__()<line_sep>self.name=name<line_sep># Dense transformation self.dense_ba=Dense(emb_size_edge emb_size_edge activation=activation bias=<false> )<line_sep># Up projections of basis representations, bilinear layer and scaling factors self.mlp_rbf=Dense(emb_size_rbf emb_size_edge activation=<none> bias=<false> )<line_sep>self.scale_rbf=ScalingFactor(scale_file=scale_file name=name+"_had_rbf")<line_sep>self.mlp_cbf=EfficientInteractionBilinear(emb_size_trip emb_size_cbf emb_size_bilinear)<line_sep>self.scale_cbf_sum=ScalingFactor(scale_file=scale_file name=name+"_sum_cbf")<line_sep># combines scaling for bilinear layer and summation # Down and up projections self.down_projection=Dense(emb_size_edge emb_size_trip activation=activation bias=<false> )<line_sep>self.up_projection_ca=Dense(emb_size_bilinear emb_size_edge activation=activation bias=<false> )<line_sep>self.up_projection_ac=Dense(emb_size_bilinear emb_size_edge activation=activation bias=<false> )<line_sep>self.inv_sqrt_2=1/math.sqrt(2.0)<block_end><def_stmt>forward self m rbf3 cbf3 id3_ragged_idx id_swap id3_ba id3_ca <block_start>""" Returns ------- m: torch.Tensor, shape=(nEdges, emb_size_edge) Edge embeddings (c->a). """<line_sep># Dense transformation x_ba=self.dense_ba(m)# (nEdges, emb_size_edge) # Transform via radial bessel basis rbf_emb=self.mlp_rbf(rbf3)# (nEdges, emb_size_edge) x_ba2=x_ba<times>rbf_emb<line_sep>x_ba=self.scale_rbf(x_ba x_ba2)<line_sep>x_ba=self.down_projection(x_ba)# (nEdges, emb_size_trip) # Transform via circular spherical basis x_ba=x_ba[id3_ba]<line_sep># Efficient bilinear layer x=self.mlp_cbf(cbf3 x_ba id3_ca id3_ragged_idx)<line_sep># (nEdges, emb_size_quad) x=self.scale_cbf_sum(x_ba x)<line_sep># => # rbf(d_ba) # cbf(d_ca, angle_cab) # Up project embeddings x_ca=self.up_projection_ca(x)# (nEdges, emb_size_edge) x_ac=self.up_projection_ac(x)# (nEdges, emb_size_edge) # Merge interaction of c->a and a->c x_ac=x_ac[id_swap]# swap to add to edge a->c and not c->a x3=x_ca+x_ac<line_sep>x3=x3<times>self.inv_sqrt_2<line_sep><return>x3<block_end><block_end>
# coding=utf-8
<import_stmt>sys<import_stmt>os<line_sep># os.getcwd() returns a path string; append it directly rather than wrapping it in a list
sys.path.append(os.getcwd())<line_sep>
<import_from_stmt>.escaped_fragment EscapedFragmentMiddleware<import_from_stmt>.hashbang HashBangMiddleware<import_from_stmt>.useragent UserAgentMiddleware<line_sep>
# Copyright (c) 2020 fortiss GmbH # # Authors: <NAME>, <NAME>, <NAME>, # <NAME> and <NAME> # # This work is licensed under the terms of the MIT license. # For a copy, see <https://opensource.org/licenses/MIT>. <import_stmt>os<import_from_stmt>pathlib Path<class_stmt>Data#xodr data <block_start>_xodr_data={}<line_sep>#track data _track_data={}<line_sep>#params data _params_data={}<line_sep>@staticmethod<def_stmt>xodr_data name<block_start><if_stmt>Data._xodr_data<block_start><return>Data._xodr_data[name]<block_end>data_dir=os.path.join(os.path.dirname(__file__) "../runtime/tests/data")<line_sep>files=[f<for>f os.listdir(data_dir)<if>f.endswith(".xodr")]<for_stmt>file files<block_start>Data._xodr_data[Path(file).stem]=os.path.join(data_dir file)<block_end><return>Data._xodr_data[name]<block_end>@staticmethod<def_stmt>track_data name<block_start><if_stmt>Data._track_data<block_start><return>Data._track_data[name]<block_end>data_dir=os.path.join(os.path.dirname(__file__) "../runtime/tests/data")<line_sep>files=[f<for>f os.listdir(data_dir)<if>f.endswith(".csv")]<for_stmt>file files<block_start>Data._track_data[Path(file).stem]=os.path.join(data_dir file)<block_end><return>Data._track_data[name]<block_end>@staticmethod<def_stmt>params_data name<block_start><if_stmt>Data._params_data<block_start><return>Data._params_data[name]<block_end>data_dir=os.path.join(os.path.dirname(__file__) "params")<line_sep>files=[f<for>f os.listdir(data_dir)<if>f.endswith(".json")]<for_stmt>file files<block_start>Data._params_data[Path(file).stem]=os.path.join(data_dir file)<block_end><return>Data._params_data[name]<block_end><block_end>
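# Hedged usage sketch: each helper resolves a file stem to an absolute path, so the
# stem must match a file that actually exists in the corresponding data directory;
# the two names below are purely illustrative placeholders.
xodr_path=Data.xodr_data("city_highway_straight")<line_sep>params_path=Data.params_data("highway_merge_configurable")<line_sep>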
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_from_stmt>copy deepcopy<import_from_stmt>tests.comparison.common Table<import_from_stmt>tests.comparison.funcs CastFunc<import_from_stmt>tests.comparison.query InsertClause InsertStatement Query StatementExecutionMode ValuesClause ValuesRow <import_from_stmt>tests.comparison.query_generator QueryGenerator<class_stmt>InsertStatementGenerator(object)<block_start><def_stmt>__init__ self profile# QueryProfile-like object <block_start>self.profile=profile<line_sep># used to generate SELECT queries for INSERT/UPSERT ... SELECT statements; # to ensure state is completely reset, this is created anew with each call to # generate_statement() self.select_stmt_generator=<none><block_end><def_stmt>generate_statement self tables dml_table<block_start>""" Return a randomly generated INSERT or UPSERT statement. Note that UPSERTs are very similar to INSERTs, which is why this generator handles both. tables should be a list of Table objects. A typical source of such a list comes from db_connection.DbCursor.describe_common_tables(). This list describes the possible "sources" of the INSERT/UPSERT's WITH and FROM/WHERE clauses. dml_table is a required Table object. The INSERT/UPSERT will be into this table. """<if_stmt><not>(isinstance(tables list)<and>len(tables)<g>0<and>all((isinstance(t Table)<for>t tables)))<block_start><raise>Exception('tables must be a not-empty list of Table objects')<block_end><if_stmt><not>isinstance(dml_table Table)<block_start><raise>Exception('dml_table must be a Table')<block_end>self.select_stmt_generator=QueryGenerator(self.profile)<line_sep>insert_statement=InsertStatement(execution=StatementExecutionMode.DML_TEST)<line_sep># Choose whether this is a # INSERT/UPSERT INTO table SELECT/VALUES # or # INSERT/UPSERT INTO table (col1, col2, ...) SELECT/VALUES # If the method returns None, it's the former. insert_column_list=self.profile.choose_insert_column_list(dml_table)<if_stmt>dml_table.primary_keys# Having primary keys implies the table is a Kudu table, which makes it subject to # both INSERTs (with automatic ignoring of primary key duplicates) and UPSERTs. <block_start>conflict_action=self.profile.choose_insert_vs_upsert()<block_end><else_stmt><block_start>conflict_action=InsertClause.CONFLICT_ACTION_DEFAULT<block_end>insert_statement.insert_clause=InsertClause(dml_table column_list=insert_column_list conflict_action=conflict_action)<line_sep># We still need to internally track the columns we're inserting. Keep in mind None # means "all" without an explicit column list. Since we've already created the # InsertClause object though, we can fill this in for ourselves. 
<if_stmt>insert_column_list<is><none><block_start>insert_column_list=dml_table.cols<block_end>insert_item_data_types=[col.type<for>col insert_column_list]<line_sep># Decide whether this is INSERT/UPSERT VALUES or INSERT/UPSERT SELECT insert_source_clause=self.profile.choose_insert_source_clause()<if_stmt>issubclass(insert_source_clause Query)# Use QueryGenerator()'s public interface to generate the SELECT. <block_start>select_query=self.select_stmt_generator.generate_statement(tables select_item_data_types=insert_item_data_types)<line_sep># To avoid many loss-of-precision errors, explicitly cast the SelectItems. The # generator's type system is not near sophisticated enough to know how random # expressions will be implicitly casted in the databases. This requires less work # to implement. IMPALA-4693 considers alternative approaches. self._cast_select_items(select_query insert_column_list)<line_sep>insert_statement.with_clause=deepcopy(select_query.with_clause)<line_sep>select_query.with_clause=<none><line_sep>insert_statement.select_query=select_query<block_end><elif_stmt>issubclass(insert_source_clause ValuesClause)<block_start>insert_statement.values_clause=self._generate_values_clause(insert_column_list)<block_end><else_stmt><block_start><raise>Exception('unsupported INSERT/UPSERT source clause: {0}'.format(insert_source_clause))<block_end><return>insert_statement<block_end><def_stmt>_generate_values_clause self columns<block_start>""" Return a VALUES clause containing a variable number of rows. The values corresponding to primary keys will be non-null constants. Any other columns could be null, constants, or function trees that may or may not evaluate to null. """<line_sep>values_rows=[]<for_stmt>_ xrange(self.profile.choose_insert_values_row_count())<block_start>values_row=[]<for_stmt>col columns<block_start><if_stmt>col.is_primary_key<block_start>val=self.profile.choose_constant(return_type=col.exact_type allow_null=<false>)<block_end><elif_stmt>'constant'<eq>self.profile.choose_values_item_expr()<block_start>val=self.profile.choose_constant(return_type=col.exact_type allow_null=<true>)<block_end><else_stmt><block_start>func_tree=self.select_stmt_generator.create_func_tree(col.type allow_subquery=<false>)<line_sep>val=self.select_stmt_generator.populate_func_with_vals(func_tree)<line_sep># Only the generic type, not the exact type, of the value will be known. To # avoid a lot of failed queries due to precision errors, we cast the val to # the exact type of the column. This will still not prevent "out of range" # conditions, as we don't try to evaluate the random expressions. val=CastFunc(val col.exact_type)<block_end>values_row.append(val)<block_end>values_rows.append(ValuesRow(values_row))<block_end><return>ValuesClause(values_rows)<block_end><def_stmt>_cast_select_items self select_query column_list<block_start>""" For a given Query select_query and a column_list (list of Columns), cast each select item in select_query to the exact type of the column. A Query may have a UNION, recursively do this down the line. 
"""<for_stmt>col_idx,select_item enumerate(select_query.select_clause.items)<block_start>cast_val_expr=CastFunc(select_item.val_expr column_list[col_idx].exact_type)<line_sep>select_item.val_expr=cast_val_expr<block_end><if_stmt>select_query.union_clause<block_start>self._cast_select_items(select_query.union_clause.query column_list)<block_end><block_end><block_end><def_stmt>get_generator statement_type<block_start>""" Given a statement type, return the proper statement generator. """<line_sep>STATEMENT_GENERATOR_MAP={InsertStatement:InsertStatementGenerator Query:QueryGenerator }<line_sep><return>STATEMENT_GENERATOR_MAP[statement_type]<block_end>
<class_stmt>InvalidTag(Exception)<block_start><pass><block_end><class_stmt>IgnoreObject(Exception)<block_start><def_stmt>__init__ self original_exception=<none> trback=<none> *args **kwargs<block_start>super(IgnoreObject self).__init__(*args **kwargs)<line_sep>self.original_exception=original_exception<line_sep>self.trback=trback<block_end><block_end><class_stmt>UnknownProtocol(Exception)<block_start><pass><block_end><class_stmt>MissingTransform(Exception)<block_start><pass><block_end><class_stmt>ExtraTransform(Exception)<block_start><pass><block_end>
<import_from_stmt>docutils nodes<def_stmt>disguise_role typ rawtext text lineno inliner options={} content=[]<block_start>""" Role to obfuscate e-mail addresses using DISGUISE comments. """<line_sep>obfuscated='<!-- DISGUISE -->'.join(list(text))<line_sep>obfuscated='<b>'+obfuscated<line_sep>obfuscated=obfuscated+'</b>'<line_sep>node=nodes.raw('' obfuscated format='html')<line_sep><return>[node] []<block_end><def_stmt>setup app<block_start>app.add_role('disguise' disguise_role)<block_end>
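# Hedged usage note: when this module is importable (e.g. saved as disguise.py next
# to conf.py) it can be enabled as a Sphinx extension, after which the role is
# available inline in reST; both snippets below are illustrative.
# conf.py: extensions = ['disguise']
# document.rst: contact :disguise:`someone at example dot org` for access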
<import_from_stmt>. graphicsPrimitives<as>graphics<import_from_stmt>.. options<def_stmt>find_voxel x y z g<block_start>"""returns (i,j,k) of voxel containing point x,y,z if the point is within the grid, otherwise return the corresponding grid boundary. """<line_sep># g is grid boundaries i=max(0 int((x-g["xlo"])<floordiv>g["dx"]))<line_sep>j=max(0 int((y-g["ylo"])<floordiv>g["dy"]))<line_sep>k=max(0 int((z-g["zlo"])<floordiv>g["dz"]))<line_sep><return>(i j k)<block_end><def_stmt>get_verts voxel g<block_start>"""return list (len=8) of point coordinates (x,y,z) that are vertices of the voxel (i,j,k)"""<line_sep>(i j k)=voxel<line_sep>dx,dy,dz=g["dx"] g["dy"] g["dz"]<line_sep>v1_0,v1_1,v1_2=g["xlo"]+i<times>dx g["ylo"]+j<times>dy g["zlo"]+k<times>dz<line_sep>vertices=[(v1_0 v1_1 v1_2) (v1_0+dx v1_1 v1_2) (v1_0+dx v1_1+dy v1_2) (v1_0 v1_1+dy v1_2) (v1_0 v1_1 v1_2+dz) (v1_0+dx v1_1 v1_2+dz) (v1_0+dx v1_1+dy v1_2+dz) (v1_0 v1_1+dy v1_2+dz) ]<line_sep><return>vertices<block_end><def_stmt>get_surr_rows row endpoints g<block_start>"""return list (len=4) of the rows surrounding the current one on all sides IF the surrounding row is within the bounds of the grid."""<line_sep>(y z)=row<line_sep>surr=[]<if_stmt>y<ge>1<block_start>surr.append(((y-1 z) endpoints))<block_end><if_stmt>z<ge>1<block_start>surr.append(((y z-1) endpoints))<block_end><if_stmt>(g["ylo"]+(y+1)<times>g["dy"])<l>g["yhi"]<block_start>surr.append(((y+1 z) endpoints))<block_end><if_stmt>(g["zlo"]+(z+1)<times>g["dz"])<l>g["zhi"]<block_start>surr.append(((y z+1) endpoints))<block_end><return>surr<block_end><def_stmt>verts_in f voxel surf g<block_start>"""return the number of vertices of this voxel that are contained within the surface"""<line_sep>verts=get_verts(voxel g)<line_sep>ins=0<line_sep>distlist=[]<for_stmt>(x y z) verts<block_start><if_stmt>(g["xlo"]<le>x<le>g["xhi"]<and>g["ylo"]<le>y<le>g["yhi"]<and>g["zlo"]<le>z<le>g["zhi"])<block_start>dist=f.distance(x y z)<block_end><else_stmt><block_start>dist=float("inf")<block_end>distlist.append(dist)<if_stmt>dist<le>options.ics_distance_threshold<block_start>ins<augadd>1<block_end><block_end><if_stmt>1<le>ins<le>7<block_start>surf[voxel]=distlist<block_end><return>ins<block_end><def_stmt>find_endpoints f surf include_ga row guesses g<block_start>"""return the endpoints (L,R) contained in the frustum f; if only one voxel both endpoints will be the same; if none both will be None f: frustum object surf: surface voxels row: current row guesses: estimates for endpoints g: grid boundaries"""<line_sep># +x or right endpoint Rend,Lend=<none> <none><line_sep>check_surf_L,check_surf_R=(<none> <none>) (<none> <none>)<line_sep>stop=<false><line_sep>Ri=guesses[1]<line_sep>ogrverts=verts_in(f (Ri row[0] row[1]) surf g)<if_stmt>ogrverts<eq>0<block_start>going_in=<true><block_end><elif_stmt>1<le>ogrverts<l>8<block_start>going_in=<false><line_sep>check_surf_R=(<true> Ri)<block_end><else_stmt><block_start>going_in=<false><block_end><while_stmt>(0<le>Ri<and>(g["xlo"]+(Ri)<times>g["dx"])<l>g["xhi"])<and><not>stop<block_start>verts=verts_in(f (Ri row[0] row[1]) surf g)<if_stmt>verts<eq>0<block_start><if_stmt><not>going_in<block_start>stop=<true><line_sep><continue><block_end><else_stmt><block_start><if_stmt>Ri<eq>guesses[0]# row is empty between guesses <block_start><return>(<none> 
<none>)<block_end>Ri<augsub>1<line_sep><continue><block_end><block_end><elif_stmt>verts<eq>8<block_start>Rend=Ri<line_sep>Ri<augadd>1<line_sep><continue><block_end><else_stmt><block_start>Rend=Ri<if_stmt>going_in<block_start>check_surf_R=(<true> Ri)<line_sep><break><block_end>Ri<augadd>1<block_end><block_end># the -x or left endpoint stop=<false><line_sep>Li=guesses[0]<line_sep>oglverts=verts_in(f (Li row[0] row[1]) surf g)<if_stmt>oglverts<eq>0<block_start>going_in=<true><block_end><elif_stmt>1<le>oglverts<l>8<block_start>going_in=<false><line_sep>check_surf_L=(<true> Li)<block_end><else_stmt><block_start>going_in=<false><block_end><while_stmt>(0<le>Li<and>(g["xlo"]+(Li)<times>g["dx"])<l>g["xhi"])<and><not>stop<block_start>verts=verts_in(f (Li row[0] row[1]) surf g)<if_stmt>verts<eq>0<block_start><if_stmt><not>going_in<block_start>stop=<true><line_sep><continue><block_end><else_stmt># it's not empty or would have already returned <block_start>Li<augadd>1<line_sep><continue><block_end><block_end><elif_stmt>verts<eq>8<block_start>Lend=Li<line_sep>Li<augsub>1<line_sep><continue><block_end><else_stmt><block_start>Lend=Li<if_stmt>going_in<block_start>check_surf_L=(<true> Li)<line_sep><break><block_end>Li<augsub>1<block_end><block_end># check for extra surface voxels missed <if_stmt>check_surf_R[0]<and>Lend<is><not><none><block_start>r=check_surf_R[1]<while_stmt>r<g>Lend<block_start>verts=verts_in(f (r row[0] row[1]) surf g)<if_stmt>verts<eq>8<block_start><break><block_end><else_stmt><block_start>r<augsub>1<block_end><block_end><block_end><if_stmt>check_surf_L[0]<and>Rend<is><not><none><block_start>l=check_surf_L[1]<while_stmt>l<l>Rend<block_start>verts=verts_in(f (l row[0] row[1]) surf g)<if_stmt>verts<eq>8<block_start><break><block_end><else_stmt><block_start>l<augadd>1<block_end><block_end><block_end># if keeping non-surface but grid-adjacent voxels: <if_stmt>include_ga<block_start>surf.add((Lend row[0] row[1]))<line_sep>surf.add((Rend row[0] row[1]))<block_end><return>(Lend Rend)<block_end><def_stmt>voxelize grid Object corners=<none> include_ga=<false><block_start>"""return a list of all voxels (i,j,k) that contain part of the object Other returned elements: set of surface voxels, possibly_missed for error handling"""<line_sep># include_ga is whether to include grid-adjacent voxels in the surface, even if entirely within the surface yes_voxels=set()<line_sep>checked=set()<line_sep>surface={}<if_stmt>corners<is><not><none><block_start><for_stmt>i range(4)<block_start>x0,y0,z0=corners[i][0] corners[i][1] corners[i][2]<line_sep>(i0 j0 k0)=find_voxel(x0 y0 z0 grid)<line_sep># find the contained endpoints and start the set with initial row and initial endpoints s=set()<line_sep>ends=find_endpoints(Object surface include_ga (j0 k0) (i0 i0) grid)<if_stmt>ends[0]<block_start><break><block_end><block_end><block_end><else_stmt><block_start><if_stmt>isinstance(Object graphics.Sphere)<block_start>x0,y0,z0=Object.x Object.y Object.z<block_end><else_stmt><block_start>x0,y0,z0=Object._x0 Object._y0 Object._z0<block_end># find the starting voxel (i0 j0 k0)=find_voxel(x0 y0 z0 grid)<line_sep># find the contained endpoints and start the set with initial row and initial endpoints s=set()<line_sep>ends=find_endpoints(Object surface include_ga (j0 k0) (i0-1 i0+1) grid)<block_end># the given starting voxel is not actually found possibly_missed=<false><if_stmt><not>ends[0]<block_start>possibly_missed=<true><line_sep>ends=(i0 i0)<block_end># ------ <for_stmt>i range(ends[0] 
ends[1]+1)<block_start>yes_voxels.add((i j0 k0))<block_end># add that initial row to checked and the set (otherwise initial voxel missed) checked.add((j0 k0))<line_sep>s.add(((j0 k0) ends))<while_stmt>s<block_start>startr=s.pop()<line_sep>newr=get_surr_rows(startr[0] startr[1] grid)<for_stmt>r newr<block_start>(row guesses)=r<if_stmt>row<not><in>checked<block_start>(Lend Rend)=find_endpoints(Object surface include_ga row guesses grid)<if_stmt>Lend<is><not><none><block_start><for_stmt>i range(Lend Rend+1)<block_start>yes_voxels.add((i row[0] row[1]))<block_end>s.add((row (Lend Rend)))<block_end>checked.add(row)<block_end><block_end><block_end>missed=<false><if_stmt>possibly_missed<and>(len(yes_voxels)<eq>1)# no voxels were found, return empty set <block_start>missed=(i0 j0 k0)<line_sep>yes_voxels=set()<block_end><return>[yes_voxels surface missed]<block_end>
<import_stmt>copy<import_stmt>time<line_sep>""" The implementation of the IncDS attack. """<def_stmt>compute_density user_product_graph product_user_graph c t<block_start>""" Compute the density of controlled accounts according to their local structural density """<line_sep>density={}<line_sep># initialize the auxiliary graph aux_user_graph=copy.deepcopy(user_product_graph)<line_sep>aux_prod_graph=copy.deepcopy(product_user_graph)<for_stmt>u c<block_start>aux_user_graph[u].append((t 1 -1 '2012-06-01'))<line_sep>aux_prod_graph[t].append((u 1 -1 '2012-06-01'))<block_end><for_stmt>u c<block_start>user_degree=len(aux_user_graph[u])<line_sep>prod_degree=sum([len(aux_prod_graph[review[0]])<for>review aux_user_graph[u]])<line_sep>density[u]=user_degree/prod_degree<block_end><return>density<block_end><def_stmt>ds_evasion user_product_graph product_user_graph c r t<block_start>""" Args: user_product_graph: key = user_id, value = list of review tuples product_user_graph: key = product_id, value = list of review tuples priors: node priors c: list of controlled accounts r: number of reviews to be posted by each account t: target list feature_config: """<line_sep># total number of spams posted count=0<line_sep>added_edges=[]<line_sep>t0=time.time()<line_sep># how many new controlled accounts are selected to post spams for the current iteration unique=0<line_sep>new_user_graph=copy.deepcopy(user_product_graph)<line_sep>new_product_graph=copy.deepcopy(product_user_graph)<line_sep>account_log=[]<line_sep># for each target, find controlled accounts to post spams <for_stmt>target t# compute the density <block_start>density=compute_density(new_user_graph new_product_graph c target)<line_sep>selected_accounts=[(account density[account])<for>account c]<line_sep>selected_accounts=sorted(selected_accounts reverse=<false> key=<lambda>x:x[1])<line_sep>print("Dict of densities of controlled accounts")<line_sep>print(selected_accounts)<line_sep>selected_accounts=[account[0]<for>account selected_accounts[:r]]<line_sep>print("List of selected accounts")<line_sep>print(selected_accounts)<line_sep># count the number of unique added accounts <for_stmt>account selected_accounts<block_start><if_stmt>account<not><in>account_log<block_start>unique<augadd>1<block_end><block_end>print('Total number of selected unique accounts: %d'%(unique))<line_sep>account_log=account_log+selected_accounts<line_sep># add the added_edges to the global graph <for_stmt>added_account selected_accounts<block_start>new_user_graph[added_account].append((target 1 -1 '2012-06-01'))<line_sep>new_product_graph[target].append((added_account 1 -1 '2012-06-01'))<block_end># add new nodes to output <for_stmt>added_account selected_accounts<block_start>review_id=(added_account target)<line_sep>added_edges.append(review_id)<block_end>t1=time.time()<line_sep>print('Time consumed: %s'%str(t1-t0))<line_sep>print('\n---------------------------------\n')<block_end><return>added_edges user_product_graph<block_end>
<import_stmt>torch<import_stmt>numpy<as>np<import_from_stmt>smplx SMPL<as>_SMPL<import_from_stmt>smplx.body_models ModelOutput<import_from_stmt>smplx.lbs vertices2joints<import_stmt>config<class_stmt>SMPL(_SMPL)<block_start>""" Extension of the official SMPL (from the smplx python package) implementation to support more joints. """<def_stmt>__init__ self *args **kwargs<block_start>super(SMPL self).__init__(*args **kwargs)<line_sep>J_regressor_extra=np.load(config.J_REGRESSOR_EXTRA_PATH)<line_sep>J_regressor_cocoplus=np.load(config.COCOPLUS_REGRESSOR_PATH)<line_sep>J_regressor_h36m=np.load(config.H36M_REGRESSOR_PATH)<line_sep>self.register_buffer('J_regressor_extra' torch.tensor(J_regressor_extra dtype=torch.float32))<line_sep>self.register_buffer('J_regressor_cocoplus' torch.tensor(J_regressor_cocoplus dtype=torch.float32))<line_sep>self.register_buffer('J_regressor_h36m' torch.tensor(J_regressor_h36m dtype=torch.float32))<block_end><def_stmt>forward self *args **kwargs<block_start>kwargs['get_skin']=<true><line_sep>smpl_output=super(SMPL self).forward(*args **kwargs)<line_sep>extra_joints=vertices2joints(self.J_regressor_extra smpl_output.vertices)<line_sep>cocoplus_joints=vertices2joints(self.J_regressor_cocoplus smpl_output.vertices)<line_sep>h36m_joints=vertices2joints(self.J_regressor_h36m smpl_output.vertices)<line_sep>all_joints=torch.cat([smpl_output.joints extra_joints cocoplus_joints h36m_joints] dim=1)<line_sep>output=ModelOutput(vertices=smpl_output.vertices global_orient=smpl_output.global_orient body_pose=smpl_output.body_pose joints=all_joints betas=smpl_output.betas full_pose=smpl_output.full_pose)<line_sep><return>output<block_end><block_end>
<import_from_stmt>fuzzconfig FuzzConfig<import_stmt>nonrouting<import_stmt>pytrellis<import_stmt>fuzzloops<import_stmt>interconnect<line_sep>cfg=FuzzConfig(job="DTR" family="ECP5" device="LFE5U-45F" ncl="empty.ncl" tiles=["CIB_R71C22:DTR"])<def_stmt>get_substs mode="DTR"<block_start><if_stmt>mode<eq>"NONE"<block_start>comment="//"<block_end><else_stmt><block_start>comment=""<block_end><return>dict(comment=comment)<block_end><def_stmt>main <block_start>pytrellis.load_database("../../../database")<line_sep>cfg.setup()<line_sep>empty_bitfile=cfg.build_design(cfg.ncl {})<line_sep>cfg.ncl="dtr.ncl"<line_sep>nonrouting.fuzz_enum_setting(cfg "DTR.MODE" ["NONE" "DTR"] <lambda>x:get_substs(mode=x) empty_bitfile)<line_sep>cfg.ncl="dtr_routing.ncl"<line_sep>interconnect.fuzz_interconnect_with_netnames(cfg ["R70C22_JSTARTPULSE_DTR"]+["R70C22_JDTROUT{}_DTR".format(i)<for>i range(8)] bidir=<true>)<block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
<import_stmt>pandas<as>pd<import_from_stmt>.exponential_moving_average exponential_moving_average<as>ema<import_from_stmt>.volatility volatility<as>vol<import_from_stmt>.stochastic percent_k<as>K<import_from_stmt>.stochastic percent_d<as>D<import_from_stmt>.relative_strength_index relative_strength_index<as>RSI<import_from_stmt>.moving_average_convergence_divergence moving_average_convergence_divergence<as>macd<import_from_stmt>.bollinger_bands bandwidth<as>bb<class_stmt>Indicators()<block_start><def_stmt>__init__ self settings=<none><block_start>self.bb_period=20<line_sep>self.rsi_period=14<line_sep>self.sd_period=0<line_sep>self.sv_period=0<line_sep>self.stoch_period=14<line_sep>self.volatility_period=20<line_sep>self.macd_long=24<line_sep>self.macd_short=12<line_sep>self.ema_periods=[20 50 100]<line_sep>self.settings=settings<line_sep>self.build_func=<none><line_sep>self.names=[]<block_end><def_stmt>add_building self settings=<none><block_start><if_stmt>settings<block_start>self.settings=settings<block_end><if_stmt>self.settings<block_start>self.build_func=[]<for_stmt>key,value self.settings.items()<block_start><if_stmt><not>value<block_start><continue><block_end><elif_stmt>"RSI"<eq>key<and>value<block_start>self.names.append('RSI')<if_stmt>'default'<ne>value<block_start>self.rsi_period=value<block_end>self.build_func.append([RSI 'RSI' self.rsi_period])<block_end><elif_stmt>"MACD"<eq>key<and>value<block_start>self.names.append('MACD')<if_stmt>'default'<ne>value<block_start>self.macd_long=value[1] <line_sep>self.macd_short=value[0]<block_end>self.build_func.append([macd 'MACD' [self.macd_short self.macd_long]])<block_end><elif_stmt>"Volatility"<eq>key<and>value<block_start>self.names.append('Volatility')<if_stmt>'default'<ne>value<block_start>self.volatility_period=value<block_end>self.build_func.append([vol 'Volatility' self.volatility_period])<block_end><elif_stmt>"EMA"<eq>key<and>value<block_start><if_stmt>'default'<ne>value<block_start><for_stmt>values value<block_start>self.names.append('EMA'+str(values))<line_sep>self.build_func.append([ema 'EMA'+str(values) values])<block_end><block_end><block_end><elif_stmt>"Bollinger_bands"<eq>key<and>value<block_start>self.names.append('Bollinger_bands')<if_stmt>'default'<ne>value<block_start>self.bb_period=value<block_end>self.build_func.append([bb 'Bollinger_bands' self.bb_period])<block_end><elif_stmt>"Stochastic"<eq>key<and>value<block_start>self.names.append('Stochastic_D')<line_sep>self.names.append('Stochastic_K')<if_stmt>'default'<ne>value<block_start>self.stoch_period=value<block_end>self.build_func.append([D 'Stochastic_D' self.stoch_period])<line_sep>self.build_func.append([K 'Stochastic_K' self.stoch_period])<block_end><block_end><block_end><block_end><def_stmt>build_indicators self data<block_start><if_stmt><not>self.build_func<block_start><raise>ValueError("No indicators to build.")<block_end>indicators=pd.DataFrame(columns=self.names)<for_stmt>idx self.build_func<block_start><if_stmt>"MACD"<in>idx[1]<block_start>indicators[idx[1]]=idx[0](data idx[2][0] idx[2][1])<block_end><else_stmt><block_start>indicators[idx[1]]=idx[0](data idx[2])<block_end><block_end><return>indicators<block_end><block_end>
<import_from_stmt>datetime timedelta<import_from_stmt>airflow DAG<import_from_stmt>airflow.utils.dates days_ago<import_from_stmt>dag_test_examples t_A t_B<line_sep>default_args={"owner":"airflow" "depends_on_past":<false> "start_date":days_ago(2) "retries":1 "retry_delay":timedelta(minutes=5) "dbnd_config":{"databand":{"env":"gcp"}} }<with_stmt>DAG(dag_id="dbnd_dag_at_gcp" default_args=default_args)<as>dag_remote_fs<block_start>a=t_A()<line_sep>b=t_B(a)<block_end><if_stmt>__name__<eq>"__main__"<block_start>dag_remote_fs.clear()<line_sep>dag_remote_fs.run(start_date=days_ago(0) end_date=days_ago(0))<block_end>
# -*- coding: utf-8 -*- <import_stmt>os<import_from_stmt>django forms<import_from_stmt>django.utils.translation gettext_lazy<as>_<import_from_stmt>django.contrib.auth get_user_model<import_from_stmt>django.utils timezone<import_from_stmt>django.template defaultfilters<import_from_stmt>django.core.files.uploadedfile UploadedFile<import_from_stmt>spirit.core tasks<import_from_stmt>spirit.core.conf settings<import_from_stmt>spirit.core.utils.timezone timezones<import_from_stmt>.models UserProfile<line_sep>User=get_user_model()<line_sep>TIMEZONE_CHOICES=timezones()<line_sep>Notify=UserProfile.Notify<class_stmt>CleanEmailMixin<block_start><def_stmt>clean_email self<block_start>email=self.cleaned_data["email"]<if_stmt>settings.ST_CASE_INSENSITIVE_EMAILS<block_start>email=email.lower()<block_end><if_stmt><not>settings.ST_UNIQUE_EMAILS<block_start><return>email<block_end>is_taken=(User.objects.filter(email=email).exists())<if_stmt>is_taken<block_start><raise>forms.ValidationError(_("The email is taken."))<block_end><return>email<block_end><def_stmt>get_email self<block_start><return>self.cleaned_data["email"]<block_end><block_end><class_stmt>EmailCheckForm(CleanEmailMixin forms.Form)<block_start>email=forms.CharField(label=_("Email") widget=forms.EmailInput max_length=254)<block_end><class_stmt>EmailChangeForm(CleanEmailMixin forms.Form)<block_start>email=forms.CharField(label=_("Email") widget=forms.EmailInput max_length=254)<line_sep>password=forms.CharField(label=_("Password") widget=forms.PasswordInput)<def_stmt>__init__ self user=<none> *args **kwargs<block_start>self.user=user<line_sep>super(EmailChangeForm self).__init__(*args **kwargs)<block_end><def_stmt>clean_password self<block_start>password=self.cleaned_data["password"]<if_stmt><not>self.user.check_password(password)<block_start><raise>forms.ValidationError(_("The provided password is incorrect."))<block_end><return>password<block_end><block_end><class_stmt>UserForm(forms.ModelForm)<block_start><class_stmt>Meta<block_start>model=User<line_sep>fields=("first_name" "last_name")<block_end><block_end><class_stmt>AvatarWidget(forms.ClearableFileInput)<block_start>template_name='spirit/user/_image_widget.html'<line_sep>clear_checkbox_label=_('Remove avatar')<line_sep>accept=', '.join('.%s'%ext<for>ext sorted(settings.ST_ALLOWED_AVATAR_FORMAT))<block_end><class_stmt>UserProfileForm(forms.ModelForm)<block_start>timezone=forms.ChoiceField(label=_("Time zone") choices=TIMEZONE_CHOICES)<line_sep>notify_when=forms.TypedChoiceField(label=_("Email notifications") coerce=int choices=Notify.WHEN)<line_sep>notify_mentions=forms.BooleanField(label=_("Email mentions") required=<false>)<line_sep>notify_replies=forms.BooleanField(label=_("Email replies") required=<false>)<class_stmt>Meta<block_start>model=UserProfile<line_sep>fields=("avatar" "location" "timezone")<line_sep>widgets={'avatar':AvatarWidget(attrs={'accept':AvatarWidget.accept})}<block_end><def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<line_sep>now=timezone.localtime(timezone.now())<line_sep>self.fields['timezone'].help_text=_('Current time is: %(date)s %(time)s')%{'date':defaultfilters.date(now) 'time':defaultfilters.time(now)}<line_sep>self.fields['notify_when'].initial=self.instance.notify_when<line_sep>self.fields['notify_mentions'].initial=bool(self.instance.notify&Notify.MENTION)<line_sep>self.fields['notify_replies'].initial=bool(self.instance.notify&Notify.REPLY)<block_end><def_stmt>clean_avatar 
self<block_start>file=self.cleaned_data['avatar']<line_sep># can be bool (clear) or not an image (empty) <if_stmt><not>isinstance(file UploadedFile)<block_start><return>file<block_end>ext=os.path.splitext(file.name)[1].lstrip('.').lower()<if_stmt>(ext<not><in>settings.ST_ALLOWED_AVATAR_FORMAT<or>file.image.format.lower()<not><in>settings.ST_ALLOWED_AVATAR_FORMAT)<block_start><raise>forms.ValidationError(_("Unsupported file format. Supported formats are %s.")%", ".join(settings.ST_ALLOWED_AVATAR_FORMAT))<block_end><return>file<block_end><def_stmt>clean_notify_mentions self<block_start><if_stmt>self.cleaned_data['notify_mentions']<block_start><return>Notify.MENTION<block_end><return>0<block_end><def_stmt>clean_notify_replies self<block_start><if_stmt>self.cleaned_data['notify_replies']<block_start><return>Notify.REPLY<block_end><return>0<block_end><def_stmt>save self *args **kwargs<block_start>self.instance.notify=(self.cleaned_data['notify_when']|self.cleaned_data['notify_mentions']|self.cleaned_data['notify_replies'])<line_sep>instance=super().save(*args **kwargs)<if_stmt>isinstance(self.cleaned_data['avatar'] UploadedFile)<block_start>tasks.make_avatars(self.instance.user_id)<block_end><return>instance<block_end><block_end>
<import_stmt>flask werkzeug.serving<import_from_stmt>werkzeug.datastructures ImmutableOrderedMultiDict<import_from_stmt>...util log<import_from_stmt>...util.threads runnable<import_from_stmt>...animation.remote opener<class_stmt>OrderedFlask(flask.Flask)# http://flask.pocoo.org/docs/1.0/patterns/subclassing/ <block_start><class_stmt>request_class(flask.Request)<block_start>parameter_storage_class=ImmutableOrderedMultiDict<block_end><block_end><class_stmt>FlaskServer(runnable.LoopThread)<block_start>OPEN_DELAY=1<def_stmt>__init__ self port external_access open_page **kwds<block_start>super().__init__()<line_sep>self.port=port<line_sep>self.hostname='0.0.0.0'<if>external_access<else>'localhost'<line_sep>self.app=OrderedFlask(__name__ **kwds)<line_sep>self.open_page=open_page<block_end><def_stmt>run_once self<block_start><if_stmt>self.open_page<block_start>opener.raw_opener('localhost' self.port self.OPEN_DELAY)<block_end>werkzeug.serving.run_simple(self.hostname self.port self.app)<line_sep>super().stop()<block_end><def_stmt>stop self<block_start><def_stmt>error <block_start>log.error('Unable to shut down REST server on port %d' self.port)<block_end>super().stop()<try_stmt><block_start>flask.request.environ.get('werkzeug.server.shutdown' error)()<block_end><except_stmt>Exception<block_start>log.debug('Exception shutting werkzeug down')<block_end><block_end><block_end>
# Copyright (c) ZenML GmbH 2020. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing # permissions and limitations under the License. <import_stmt>os<import_from_stmt>typing List Text Dict<import_stmt>tensorflow_data_validation<as>tfdv<import_from_stmt>tfx types<import_from_stmt>tfx.components.schema_gen.executor _DEFAULT_FILE_NAME<import_from_stmt>tfx.types artifact_utils<import_from_stmt>tfx.types.artifact Artifact<import_from_stmt>tfx.utils io_utils<def_stmt>parse_statistics split_name:Text statistics:List[Artifact]<arrow>Dict[Text int]<block_start>stats_uri=io_utils.get_only_uri_in_dir(artifact_utils.get_split_uri(statistics split_name))<line_sep>stats=tfdv.load_stats_binary(stats_uri)<line_sep><return>stats<block_end><def_stmt>parse_schema input_dict:Dict[Text List[types.Artifact]]<block_start>schema=input_dict.get('schema' <none>)<if_stmt><not>schema<block_start><return>schema<block_end><else_stmt><block_start>schema_path=os.path.join(artifact_utils.get_single_uri(schema) _DEFAULT_FILE_NAME)<line_sep>schema_reader=io_utils.SchemaReader()<line_sep>parsed_schema=schema_reader.read(schema_path)<line_sep><return>parsed_schema<block_end><block_end>
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>csv<import_stmt>sys<import_stmt>json<import_stmt>shutil<import_stmt>warnings<import_stmt>traceback<import_from_stmt>pathlib Path<import_from_stmt>typing Dict Iterable List<import_from_stmt>.io read_lines read_table<line_sep># Any CSV file under 50 MB can use the fast JSON converter JSON_FAST_CONVERTER_SIZE_BYTES=50<times>1000<times>1000<line_sep># Any CSV file above 150 MB should not be converted to JSON JSON_MAX_SIZE_BYTES=150<times>1000<times>1000<def_stmt>get_table_columns table_path:Path<arrow>List[str]<block_start>""" Memory-efficient method used to extract the columns of a table without reading the entire file into memory. Arguments: table_path: Path to the table Returns: List[str]: Column names from the table header """<with_stmt>open(table_path "r")<as>fd<block_start>reader=csv.reader(fd)<line_sep><return>next(reader)<block_end><block_end><def_stmt>table_sort table_path:Path output_path:Path<arrow><none><block_start>""" Memory-efficient method used to perform a lexical sort of all the rows of this table, excluding the table header. Arguments: table_path: Path of the table to be sorted. output_path: Output location for the sorted table. """<with_stmt>open(table_path "r")<as>fd_in<block_start>header=next(fd_in)<with_stmt>open(output_path "w")<as>fd_out<block_start>fd_out.write(f"{header}")<line_sep>records=[]<for_stmt>record fd_in<block_start>records.append(record)<block_end><for_stmt>record sorted(records)<block_start>fd_out.write(f"{record}")<block_end><block_end><block_end><block_end><def_stmt>table_join left:Path right:Path on:List[str] output:Path how:str="outer"<arrow><none><block_start>""" Performs a memory efficient left join between two CSV files. The records of the right table are held in memory, so in case of inner joins where order does not matter it is more efficient to pass the bigger table as `left` and smaller one as `right`. Arguments: left: Left table to join. Only rows present in this table will be present in the output. right: Right table to join. All of its columns will be added to those of `left`. on: Column names to perform the join. output: Path to write the joined table to. how: Either "inner" or "outer" indicating whether records present only in the `left` table should be dropped or not. """<def_stmt>compute_join_indices columns:List[str]<arrow>List[str]<block_start><assert_stmt>all(name<in>columns.keys()<for>name on) f"Column provided in `on` not present in right table. 
Expected {on} but found {columns}"<line_sep>join_indices=[columns[name]<for>name on]<line_sep><return>join_indices<block_end>records_right={}<with_stmt>open(right "r")<as>fd<block_start>reader=csv.reader(fd)<line_sep>columns_right={name:idx<for>idx,name enumerate(next(reader))}<line_sep>join_indices=compute_join_indices(columns_right)<line_sep># Only save the data which is not part of the join, which will be added by the left table columns_right_output={name:idx<for>name,idx columns_right.items()<if>idx<not><in>join_indices}<for_stmt>record reader<block_start>key=tuple([record[idx]<for>idx join_indices])<line_sep>data=[record[idx]<for>idx columns_right_output.values()]<line_sep>records_right[key]=data<block_end><block_end><with_stmt>open(output "w")<as>fd_out<block_start>writer=csv.writer(fd_out)<with_stmt>open(left "r")<as>fd_in<block_start>reader=csv.reader(fd_in)<line_sep>columns_left={name:idx<for>idx,name enumerate(next(reader))}<line_sep>join_indices=compute_join_indices(columns_left)<line_sep># Write the output columns as a header columns_output=list(columns_left.keys())+list(columns_right_output.keys())<line_sep>writer.writerow(columns_output)<for_stmt>record_left reader<block_start>key=tuple([record_left[idx]<for>idx join_indices])<line_sep>data_left=[record_left[idx]<for>idx columns_left.values()]<line_sep># If this is an inner join and the key is not in the right table, drop it <if_stmt>how<eq>"inner"<and><not>key<in>records_right<block_start><continue><block_end># Get the data from the right table and write to output data_right=records_right.get(key [<none>]<times>len(columns_right_output))<line_sep>writer.writerow(data_left+data_right)<block_end><block_end><block_end><block_end><def_stmt>skip_head_reader path:Path n:int=1 **read_opts<arrow>Iterable[str]<block_start>fd=read_lines(path **read_opts)<for_stmt>_ range(n)<block_start>next(fd)<block_end><yield><from>fd<block_end><def_stmt>table_cross_product left:Path right:Path output:Path<arrow><none><block_start>""" Memory efficient method to perform the cross product of all columns in two tables. Columns which are present in both tables will be duplicated in the output. Arguments: left: Left table. All columns from this table will be present in the output. right: Right table. All columns from this table will be present in the output. output: Path to write the joined table to. 
"""<line_sep>columns_left=get_table_columns(left)<line_sep>columns_right=get_table_columns(right)<with_stmt>open(output "w")<as>fd<block_start>writer=csv.writer(fd)<line_sep>writer.writerow(columns_left+columns_right)<line_sep>reader_left=csv.reader(skip_head_reader(left))<for_stmt>record_left reader_left<block_start>reader_right=csv.reader(skip_head_reader(right))<for_stmt>record_right reader_right<block_start>writer.writerow(record_left+record_right)<block_end><block_end><block_end><block_end><def_stmt>table_group_tail table:Path output:Path<arrow><none><block_start>""" Outputs latest data for each key, assumes records are indexed by <key, date> """<line_sep>reader=csv.reader(read_lines(table))<line_sep>columns={name:idx<for>idx,name enumerate(next(reader))}<if_stmt><not>"date"<in>columns.keys()# Degenerate case: this table has no date <block_start>shutil.copyfile(table output)<block_end><else_stmt><block_start>has_epi="total_confirmed"<in>columns<line_sep># To stay memory-efficient, do the latest subset "by hand" instead of using pandas grouping # This assumes that the CSV file is sorted in ascending order, which should always be true latest_date:Dict[str str]={}<line_sep>records:Dict[str List[str]]={}<for_stmt>record reader<block_start><try_stmt><block_start>key=record[columns["key"]]<line_sep>date=record[columns["date"]]<line_sep>total_confirmed=record[columns["total_confirmed"]]<if>has_epi<else><true><line_sep>latest_seen=latest_date.get(key date)<l>date<and>total_confirmed<is><not><none><if_stmt>key<not><in>records<or>latest_seen<block_start>latest_date[key]=date<line_sep>records[key]=record<block_end><block_end><except_stmt>Exception<as>exc<block_start>print(f"Error parsing record {record} in table {table}: {exc}" file=sys.stderr)<line_sep>traceback.print_exc()<block_end><block_end><with_stmt>open(output "w")<as>fd_out<block_start>writer=csv.writer(fd_out)<line_sep>writer.writerow(columns.keys())<for_stmt>key,record records.items()<block_start>writer.writerow(record)<block_end><block_end><block_end><block_end><def_stmt>convert_csv_to_json_records schema:Dict[str type] csv_file:Path output_file:Path skip_size_threshold:int=<none> fast_size_threshold:int=<none> <arrow><none><block_start><if_stmt>skip_size_threshold<is><none><block_start>skip_size_threshold=JSON_MAX_SIZE_BYTES<block_end><if_stmt>fast_size_threshold<is><none><block_start>fast_size_threshold=JSON_FAST_CONVERTER_SIZE_BYTES<block_end>file_size=csv_file.stat().st_size<line_sep>json_coverter_method=_convert_csv_to_json_records_fast<if_stmt>skip_size_threshold<g>0<and>file_size<g>skip_size_threshold<block_start><raise>ValueError(f"Size of {csv_file} too large for conversion: {file_size<floordiv>1E6} MB")<block_end><if_stmt>fast_size_threshold<g>0<and>file_size<g>fast_size_threshold<block_start>warnings.warn(f"Size of {csv_file} too large for fast method: {file_size<floordiv>1E6} MB")<line_sep>json_coverter_method=_convert_csv_to_json_records_slow<block_end>json_coverter_method(schema csv_file output_file)<block_end><def_stmt>_convert_csv_to_json_records_slow schema:Dict[str type] csv_file:Path output_file<arrow><none><block_start>""" Slow but memory efficient method to convert the provided CSV file to a record-like JSON format """<with_stmt>output_file.open("w")<as>fd_out# Write the header first <block_start>columns=get_table_columns(csv_file)<line_sep>columns_str=",".join([f'"{col}"'<for>col columns])<line_sep>fd_out.write(f'{{"columns":[{columns_str}],"data":[')<line_sep># Read the CSV file in chunks but keep only the 
values first_record=<true><for_stmt>chunk read_table(csv_file schema=schema chunksize=256)<block_start><if_stmt>first_record<block_start>first_record=<false><block_end><else_stmt><block_start>fd_out.write(",")<block_end>fd_out.write(chunk.to_json(orient="values")[1:-1])<block_end>fd_out.write("]}")<block_end><block_end><def_stmt>_convert_csv_to_json_records_fast schema:Dict[str type] csv_file:Path output_file:Path<arrow><none><block_start>""" Fast but memory intensive method to convert the provided CSV file to a record-like JSON format """<line_sep>table=read_table(csv_file schema=schema)<line_sep>json_dict=json.loads(table.to_json(orient="split"))<del_stmt>json_dict["index"]<with_stmt>open(output_file "w")<as>fd<block_start>json.dump(json_dict fd)<block_end><block_end>
# -*-coding:utf-8-*- # Copyright (c) 2020 DJI. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License in the file LICENSE.txt or at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>cv2<import_from_stmt>robomaster robot<import_stmt>threading<class_stmt>GestureInfo<block_start><def_stmt>__init__ self x y w h info<block_start>self._x=x<line_sep>self._y=y<line_sep>self._w=w<line_sep>self._h=h<line_sep>self._info=info<block_end>@property<def_stmt>pt1 self<block_start><return>int((self._x-self._w/2)<times>1280) int((self._y-self._h/2)<times>720)<block_end>@property<def_stmt>pt2 self<block_start><return>int((self._x+self._w/2)<times>1280) int((self._y+self._h/2)<times>720)<block_end>@property<def_stmt>center self<block_start><return>int(self._x<times>1280) int(self._y<times>720)<block_end>@property<def_stmt>text self<block_start><return>str(self._info)<block_end><block_end>gestures=[]<def_stmt>on_detect_person gesture_info<block_start>number=len(gesture_info)<line_sep>value_lock.acquire()<line_sep>gestures.clear()<for_stmt>i range(0 number)<block_start>x,y,w,h,info=gesture_info[i]<line_sep>gestures.append(GestureInfo(x y w h info))<line_sep>print("gesture: info:{0}, x:{1}, y:{2}, w:{3}, h:{4}".format(info x y w h))<block_end>value_lock.release()<block_end><if_stmt>__name__<eq>'__main__'<block_start>ep_robot=robot.Robot()<line_sep>ep_robot.initialize(conn_type="rndis")<line_sep>ep_vision=ep_robot.vision<line_sep>ep_camera=ep_robot.camera<line_sep>ep_camera.start_video_stream(<false>)<line_sep>result=ep_vision.sub_detect_info(name="gesture" callback=on_detect_person)<line_sep>value_lock=threading.Lock()<for_stmt>i range(0 500)<block_start>img=ep_camera.read_cv2_image(strategy="newest" timeout=1.5)<for_stmt>j range(0 len(gestures))<block_start>value_lock.acquire()<line_sep>cv2.rectangle(img gestures[j].pt1 gestures[j].pt2 (255 255 255))<line_sep>cv2.putText(img gestures[j].text gestures[j].center cv2.FONT_HERSHEY_SIMPLEX 1.5 (255 255 255) 3)<line_sep>value_lock.release()<block_end>cv2.imshow("Gestures" img)<line_sep>key=cv2.waitKey(1)<block_end>cv2.destroyAllWindows()<line_sep>result=ep_vision.unsub_detect_info("gesture")<line_sep>cv2.destroyAllWindows()<line_sep>ep_camera.stop_video_stream()<line_sep>ep_robot.close()<block_end>
# DO NOT EDIT THIS FILE. This file will be overwritten when re-running go-raml. <import_from_stmt>sanic Blueprint<import_from_stmt>sanic.views HTTPMethodView<import_from_stmt>sanic.response text<import_from_stmt>. deliveries_api<line_sep>deliveries_if=Blueprint('deliveries_if')<class_stmt>deliveriesView(HTTPMethodView)<block_start><async_keyword><def_stmt>get self request<block_start><return><await>deliveries_api.deliveries_get(request)<block_end><async_keyword><def_stmt>post self request<block_start><return><await>deliveries_api.deliveries_post(request)<block_end><block_end>deliveries_if.add_route(deliveriesView.as_view() '/deliveries')<class_stmt>deliveries_bydeliveryIdView(HTTPMethodView)<block_start><async_keyword><def_stmt>get self request deliveryId<block_start><return><await>deliveries_api.deliveries_byDeliveryId_get(request deliveryId)<block_end><async_keyword><def_stmt>patch self request deliveryId<block_start><return><await>deliveries_api.deliveries_byDeliveryId_patch(request deliveryId)<block_end><async_keyword><def_stmt>delete self request deliveryId<block_start><return><await>deliveries_api.deliveries_byDeliveryId_delete(request deliveryId)<block_end><block_end>deliveries_if.add_route(deliveries_bydeliveryIdView.as_view() '/deliveries/<deliveryId>')<line_sep>
<import_from_stmt>IAMApiModule *<line_sep>APP_USER_OUTPUT={"user_id":"mock_id" "user_name":"mock_user_name" "first_name":"mock_first_name" "last_name":"mock_last_name" "active":"true" "email":"<EMAIL>"}<line_sep>USER_APP_DATA=IAMUserAppData("mock_id" "mock_user_name" is_active=<true> app_data=APP_USER_OUTPUT)<line_sep>APP_DISABLED_USER_OUTPUT={"user_id":"mock_id" "user_name":"mock_user_name" "first_name":"mock_first_name" "last_name":"mock_last_name" "active":"false" "email":"<EMAIL>"}<line_sep>DISABLED_USER_APP_DATA=IAMUserAppData("mock_id" "mock_user_name" is_active=<false> app_data=APP_DISABLED_USER_OUTPUT)<class_stmt>MockCLient()<block_start><def_stmt>get_user self<block_start><return><none><block_end><def_stmt>create_user self<block_start><return><none><block_end><def_stmt>update_user self<block_start><return><none><block_end><def_stmt>enable_user self<block_start><return><none><block_end><def_stmt>disable_user self<block_start><return><none><block_end><block_end><def_stmt>get_outputs_from_user_profile user_profile<block_start>entry_context=user_profile.to_entry()<line_sep>outputs=entry_context.get('Contents')<line_sep><return>outputs<block_end><def_stmt>test_get_user_command__existing_user mocker<block_start>""" Given: - An app client object - A user-profile argument that contains an email of a user When: - The user exists in the application - Calling function get_user_command Then: - Ensure the resulted User Profile object holds the correct user details """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>'}}<line_sep>mocker.patch.object(client 'get_user' return_value=USER_APP_DATA)<line_sep>mocker.patch.object(IAMUserProfile 'update_with_app_data' return_value={})<line_sep>user_profile=IAMCommand().get_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.GET_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('active')<is><true><assert_stmt>outputs.get('id')<eq>'mock_id'<assert_stmt>outputs.get('username')<eq>'mock_user_name'<assert_stmt>outputs.get('details' {}).get('first_name')<eq>'mock_first_name'<assert_stmt>outputs.get('details' {}).get('last_name')<eq>'mock_last_name'<block_end><def_stmt>test_get_user_command__non_existing_user mocker<block_start>""" Given: - An app client object - A user-profile argument that contains an email a user When: - The user does not exist in the application - Calling function get_user_command Then: - Ensure the resulted User Profile object holds information about an unsuccessful result. 
"""<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>'}}<line_sep>mocker.patch.object(client 'get_user' return_value=<none>)<line_sep>user_profile=IAMCommand().get_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.GET_USER<assert_stmt>outputs.get('success')<is><false><assert_stmt>outputs.get('errorCode')<eq>IAMErrors.USER_DOES_NOT_EXIST[0]<assert_stmt>outputs.get('errorMessage')<eq>IAMErrors.USER_DOES_NOT_EXIST[1]<block_end><def_stmt>test_create_user_command__success mocker<block_start>""" Given: - An app client object - A user-profile argument that contains an email of a non-existing user in the application When: - Calling function create_user_command Then: - Ensure a User Profile object with the user data is returned """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>'}}<line_sep>mocker.patch.object(client 'get_user' return_value=<none>)<line_sep>mocker.patch.object(client 'create_user' return_value=USER_APP_DATA)<line_sep>user_profile=IAMCommand(get_user_iam_attrs=['email']).create_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.CREATE_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('active')<is><true><assert_stmt>outputs.get('id')<eq>'mock_id'<assert_stmt>outputs.get('username')<eq>'mock_user_name'<assert_stmt>outputs.get('details' {}).get('first_name')<eq>'mock_first_name'<assert_stmt>outputs.get('details' {}).get('last_name')<eq>'mock_last_name'<block_end><def_stmt>test_create_user_command__user_already_exists mocker<block_start>""" Given: - An app client object - A user-profile argument that contains an email of a user When: - The user already exists in the application and disabled - allow-enable argument is false - Calling function create_user_command Then: - Ensure the command is considered successful and the user is still disabled """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>'} 'allow-enable':'false'}<line_sep>mocker.patch.object(client 'get_user' return_value=DISABLED_USER_APP_DATA)<line_sep>mocker.patch.object(client 'update_user' return_value=DISABLED_USER_APP_DATA)<line_sep>user_profile=IAMCommand().create_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.UPDATE_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('active')<is><false><assert_stmt>outputs.get('id')<eq>'mock_id'<assert_stmt>outputs.get('username')<eq>'mock_user_name'<assert_stmt>outputs.get('details' {}).get('first_name')<eq>'mock_first_name'<assert_stmt>outputs.get('details' {}).get('last_name')<eq>'mock_last_name'<block_end><def_stmt>test_update_user_command__non_existing_user mocker<block_start>""" Given: - An app client object - A user-profile argument that contains user data When: - The user does not exist in the application - create-if-not-exists parameter is checked - Create User command is enabled - Calling function update_user_command Then: - Ensure the create action is executed - Ensure a User Profile object with the user data is returned """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>' 'givenname':'mock_first_name'}}<line_sep>mocker.patch.object(client 'get_user' return_value=<none>)<line_sep>mocker.patch.object(client 'create_user' 
return_value=USER_APP_DATA)<line_sep>user_profile=IAMCommand(create_if_not_exists=<true>).update_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.CREATE_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('active')<is><true><assert_stmt>outputs.get('id')<eq>'mock_id'<assert_stmt>outputs.get('username')<eq>'mock_user_name'<assert_stmt>outputs.get('details' {}).get('first_name')<eq>'mock_first_name'<assert_stmt>outputs.get('details' {}).get('last_name')<eq>'mock_last_name'<block_end><def_stmt>test_update_user_command__command_is_disabled mocker<block_start>""" Given: - An app client object - A user-profile argument that contains user data When: - Update User command is disabled - Calling function update_user_command Then: - Ensure the command is considered successful and skipped """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>' 'givenname':'mock_first_name'}}<line_sep>mocker.patch.object(client 'get_user' return_value=<none>)<line_sep>mocker.patch.object(client 'update_user' return_value=USER_APP_DATA)<line_sep>user_profile=IAMCommand(is_update_enabled=<false>).update_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.UPDATE_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('skipped')<is><true><assert_stmt>outputs.get('reason')<eq>'Command is disabled.'<block_end><def_stmt>test_disable_user_command__non_existing_user mocker<block_start>""" Given: - An app client object - A user-profile argument that contains an email of a user When: - create-if-not-exists parameter is unchecked - The user does not exist in the application - Calling function disable_user_command Then: - Ensure the command is considered successful and skipped """<line_sep>client=MockCLient()<line_sep>args={'user-profile':{'email':'<EMAIL>'}}<line_sep>mocker.patch.object(client 'get_user' return_value=<none>)<line_sep>user_profile=IAMCommand().disable_user(client args)<line_sep>outputs=get_outputs_from_user_profile(user_profile)<assert_stmt>outputs.get('action')<eq>IAMActions.DISABLE_USER<assert_stmt>outputs.get('success')<is><true><assert_stmt>outputs.get('skipped')<is><true><assert_stmt>outputs.get('reason')<eq>IAMErrors.USER_DOES_NOT_EXIST[1]<block_end>
<import_from_stmt>django template<import_from_stmt>django.forms.fields CheckboxInput<line_sep>register=template.Library()<line_sep>@register.filter(name='is_checkbox')<def_stmt>is_checkbox value<block_start><return>isinstance(value CheckboxInput)<block_end>
<import_from_stmt>bip_utils.bip.bip84.bip84 Bip84<line_sep>
""" ``Example``:: ``csprng_trivium`` load, req = pyrtl.Input(1, 'load'), pyrtl.Input(1, 'req') ready, rand = pyrtl.Output(1, 'ready'), pyrtl.Output(128, 'rand') ready_out, rand_out = prngs.csprng_trivium(128, load, req) ready <<= ready_out rand <<= rand_out sim_trace = pyrtl.SimulationTrace() sim = pyrtl.Simulation(tracer=sim_trace) # seed once at the beginning sim.step({'load': 1, 'req': 0}) while sim.value[ready] == 0: # or loop 19 cycles sim.step({'load': 0, 'req': 0}) sim.step({'load': 0, 'req': 1}) while sim.value[ready] == 0: # or loop 2 cycles sim.step({'load': 0, 'req': 0}) print(sim.inspect(rand)) sim_trace.render_trace(symbol_len=45, segment_size=5) ``prng_xoroshiro128`` load, req = pyrtl.Input(1, 'load'), pyrtl.Input(1, 'req') ready, rand = pyrtl.Output(1, 'ready'), pyrtl.Output(128, 'rand') ready_out, rand_out = prngs.prng_xoroshiro128(128, load, req) ready <<= ready_out rand <<= rand_out sim_trace = pyrtl.SimulationTrace() sim = pyrtl.Simulation(tracer=sim_trace) sim.step({'load': 1, 'req': 0}) # seed once at the beginning sim.step({'load': 0, 'req': 1}) while sim.value[ready] == 0: # or loop 2 cycles sim.step({'load': 0, 'req': 0}) print(sim.inspect(rand)) sim_trace.render_trace(symbol_len=40, segment_size=1) ``prng_lfsr`` load, req = pyrtl.Input(1, 'load'), pyrtl.Input(1, 'req') rand = pyrtl.Output(64, 'rand') rand <<= prngs.prng_lfsr(64, load, req) sim_trace = pyrtl.SimulationTrace() sim = pyrtl.Simulation(tracer=sim_trace) sim.step({'load': 1, 'req': 0}) # seed once at the beginning sim.step({'load': 0, 'req': 1}) sim.step({'load': 0, 'req': 0}) print(sim.inspect(rand)) sim_trace.render_trace(symbol_len=40, segment_size=1) ``explicit seeding`` seed = pyrtl.Input(127, 'seed') load, req = pyrtl.Input(1, 'load'), pyrtl.Input(1, 'req') rand = pyrtl.Output(32, 'rand') rand <<= prngs.prng_lfsr(32, load, req, seed) sim_trace = pyrtl.SimulationTrace() sim = pyrtl.Simulation(tracer=sim_trace) sim.step({'load': 1, 'req': 0, 'seed': 0x102030405060708090a0b0c0d0e0f010}) sim.step({'load': 0, 'req': 1, 'seed': 0x102030405060708090a0b0c0d0e0f010}) sim.step({'load': 0, 'req': 0, 'seed': 0x102030405060708090a0b0c0d0e0f010}) print(sim.inspect(rand)) sim_trace.render_trace(symbol_len=40, segment_size=1) """<import_from_future_stmt> absolute_import<import_stmt>pyrtl<def_stmt>prng_lfsr bitwidth load req seed=<none><block_start>""" Builds a single-cycle PRNG using a 127 bits Fibonacci LFSR. :param bitwidth: the desired bitwidth of the random number :param load: one bit signal to load the seed into the prng :param req: one bit signal to request a random number :param seed: 127 bits WireVector, defaults to None (self-seeding), refrain from self-seeding if reseeding at run time is required :return: register containing the random number with the given bitwidth A very fast and compact PRNG that generates a random number using only one clock cycle. Has a period of 2**127 - 1. Its linearity makes it a bit statistically weak, but should be good enough for any noncryptographic purpose like test pattern generation. 
"""<line_sep># 127 bits is chosen because 127 is a mersenne prime, which makes the period of the # LFSR maximized at 2**127 - 1 for any requested bitwidth <if_stmt>seed<is><none><block_start><import_stmt>random<line_sep>cryptogen=random.SystemRandom()<line_sep>seed=cryptogen.randrange(1 2<power>127)<block_end># seed itself if no seed signal is given lfsr=pyrtl.Register(127<if>bitwidth<l>127<else>bitwidth)<line_sep># leap ahead by shifting the LFSR bitwidth times leap_ahead=lfsr<for_stmt>i range(bitwidth)<block_start>leap_ahead=pyrtl.concat(leap_ahead leap_ahead[125]^leap_ahead[126])<block_end><with_stmt>pyrtl.conditional_assignment<block_start><with_stmt>load<block_start>lfsr.next<augor>seed<block_end><with_stmt>req<block_start>lfsr.next<augor>leap_ahead<block_end><block_end><return>lfsr[:bitwidth]<block_end><def_stmt>prng_xoroshiro128 bitwidth load req seed=<none><block_start>""" Builds a PRNG using the Xoroshiro128+ algorithm in hardware. :param bitwidth: the desired bitwidth of the random number :param load: one bit signal to load the seed into the prng :param req: one bit signal to request a random number :param seed: 128 bits WireVector, defaults to None (self-seeding), refrain from self-seeding if reseeding at run time is required :return ready, rand: ready is a one bit signal showing the random number has been produced, rand is a register containing the random number with the given bitwidth An efficient noncryptographic PRNG, has much smaller area than Trivium. But it does require a 64-bit adder to compute the output, so it is a bit slower. Has a period of 2**128 - 1. Passes most statistical tests. Outputs a 64-bit random word each cycle, takes multiple cycles if more than 64 bits are requested, and MSBs of the random words are returned if the bitwidth is not a multiple of 64. 
See also http://xoroshiro.di.unimi.it/ """<import_from_stmt>math ceil log<import_from_stmt>pyrtl.rtllib adders<import_from_stmt>pyrtl.rtllib.libutils _shifted_reg_next<as>shift# for readability <if_stmt>seed<is><none><block_start><import_stmt>random<line_sep>cryptogen=random.SystemRandom()<line_sep>seed=cryptogen.randrange(1 2<power>128)# seed itself if no seed signal is given <block_end>seed=pyrtl.as_wires(seed 128)<line_sep>s0,s1=(pyrtl.Register(64)<for>i range(2))<line_sep>output=pyrtl.WireVector(64)<line_sep># update internal states by xoring, rotating, and shifting _s1=s0^s1<line_sep>s0_next=(shift(s0 'l' 55)|shift(s0 'r' 9))^shift(_s1 'l' 14)^_s1<line_sep>s1_next=shift(_s1 'l' 36)|shift(_s1 'r' 28)<line_sep>output<auglshift>adders.kogge_stone(s0 s1)<line_sep>gen_cycles=int(ceil(bitwidth/64))<line_sep>counter_bitwidth=int(ceil(log(gen_cycles 2)))<if>gen_cycles<g>1<else>1<line_sep>rand=pyrtl.Register(gen_cycles<times>64)<line_sep>counter=pyrtl.Register(counter_bitwidth 'counter')<line_sep>gen_done=counter<eq>gen_cycles-1<line_sep>state=pyrtl.Register(1)<line_sep>WAIT,GEN=(pyrtl.Const(x)<for>x range(2))<with_stmt>pyrtl.conditional_assignment<block_start><with_stmt>load<block_start>s0.next<augor>seed[:64]<line_sep>s1.next<augor>seed[64:]<line_sep>state.next<augor>WAIT<block_end><with_stmt>req<block_start>counter.next<augor>0<line_sep>s0.next<augor>s0_next<line_sep>s1.next<augor>s1_next<line_sep>rand.next<augor>pyrtl.concat(rand output)<line_sep>state.next<augor>GEN<block_end><with_stmt>state<eq>GEN<block_start><with_stmt>~gen_done<block_start>counter.next<augor>counter+1<line_sep>s0.next<augor>s0_next<line_sep>s1.next<augor>s1_next<line_sep>rand.next<augor>pyrtl.concat(rand output)<block_end><block_end><block_end>ready=~load&~req&(state<eq>GEN)&gen_done<line_sep><return>ready rand[-bitwidth:]<block_end># return MSBs because LSBs are less random <def_stmt>csprng_trivium bitwidth load req seed=<none> bits_per_cycle=64<block_start>""" Builds a cryptographically secure PRNG using the Trivium stream cipher. :param bitwidth: the desired bitwidth of the random number :param load: one bit signal to load the seed into the prng :param req: one bit signal to request a random number :param seed: 160 bits WireVector (80 bits key + 80 bits IV), defaults to None (self-seeding), refrain from self-seeding if reseeding at run time is needed :param bits_per_cycle: the number of output bits to generate in parallel each cycle, up to 64 bits, must be a power of two: either 1, 2, 4, 8, 16, 32, or 64 :return ready, rand: ready is a one bit signal showing either the random number has been produced or the seed has been initialized, rand is a register containing the random number with the given bitwidth This prng uses Trivium's key stream as its random bits output. Both seed and key stream are MSB first (the earliest bit is stored at the MSB). Trivium has a seed initialization stage that discards the first weak 1152 output bits after each loading. Generation stage can take multiple cycles as well depending on the given bitwidth and bits_per_cycle. Has smaller gate area and faster speed than AES-CTR and any other stream cipher. Passes all known statistical tests. Can be used to generate encryption keys or IVs. Designed to securely generate up to 2**64 bits. If more than 2**64 bits is needed, must reseed after each generation of 2**64 bits. 
Trivium specifications: http://www.ecrypt.eu.org/stream/ciphers/trivium/trivium.pdf See also the eSTREAM portfolio page: http://www.ecrypt.eu.org/stream/e2-trivium.html """<import_from_stmt>math ceil log<if_stmt>(64<floordiv>bits_per_cycle)<times>bits_per_cycle<ne>64<block_start><raise>pyrtl.PyrtlError('bits_per_cycle is invalid')<block_end><if_stmt>seed<is><none><block_start><import_stmt>random<line_sep>cryptogen=random.SystemRandom()<line_sep>seed=cryptogen.randrange(2<power>160)# seed itself if no seed signal is given <block_end>seed=pyrtl.as_wires(seed 160)<line_sep>key=seed[80:]<line_sep>iv=seed[:80]<line_sep>a=pyrtl.Register(93)<line_sep>b=pyrtl.Register(84)<line_sep>c=pyrtl.Register(111)<line_sep>feedback_a,feedback_b,feedback_c,output=([]<for>i range(4))<for_stmt>i range(bits_per_cycle)<block_start>t1=a[65-i]^a[92-i]<line_sep>t2=b[68-i]^b[83-i]<line_sep>t3=c[65-i]^c[110-i]<line_sep>feedback_a.append(t3^c[108-i]&c[109-i]^a[68-i])<line_sep>feedback_b.append(t1^a[90-i]&a[91-i]^b[77-i])<line_sep>feedback_c.append(t2^b[81-i]&b[82-i]^c[86-i])<line_sep>output.append(t1^t2^t3)<block_end># update internal states by shifting bits_per_cycle times a_next=pyrtl.concat(a *feedback_a)<line_sep>b_next=pyrtl.concat(b *feedback_b)<line_sep>c_next=pyrtl.concat(c *feedback_c)<line_sep>init_cycles=1152<floordiv>bits_per_cycle<line_sep>gen_cycles=int(ceil(bitwidth/bits_per_cycle))<line_sep>counter_bitwidth=int(ceil(log(max(init_cycles+1 gen_cycles) 2)))<line_sep>rand=pyrtl.Register(bitwidth)<line_sep>counter=pyrtl.Register(counter_bitwidth 'counter')<line_sep>init_done=counter<eq>init_cycles<line_sep>gen_done=counter<eq>gen_cycles-1<line_sep>state=pyrtl.Register(2)<line_sep>WAIT,INIT,GEN=(pyrtl.Const(x)<for>x range(3))<with_stmt>pyrtl.conditional_assignment<block_start><with_stmt>load<block_start>counter.next<augor>0<line_sep>a.next<augor>key<line_sep>b.next<augor>iv<line_sep>c.next<augor>pyrtl.concat(pyrtl.Const("3'b111") pyrtl.Const(0 108))<line_sep>state.next<augor>INIT<block_end><with_stmt>req<block_start>counter.next<augor>0<line_sep>a.next<augor>a_next<line_sep>b.next<augor>b_next<line_sep>c.next<augor>c_next<line_sep>rand.next<augor>pyrtl.concat(rand *output)<line_sep>state.next<augor>GEN<block_end><with_stmt>state<eq>INIT<block_start><with_stmt>~init_done<block_start>counter.next<augor>counter+1<line_sep>a.next<augor>a_next<line_sep>b.next<augor>b_next<line_sep>c.next<augor>c_next<block_end><block_end><with_stmt>state<eq>GEN<block_start><with_stmt>~gen_done<block_start>counter.next<augor>counter+1<line_sep>a.next<augor>a_next<line_sep>b.next<augor>b_next<line_sep>c.next<augor>c_next<line_sep>rand.next<augor>pyrtl.concat(rand *output)<block_end><block_end><block_end>ready=~load&~req&((state<eq>INIT)&init_done|(state<eq>GEN)&gen_done)<line_sep><return>ready rand<block_end>
<import_from_stmt>django.urls path<import_from_stmt>. views_api<line_sep>app_name="batch"<line_sep>urlpatterns=[path('get/hosts/' views_api.get_hosts name='get_hosts') path('upload/' views_api.upload name='upload') path('logs/' views_api.logs name='logs') ]<line_sep>
# prepare for Python 3 <import_from_future_stmt> absolute_import division print_function unicode_literals<import_stmt>logging<import_stmt>subprocess<import_stmt>sys<import_stmt>os<import_stmt>re<try_stmt><block_start><import_stmt>benchexec.util<as>util<import_stmt>benchexec.result<as>result<import_from_stmt>benchexec.tools.template BaseTool<block_end><except_stmt>ImportError# fall-back solution (at least for now) <block_start><import_stmt>symbiotic.benchexec.util<as>util<import_stmt>symbiotic.benchexec.result<as>result<import_from_stmt>symbiotic.benchexec.tools.template BaseTool<block_end><import_from_stmt>.tool SymbioticBaseTool<line_sep>SOFTTIMELIMIT='timelimit'<class_stmt>SymbioticTool(BaseTool SymbioticBaseTool)<block_start>""" Tool info for CPAchecker. It has additional features such as building CPAchecker before running it if executed within a source checkout. It also supports extracting data from the statistics output of CPAchecker for adding it to the result tables. """<def_stmt>__init__ self opts<block_start>SymbioticBaseTool.__init__(self opts)<block_end><def_stmt>executable self<block_start><return>util.find_executable('ikos')<block_end><def_stmt>version self executable<block_start>stdout=self._version_from_tool(executable '--version')<line_sep>line=next(l<for>l stdout.splitlines()<if>l.startswith('ikos'))<line_sep>line=line.replace('ikos' '')<line_sep><return>line.strip()<block_end><def_stmt>name self<block_start><return>'ikos'<block_end><def_stmt>cmdline self executable options tasks propertyfile=<none> rlimits={}<block_start>opts=['-d=dbm']<if_stmt>self._options.property.assertions()<block_start>opts.append('-a=prover')<block_end><elif_stmt>self._options.property.memsafety()<block_start>opts.append('-a=boa')<line_sep>opts.append('-a=nullity')<line_sep>opts.append('-a=dfa')<block_end><elif_stmt>self._options.property.signedoverflow()<block_start>opts.append('-a=sio')<block_end><return>[executable]+options+opts+tasks<block_end><def_stmt>determine_result self returncode returnsignal output isTimeout# TODO: fixme for memsafety <block_start><for_stmt>line output<block_start><if_stmt>'error: double free'<in>line<block_start><return>result.RESULT_FALSE_FREE<block_end><elif_stmt>'error: buffer overflow'<in>line<block_start><return>result.RESULT_FALSE_DEREF<block_end><elif_stmt>'error: assertion never holds'<in>line<block_start><return>result.RESULT_FALSE_REACH<block_end><elif_stmt>'The program is SAFE'<in>line<block_start><return>result.RESULT_TRUE_PROP<block_end><elif_stmt>'The program is potentially UNSAFE'<in>line<block_start><return>result.RESULT_UNKNOWN<block_end><block_end><return>result.RESULT_ERROR<block_end><def_stmt>llvm_version self<block_start>""" Return required version of LLVM """<line_sep><return>'7.0.1'<block_end><block_end>
"""empty message Revision ID: 5d5340d8c969 Revises: Create Date: 2021-06-17 11:12:46.834659 """<import_from_stmt>alembic op<import_stmt>sqlalchemy<as>sa<line_sep># revision identifiers, used by Alembic. revision="5d5340d8c969"<line_sep>down_revision=<none><line_sep>branch_labels=<none><line_sep>depends_on=<none><def_stmt>upgrade # ### commands auto generated by Alembic - please adjust! ### <block_start>op.create_table("category" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("text" sa.String(length=50) nullable=<true>) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("text") )<line_sep>op.create_table("integration" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("name" sa.String(length=50) nullable=<true>) sa.Column("settings" sa.Text() nullable=<true>) sa.Column("enabled" sa.Boolean() nullable=<true>) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("name") )<line_sep>op.create_table("page" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("slug" sa.String(length=100) nullable=<false>) sa.Column("title" sa.String(length=50) nullable=<false>) sa.Column("display" sa.Boolean() nullable=<true>) sa.Column("ptype" sa.String(length=20) nullable=<true>) sa.Column("content" sa.Text() nullable=<true>) sa.Column("html" sa.Text() nullable=<true>) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("slug") sa.UniqueConstraint("title") )<line_sep>op.create_table("tag" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("text" sa.String(length=50) nullable=<true>) sa.Column("url" sa.String(length=100) nullable=<true>) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("text") )<line_sep>op.create_table("user" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("username" sa.String(length=64) nullable=<true>) sa.Column("name" sa.String(length=100) nullable=<true>) sa.Column("email" sa.String(length=100) nullable=<true>) sa.Column("password" sa.String(length=200) nullable=<true>) sa.Column("settings" sa.Text() nullable=<true>) sa.Column("is_admin" sa.Boolean() nullable=<true>) sa.Column("link" sa.String(length=128) nullable=<true>) sa.Column("picture" sa.String(length=512) nullable=<true>) sa.Column("type" sa.String(length=16) nullable=<true>) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("username" "email" name="_username_email") )<line_sep>op.create_table("o_auth2_token" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("name" sa.String(length=40) nullable=<true>) sa.Column("token_type" sa.String(length=40) nullable=<true>) sa.Column("access_token" sa.String(length=200) nullable=<true>) sa.Column("refresh_token" sa.String(length=200) nullable=<true>) sa.Column("expires_at" sa.Integer() nullable=<true>) sa.Column("user_id" sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(["user_id"] ["user.id"] ) sa.PrimaryKeyConstraint("id") )<line_sep>op.create_table("post" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("title" sa.String(length=200) nullable=<false>) sa.Column("date" sa.DateTime() nullable=<true>) sa.Column("last_modified" sa.DateTime() nullable=<true>) sa.Column("image" sa.String(length=400) nullable=<true>) sa.Column("image_caption" sa.String(length=400) nullable=<true>) sa.Column("lang" sa.String(length=20) nullable=<true>) sa.Column("content" sa.Text() nullable=<true>) sa.Column("html" sa.Text() nullable=<true>) sa.Column("toc" sa.Text() nullable=<true>) sa.Column("url" sa.String(length=80) nullable=<true>) sa.Column("comment" sa.Boolean() nullable=<true>) sa.Column("description" sa.String(length=400) nullable=<true>) sa.Column("author" 
sa.String(length=50) nullable=<true>) sa.Column("slug" sa.String(length=100) nullable=<true>) sa.Column("is_draft" sa.Boolean() nullable=<true>) sa.Column("category_id" sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(["category_id"] ["category.id"] ) sa.PrimaryKeyConstraint("id") )<line_sep>op.create_table("comment" sa.Column("id" sa.Integer() nullable=<false>) sa.Column("post_id" sa.Integer() nullable=<true>) sa.Column("author_id" sa.Integer() nullable=<true>) sa.Column("floor" sa.Integer() nullable=<true>) sa.Column("content" sa.Text() nullable=<true>) sa.Column("html" sa.Text() nullable=<true>) sa.Column("create_at" sa.DateTime() nullable=<true>) sa.Column("parent_id" sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(["author_id"] ["user.id"] ) sa.ForeignKeyConstraint(["parent_id"] ["comment.id"] ) sa.ForeignKeyConstraint(["post_id"] ["post.id"] ) sa.PrimaryKeyConstraint("id") sa.UniqueConstraint("post_id" "floor" name="_post_floor") )<line_sep>op.create_table("tags" sa.Column("tag_id" sa.Integer() nullable=<true>) sa.Column("post_id" sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(["post_id"] ["post.id"] ) sa.ForeignKeyConstraint(["tag_id"] ["tag.id"] ) )<line_sep># ### end Alembic commands ### <block_end><def_stmt>downgrade # ### commands auto generated by Alembic - please adjust! ### <block_start>op.drop_table("tags")<line_sep>op.drop_table("comment")<line_sep>op.drop_table("post")<line_sep>op.drop_table("o_auth2_token")<line_sep>op.drop_table("user")<line_sep>op.drop_table("tag")<line_sep>op.drop_table("page")<line_sep>op.drop_table("integration")<line_sep>op.drop_table("category")<line_sep># ### end Alembic commands ### <block_end>
# Lint as: python3 # Copyright 2020 DeepMind Technologies Limited. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """Trackers running on symbolic alchemy."""<import_stmt>abc<import_stmt>collections<import_stmt>copy<import_stmt>itertools<import_from_stmt>typing Any Callable Dict Optional TypeVar<import_from_stmt>dm_alchemy event_tracker<import_from_stmt>dm_alchemy.ideal_observer ideal_observer<import_from_stmt>dm_alchemy.ideal_observer precomputed_maps<import_from_stmt>dm_alchemy.types graphs<import_from_stmt>dm_alchemy.types stones_and_potions<import_from_stmt>dm_alchemy.types utils<import_stmt>numpy<as>np<line_sep>Graph=graphs.Graph<line_sep>GameState=event_tracker.GameState<line_sep>NO_OUTCOME=event_tracker.NO_OUTCOME<line_sep>PerceivedStone=stones_and_potions.PerceivedStone<line_sep>PerceivedPotion=stones_and_potions.PerceivedPotion<line_sep>AlignedStoneIndex=stones_and_potions.AlignedStoneIndex<line_sep>PerceivedPotionIndex=stones_and_potions.PerceivedPotionIndex<line_sep>StoneMap=stones_and_potions.StoneMap<line_sep>PotionMap=stones_and_potions.PotionMap<line_sep>CAULDRON=stones_and_potions.CAULDRON<line_sep>RewardWeights=stones_and_potions.RewardWeights<line_sep>PrecomputedMaps=precomputed_maps.PrecomputedMaps<line_sep># For typing symbolic_alchemy=Any<line_sep>ActionInfo=collections.namedtuple('ActionInfo' 'original_action has_stone has_potion')<line_sep># Create a type which can refer to anything derived from SymbolicAlchemyTracker BaseOrDerivedTracker=TypeVar('BaseOrDerivedTracker' bound='SymbolicAlchemyTracker')<class_stmt>SequenceStatsTracker<block_start>"""Tracks how a statistic changes throughout an episode."""<def_stmt>__init__ self tracker:BaseOrDerivedTracker get_stat:Callable[[BaseOrDerivedTracker] Any] default_stat:Any=0<block_start>self._get_stat=get_stat<line_sep>self._tracker=tracker<line_sep>self.stats=[]<line_sep>self.default_stat=default_stat<block_end><def_stmt>track self<arrow><none><block_start>self.stats.append(self._get_stat(self._tracker))<block_end><def_stmt>reset self<arrow><none><block_start>self.stats=[]<block_end><block_end><class_stmt>SymbolicAlchemyTracker<block_start>"""Object which has functions called for each action in symbolic alchemy."""<line_sep>@[email protected]<def_stmt>name self<arrow>str<block_start><pass><block_end>@property<def_stmt>per_action_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{}<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{}<block_end><def_stmt>episode_start self unused_chemistry:utils.Chemistry<arrow><none><block_start><del_stmt>unused_chemistry<for_stmt>tracker itertools.chain(self.per_trial_trackers.values() self.per_action_trackers.values())<block_start>tracker.reset()<block_end><block_end><def_stmt>trial_start self unused_game_state:GameState<arrow><none><block_start><del_stmt>unused_game_state<for_stmt>tracker 
self.per_action_trackers.values()<block_start>tracker.track()<block_end><block_end><def_stmt>action_and_outcome self unused_action:utils.TypeBasedAction unused_outcome:Optional[PerceivedStone] unused_action_info:ActionInfo<arrow><none><block_start><del_stmt>unused_action unused_outcome unused_action_info<for_stmt>tracker self.per_action_trackers.values()<block_start>tracker.track()<block_end><block_end><def_stmt>trial_end self<arrow><none><block_start><for_stmt>tracker self.per_trial_trackers.values()<block_start>tracker.track()<block_end><block_end><def_stmt>episode_returns self<arrow>Any<block_start><return>{k:tuple(tracker.stats)<for>k,tracker itertools.chain(self.per_trial_trackers.items() self.per_action_trackers.items())}<block_end><def_stmt>default_returns self num_trials:int num_actions_per_trial:int<arrow>Any<block_start>"""Returns some default values for the tracker."""<line_sep>per_trial=zip(self.per_trial_trackers.items() itertools.repeat(num_trials))<line_sep>num_actions=num_trials<times>(num_actions_per_trial+1)<line_sep>per_action=zip(self.per_action_trackers.items() itertools.repeat(num_actions))<line_sep><return>{k:tuple(tracker.default_stat<for>_ range(expected_length))<for>(k tracker),expected_length itertools.chain(per_trial per_action)}<block_end><block_end>StatTrackerOrDerived=TypeVar('StatTrackerOrDerived' bound='StatTracker')<line_sep>GetStat=Callable[[StatTrackerOrDerived utils.TypeBasedAction Optional[PerceivedStone] ActionInfo] Any]<line_sep>Condition=Callable[[utils.TypeBasedAction Optional[PerceivedStone] ActionInfo] bool]<class_stmt>StatTracker(SymbolicAlchemyTracker)<block_start>"""Tracks a statistic each time an action occurs."""<def_stmt>__init__ self get_stat:GetStat init_step_stat:Any=0<block_start>self._get_stat=get_stat<line_sep>self.cumul_action_occurred=copy.deepcopy(init_step_stat)<line_sep>self.last_step_stat=copy.deepcopy(init_step_stat)<line_sep>self._init_step_stat=init_step_stat<line_sep>self.per_action_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.last_step_stat copy.deepcopy(self._init_step_stat))<line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.cumul_action_occurred copy.deepcopy(self._init_step_stat))<block_end>@property<def_stmt>per_action_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_action':self.per_action_tracker}<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_trial':self.per_trial_tracker}<block_end><def_stmt>action_and_outcome self action:utils.TypeBasedAction outcome:Optional[PerceivedStone] action_info:ActionInfo<arrow><none><block_start>self.last_step_stat=self._get_stat(self action outcome action_info)<line_sep>self.cumul_action_occurred<augadd>self.last_step_stat<line_sep>super().action_and_outcome(action outcome action_info)<block_end><def_stmt>trial_end self<arrow><none><block_start>super().trial_end()<line_sep>self.cumul_action_occurred=copy.deepcopy(self._init_step_stat)<block_end><block_end><class_stmt>SpecificActionTracker(StatTracker)<block_start>"""Counts number of actions which satisfy some condition."""<def_stmt>__init__ self condition:Condition<block_start><def_stmt>get_stat unused_tracker:StatTracker action:utils.TypeBasedAction outcome:Optional[PerceivedStone] action_info:ActionInfo<arrow>int<block_start><return>1<if>condition(action outcome 
action_info)<else>0<block_end>super().__init__(get_stat=get_stat)<block_end><block_end><class_stmt>NoChangeActionTracker(SpecificActionTracker)<block_start>"""Counts number of actions which do not cause stone to change."""<line_sep>NAME='no_change'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self<block_start><def_stmt>condition action:utils.TypeBasedAction outcome:Optional[PerceivedStone] unused_action_info:ActionInfo<arrow>bool<block_start><del_stmt>unused_action_info<line_sep><return>(all(stone<is><not><none><for>stone [outcome action.perceived_stone])<and>action.perceived_stone<eq>outcome)<block_end>super().__init__(condition=condition)<block_end><block_end><class_stmt>NegStoneCashedTracker(SpecificActionTracker)<block_start>"""Counts number of times a negative stone is put in the cauldron."""<line_sep>NAME='neg_stone'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self<block_start><def_stmt>condition action:utils.TypeBasedAction unused_outcome:Optional[PerceivedStone] unused_action_info:ActionInfo<arrow>bool<block_start><del_stmt>unused_outcome unused_action_info<line_sep><return>(action.cauldron<and>action.perceived_stone<is><not><none><and>action.perceived_stone.reward<l>0)<block_end>super().__init__(condition=condition)<block_end><block_end><class_stmt>CashedStoneValueTracker(SymbolicAlchemyTracker)<block_start>"""Counts average value of cashed stone."""<line_sep>NAME='cashed_stone_value'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self reward_weights:RewardWeights stone_map:StoneMap rotation:np.ndarray<block_start>self._stone_map=stone_map<line_sep>self._rotation=rotation<line_sep>self.average_stone_value=0.0<line_sep>self._num_stones_cashed=0<line_sep>self._reward_weights=reward_weights<line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.average_stone_value 0.0)<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_trial':self.per_trial_tracker}<block_end><def_stmt>action_and_outcome self action:utils.TypeBasedAction outcome:Optional[PerceivedStone] action_info:ActionInfo<arrow><none><block_start><if_stmt>action.cauldron<and>action.using_stone<block_start>aligned_stone=stones_and_potions.align(action.perceived_stone self._rotation)<line_sep>latent_stone=self._stone_map.apply(aligned_stone)<line_sep>self.average_stone_value<augadd>self._reward_weights(latent_stone.latent_coords)<line_sep>self._num_stones_cashed<augadd>1<block_end>super().action_and_outcome(action outcome action_info)<block_end><def_stmt>trial_end self<arrow><none><block_start><if_stmt>self._num_stones_cashed<g>0<block_start>self.average_stone_value<augdiv>self._num_stones_cashed<block_end>super().trial_end()<line_sep>self.average_stone_value=0.0<line_sep>self._num_stones_cashed=0<block_end><block_end><class_stmt>ChangeGoldstoneTracker(SpecificActionTracker)<block_start>"""Counts number of times a goldstone is changed to something else."""<line_sep>NAME='gold_changed'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self threshold:int=2<block_start><def_stmt>condition action:utils.TypeBasedAction outcome:Optional[PerceivedStone] 
unused_action_info:ActionInfo<arrow>bool<block_start><del_stmt>unused_action_info<if_stmt><not>action.using_stone<or><not>action.using_potion<block_start><return><false><block_end>stone_reward=(action.perceived_stone.reward<if>action.perceived_stone<else>0)<line_sep><return>outcome<is><not><none><and>stone_reward<g>threshold<g>outcome.reward<block_end>super().__init__(condition=condition)<block_end><block_end><def_stmt>pos_stone_not_cashed_tracker_name lb:int=0 ub:Optional[int]=<none><arrow>str<block_start><if_stmt>lb<eq>0<and>ub<is><none><block_start><return>'pos_stone_not_cashed'<block_end><elif_stmt>ub<is><none><block_start><return>'stone_above_'+str(lb)+'_not_cashed'<block_end><return>'stone_between_'+str(lb)+'_and_'+str(ub)+'_not_cashed'<block_end><class_stmt>PosStoneNotCashedTracker(SymbolicAlchemyTracker)<block_start>"""Counts number of times a stone with specified reward is not cashed."""<def_stmt>__init__ self reward_weights:RewardWeights lb:int=0 ub:Optional[int]=<none><block_start>self.pos_stones_at_end=0<line_sep>self._condition=<lambda>r:lb<l>r<l>ub<if>ub<is><not><none><else>lb<l>r<line_sep>self._game_state=<none><line_sep>self._reward_weights=reward_weights<line_sep>self.lb=lb<line_sep>self.ub=ub<line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.pos_stones_at_end)<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_trial':self.per_trial_tracker}<block_end>@property<def_stmt>name self<arrow>str<block_start><return>pos_stone_not_cashed_tracker_name(self.lb self.ub)<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>self._game_state=game_state<line_sep>super().trial_start(game_state)<block_end><def_stmt>trial_end self<arrow><none><block_start>self.pos_stones_at_end=len([s<for>s self._game_state.existing_stones()<if>self._condition(self._reward_weights(s.latent))])<line_sep>super().trial_end()<block_end><block_end><class_stmt>StoneImprovementTracker(SymbolicAlchemyTracker)<block_start>"""Counts number of times a goldstone is changed to something else."""<line_sep># pylint: disable=protected-access # TODO(b/173784755): avoid protected access by using event tracker to tracker # latest slot based action. NAME='stone_improvement'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self reward_weights:RewardWeights stone_map:StoneMap rotation:np.ndarray<block_start>self._stone_map=stone_map<line_sep>self._rotation=rotation<line_sep>self.average_stone_improvement=0.0<line_sep>self._reward_weights=reward_weights<line_sep>self._game_state=<none><line_sep>self._start_rewards={}<line_sep>self._end_rewards={}<line_sep>self._prev_existing_stones=set()<line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.average_stone_improvement 0.0)<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_trial':self.per_trial_tracker}<block_end><def_stmt>action_and_outcome self action:utils.TypeBasedAction outcome:Optional[PerceivedStone] action_info:ActionInfo<arrow><none><block_start><if_stmt>action.cauldron# We can't get the stone ind as it has already been removed from the game # state, so instead just see what stone ind is missing. 
<block_start>missing_stones=self._prev_existing_stones.difference(self._game_state._existing_stones)<assert_stmt>len(missing_stones)<eq>1 ('Should be 1 missing stone when stone is used.')<line_sep>aligned_stone=stones_and_potions.align(action.perceived_stone self._rotation)<line_sep>latent_stone=self._stone_map.apply(aligned_stone)<for_stmt>ind missing_stones<block_start>self._end_rewards[ind]=self._reward_weights(latent_stone.latent_coords)<block_end>self._prev_existing_stones=copy.deepcopy(self._game_state._existing_stones)<block_end>super().action_and_outcome(action outcome action_info)<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>self._game_state=game_state<line_sep>self._prev_existing_stones=copy.deepcopy(self._game_state._existing_stones)<line_sep>self._start_rewards={i:self._reward_weights(self._game_state.get_stone(i).latent)<for>i self._prev_existing_stones}<line_sep>super().trial_start(game_state)<block_end><def_stmt>trial_end self<arrow><none><block_start>stone_improvements=[reward-self._start_rewards[idx]<for>idx,reward self._end_rewards.items()]<line_sep>self.average_stone_improvement=(0.0<if><not>stone_improvements<else>np.mean(stone_improvements))<line_sep>super().trial_end()<line_sep>self.average_stone_improvement=0.0<line_sep>self._start_rewards={}<line_sep>self._end_rewards={}<block_end># pylint: enable=protected-access <block_end><class_stmt>AddMatrixEventTracker(SymbolicAlchemyTracker)<block_start>"""Adds a matrix event tracker per trial and add these to episode returns."""<line_sep>NAME='matrix_event'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self<block_start>self._event_trackers=<none><line_sep>self.game_state=<none><line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.game_state.trackers[self.name] event_tracker.MatrixEventTracker(1 1))<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'event_tracker':self.per_trial_tracker}<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>matrix_event_tracker=event_tracker.MatrixEventTracker(game_state.num_stones game_state.num_potions)<line_sep>self.game_state=game_state<line_sep>game_state.add_event_trackers([matrix_event_tracker])<line_sep>super().trial_start(game_state)<block_end><block_end><class_stmt>ItemGeneratedTracker(SymbolicAlchemyTracker)<block_start>"""Tracks the items generated during the episode."""<line_sep>NAME='items_generated'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self<block_start>self.trials=<none><line_sep>self.per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.trials utils.TrialItems(stones=[] potions=[]))<block_end>@property<def_stmt>per_trial_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'trials':self.per_trial_tracker}<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>self.trials=copy.deepcopy(game_state.existing_items())<line_sep>super().trial_start(game_state)<block_end><def_stmt>episode_returns self<arrow>Any<block_start>items=utils.EpisodeItems([] [])<line_sep>items.trials=super().episode_returns()['trials']<line_sep><return>items<block_end><block_end><class_stmt>ScoreTracker(StatTracker)<block_start>"""Adds a reward tracker and return reward per trial."""<line_sep>NAME='score'<line_sep>@property<def_stmt>name 
self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self reward_weights:RewardWeights<block_start>self._reward_weights=reward_weights<line_sep>self.prev_reward=0<line_sep>self.game_state=<none><def_stmt>latest_reward tracker *unused_args **unused_kwargs<block_start><del_stmt>unused_args unused_kwargs<line_sep>cumul_reward=tracker.game_state.trackers['reward'].reward<line_sep>reward=cumul_reward-tracker.prev_reward<line_sep>tracker.prev_reward=cumul_reward<line_sep><return>reward<block_end>super().__init__(get_stat=latest_reward)<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>reward_tracker=event_tracker.RewardTracker(self._reward_weights)<line_sep>self.game_state=game_state<line_sep>game_state.add_event_trackers([reward_tracker])<line_sep>self.prev_reward=0<line_sep>super().trial_start(game_state)<block_end><block_end><class_stmt>ItemsUsedTracker(StatTracker)<block_start>"""Tracks what stones and potions are used."""<line_sep>NAME='items_used'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self<block_start>self.prev_items=np.zeros((2 ) dtype=np.int)<line_sep>self.game_state:Optional[GameState]=<none><def_stmt>latest_items_used tracker:'ItemsUsedTracker' unused_action:utils.TypeBasedAction unused_outcome:Optional[PerceivedStone] unused_action_info:ActionInfo<arrow>np.ndarray<block_start><del_stmt>unused_action unused_outcome unused_action_info<line_sep>items_used=tracker.game_state.trackers['items_used']<line_sep>cumul_items_used=np.array([items_used.num_potions_used items_used.num_stones_used] dtype=np.int)<line_sep>items_used=cumul_items_used-tracker.prev_items<line_sep>tracker.prev_items=cumul_items_used<line_sep><return>items_used<block_end>super().__init__(get_stat=latest_items_used init_step_stat=np.zeros((2 ) dtype=np.int))<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>self.game_state=game_state<line_sep>game_state.add_event_trackers([event_tracker.ItemsUsedTracker()])<line_sep>self.prev_items=np.zeros((2 ) dtype=np.int)<line_sep>super().trial_start(game_state)<block_end><block_end>TrialExtraInfo=collections.namedtuple('TrialExtraInfo' 'num_world_states num_potion_maps num_stone_maps num_graphs')<class_stmt>BeliefStateTracker(SymbolicAlchemyTracker)<block_start>"""Adds a belief state which is updated to a symbolic alchemy bot."""<line_sep>NAME='belief_state'<line_sep>@property<def_stmt>name self<arrow>str<block_start><return>self.NAME<block_end><def_stmt>__init__ self precomputed:PrecomputedMaps env:'symbolic_alchemy.SymbolicAlchemy' init_belief_state=<none><block_start>self.precomputed=precomputed<line_sep>self.belief_state=<none><line_sep>self._init_belief_state=(init_belief_state<or>ideal_observer.BeliefStateWithRotation(self.precomputed))<line_sep>self._extra_info=<none><line_sep>self._world_states_per_action=<none><line_sep>self._env=env<line_sep>self.extra_info_per_action_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.extra_info TrialExtraInfo(num_world_states=0 num_stone_maps=0 num_potion_maps=0 num_graphs=0))<line_sep>self.extra_info_per_trial_tracker=SequenceStatsTracker(self <lambda>tracker:tracker.extra_info TrialExtraInfo(num_world_states=0 num_stone_maps=0 num_potion_maps=0 num_graphs=0))<block_end>@property<def_stmt>per_action_trackers self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'per_action_extra_info':self.extra_info_per_action_tracker}<block_end>@property<def_stmt>per_trial_trackers 
self<arrow>Dict[str SequenceStatsTracker]<block_start><return>{'extra_info':self.extra_info_per_trial_tracker}<block_end><def_stmt>episode_start self unused_chemistry:utils.Chemistry<block_start>self.belief_state=copy.deepcopy(self._init_belief_state)<line_sep>super().episode_start(unused_chemistry)<block_end><def_stmt>trial_start self game_state:GameState<arrow><none><block_start>current_stones=collections.Counter(self._env.perceived_stones())<line_sep>current_potions=collections.Counter(self._env.perceived_potions())<line_sep>self.belief_state.new_trial(current_stones current_potions)<line_sep>super().trial_start(game_state)<block_end><def_stmt>action_and_outcome self action:utils.TypeBasedAction outcome:Optional[PerceivedStone] action_info:ActionInfo<arrow><none># A stone value of -1 indicates that the action was invalid <block_start><if_stmt><not>action.using_stone<block_start>super().action_and_outcome(action outcome action_info)<line_sep><return><block_end><if_stmt>action.perceived_stone<is><none><block_start><raise>ValueError('Action says using stone but perceived stone is None.')<block_end># An outcome of -1 means the stone did not change. current_outcome=outcome<or>action.perceived_stone<assert_stmt>current_outcome<is><not><none><if_stmt>action.using_potion<block_start>self.belief_state.action_and_outcome(action.perceived_stone action.perceived_potion current_outcome self.precomputed)<block_end>super().action_and_outcome(action outcome action_info)<block_end>@property<def_stmt>extra_info self<arrow>TrialExtraInfo<block_start><return>TrialExtraInfo(num_world_states=self.belief_state.num_world_states num_potion_maps=self.belief_state.num_potion_maps num_stone_maps=self.belief_state.num_stone_maps num_graphs=self.belief_state.num_graphs)<block_end><def_stmt>get_partial_potion_map self index_to_perm_index:np.ndarray<arrow>stones_and_potions.PartialPotionMap<block_start><return>self.belief_state.partial_potion_map(index_to_perm_index)<block_end><def_stmt>get_partial_stone_map self<arrow>stones_and_potions.PartialStoneMap<block_start><return>self.belief_state.partial_stone_map()<block_end><def_stmt>get_partial_graph self possible_partial_graph_indices:np.ndarray<arrow>graphs.PartialGraph<block_start><return>self.belief_state.partial_graph(possible_partial_graph_indices)<block_end><block_end>
<def_stmt>glen generator<block_start>""" len implementation for generators. """<line_sep><return>sum(1<for>_ generator)<block_end>
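# Editor's illustrative sketch (not part of the original module): a quick check of glen. Note that glen consumes the generator, so each generator object can only be measured once. evens=(n<for>n range(10)<if>n%2<eq>0)<line_sep><assert_stmt>glen(evens)<eq>5<line_sep>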
__version__='0.5.2'<line_sep>
expected_output={'type':{'BYTE':{'allocated':7045122 'allocations':737743 'frees':734750 'requested':6877514 } 'BYTE*':{'allocated':29128 'allocations':345 'frees':309 'requested':27112 } 'PArray':{'allocated':0 'allocations':180 'frees':180 'requested':0 } 'Summary':{'allocated':7969955 'allocations':762405 'frees':759097 'requested':7784707 } '_btrace_ctx_global_':{'allocated':7864 'allocations':26 'frees':7 'requested':6800 } '_btrace_module_*':{'allocated':4389 'allocations':66 'frees':0 'requested':693 } '_dns_resolver_ctxt':{'allocated':128 'allocations':1 'frees':0 'requested':72 } 'bipc_channel_':{'allocated':136128 'allocations':412 'frees':404 'requested':135680 } 'bipc_rx_stream_':{'allocated':459328 'allocations':412 'frees':404 'requested':458880 } 'brand_context_s':{'allocated':0 'allocations':9 'frees':9 'requested':0 } 'chasfs_ctx_int_':{'allocated':12576 'allocations':6 'frees':3 'requested':12408 } 'confd_cs_node**':{'allocated':0 'allocations':84 'frees':84 'requested':0 } 'confd_event_node':{'allocated':0 'allocations':246 'frees':246 'requested':0 } 'confd_hkeypath':{'allocated':0 'allocations':129 'frees':129 'requested':0 } 'evContext_p':{'allocated':12640 'allocations':1 'frees':0 'requested':12584 } 'file_alloc_handle_s':{'allocated':1120 'allocations':14 'frees':0 'requested':336 } 'file_info':{'allocated':71536 'allocations':34 'frees':0 'requested':69632 } 'filter_key_s':{'allocated':0 'allocations':3 'frees':3 'requested':0 } 'green_args_s':{'allocated':0 'allocations':284 'frees':284 'requested':0 } 'green_assist_be_defer_':{'allocated':0 'allocations':3 'frees':3 'requested':0 } 'green_subscribe_tblcur':{'allocated':0 'allocations':3 'frees':3 'requested':0 } 'green_subscribe_toc_tb':{'allocated':104 'allocations':1 'frees':0 'requested':48 } 'hash_table_s':{'allocated':1664 'allocations':16 'frees':0 'requested':768 } 'hashtable':{'allocated':96 'allocations':1 'frees':0 'requested':40 } 'int32':{'allocated':0 'allocations':1 'frees':1 'requested':0 } 'lru_id_mgr_handle_':{'allocated':372 'allocations':1 'frees':0 'requested':316 } 'mdt_obj_mgr_t':{'allocated':88 'allocations':1 'frees':0 'requested':32 } 'mdtpub_sensor_periodic':{'allocated':0 'allocations':26 'frees':26 'requested':0 } 'mqipc_ctl_':{'allocated':2480 'allocations':79 'frees':69 'requested':1920 } 'netconf_write_buffer_s':{'allocated':0 'allocations':10402 'frees':10402 'requested':0 } 's_mdt_dc_filters_list':{'allocated':0 'allocations':29 'frees':29 'requested':0 } 's_mdt_filter_dc_choice':{'allocated':0 'allocations':29 'frees':29 'requested':0 } 's_yp_sensor_oc':{'allocated':0 'allocations':3 'frees':3 'requested':0 } 'section_data_s':{'allocated':0 'allocations':3 'frees':3 'requested':0 } 'sensor_data_collection':{'allocated':0 'allocations':10399 'frees':10399 'requested':0 } 'service_dir_connect_ac':{'allocated':0 'allocations':28 'frees':28 'requested':0 } 'tc_work_queue_s':{'allocated':0 'allocations':5 'frees':5 'requested':0 } 'tdl_epoch_s':{'allocated':152 'allocations':1 'frees':0 'requested':96 } 'tdldb_info_':{'allocated':3136 'allocations':14 'frees':0 'requested':2352 } 'tdldb_plat_data_s*':{'allocated':2432 'allocations':16 'frees':0 'requested':1536 } 'tdlhandle_s':{'allocated':53584 'allocations':1290 'frees':1256 'requested':51680 } 'tdlhandle_s*':{'allocated':0 'allocations':29 'frees':29 'requested':0 } 'vista_context_':{'allocated':125888 'allocations':30 'frees':0 'requested':124208 } } }<line_sep>
''' * @file RadixSort.py * @author (original JAVA) EAlexa and <NAME>, <EMAIL> * (conversion to Python) <NAME>, <EMAIL> * @date 29 Jun 2020 * @version 0.1 * @brief Radix sort implementation * See https://en.wikipedia.org/wiki/Radix_sort for details on runtime and complexity Radix sort * operates in O(nw) time, where n is the number of keys and w is the key length; w is * constant for primitive types like Integer, which gives it better performance than other * compare-based sort algorithms such as QuickSort '''<import_stmt>math<class_stmt>RadixSort()<block_start>""" Radix sort implementation for non-negative integers. Digits are processed from least to most significant using a stable counting sort at each digit position. """<def_stmt>__init__ self<block_start><pass><block_end><def_stmt>sort self values<block_start><if_stmt>values<is><none><block_start><return><block_end><return>self.radixSort(values)<block_end><def_stmt>getMax self array<block_start>maxNum=array[0]<for_stmt>i range(0 len(array))<block_start><if_stmt>array[i]<g>maxNum<block_start>maxNum=array[i]<block_end><block_end><return>maxNum<block_end><def_stmt>calculateNumberOfDigits self number<block_start><return>int(math.log(number 10)+1)<block_end><def_stmt>radixSort self numbers<block_start>""" Requires all numbers to be greater than or equal to 1 """<if_stmt>numbers<is><none><or>len(numbers)<le>1<block_start><return>numbers<block_end>maximum=self.getMax(numbers)<line_sep>numberOfDigits=self.calculateNumberOfDigits(maximum)<line_sep>placeValue=1<while_stmt>numberOfDigits<g>0<block_start>numberOfDigits<augsub>1<line_sep>numbers=self.countSort(numbers placeValue)<line_sep>placeValue<augmul>10<block_end><return>numbers<block_end><def_stmt>countSort self numbers placeValue<block_start>rangeParm=10<line_sep>frequency=[0]<times>rangeParm<line_sep>sortedValues=[<none>]<times>len(numbers)<for_stmt>i range(0 len(numbers))<block_start>digit=(numbers[i]<floordiv>placeValue)%rangeParm<line_sep>frequency[digit]<augadd>1<block_end><for_stmt>i range(1 rangeParm)<block_start>frequency[i]<augadd>frequency[i-1]<block_end><for_stmt>i range(len(numbers)-1 -1 -1)<block_start>digit=(numbers[i]<floordiv>placeValue)%rangeParm<line_sep>sortedValues[frequency[digit]-1]=numbers[i]<line_sep>frequency[digit]<augsub>1<block_end><return>sortedValues[:len(numbers)]<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>""" Example usage """<line_sep>sorter=RadixSort()<line_sep>numbers=[387 468 134 123 68 221 769 37 7 890 1 587]<line_sep>numbers=sorter.sort(numbers)<line_sep># Prints: # [1, 7, 37, 68, 123, 134, 221, 387, 468, 587, 769, 890] print(numbers)<block_end>
# ============================================================================== # Copyright (c) Microsoft. All rights reserved. # Licensed under the MIT license. See LICENSE.md file in the project root # for full license information. # ============================================================================== <import_stmt>cntk<as>C<def_stmt>convert root_func filter converter<block_start>''' Clones the graph underlying root_func and in the clone substitutes all Functions obtained by applying 'filter', with a new Function obtained by calling the specified 'converter' Args: root_func: a root function of a graph to be cloned and converted filter: a lambda for filtering out the Functions to be converted converter: a lambda for obtaining the substitute for each of the Functions to be converted Returns: Cloned and converted Function (graph) '''<line_sep># recursively convert for blocks in root_func blocks=C.logging.graph.depth_first_search(root_func <lambda>x:type(x)<eq>C.Function<and>x.root_function.is_block depth=0)<for_stmt>i range(len(blocks))# search for blocks again in case block input/output has been modified <block_start>blocks1=C.logging.graph.depth_first_search(root_func <lambda>x:type(x)<eq>C.Function<and>x.root_function.is_block depth=0)<line_sep>block=blocks1[i]# assuming depth_first_search order to be stable, so use the old index on new search results block_root=C.as_composite(block.block_root)<line_sep>new_block_root=convert(block_root filter converter)<if_stmt>new_block_root<ne>block_root<block_start>block_arguments_mapping=dict(block.block_arguments_mapping)<line_sep>new_block_arguments_mapping=[]<for_stmt>arg,new_arg zip(block_root.arguments new_block_root.arguments)<block_start>new_block_arguments_mapping<augadd>[(new_arg block_arguments_mapping[arg])]<block_end>new_block=C.as_block(new_block_root new_block_arguments_mapping block.op_name block.name)<if_stmt>all([x<not><in>root_func.outputs<for>x block.outputs])<or>all([x<in>block.outputs<for>x root_func.outputs])<block_start>root_func=root_func.clone(C.CloneMethod.share dict(zip(block.outputs new_block.outputs)))<block_end><else_stmt><block_start>new_outputs=[new_block.outputs[block.outputs.index(x)]<if>x<in>block.outputs<else><none><for>x root_func.outputs]<line_sep>root_func_nonreplaced=C.combine([x<for>x root_func.outputs<if>x<not><in>block.outputs])<line_sep>root_func_nonreplaced_clone=root_func_nonreplaced.clone(C.CloneMethod.share dict(zip(block.outputs new_block.outputs)))<line_sep>idx=0<for_stmt>nonreplaced_output root_func_nonreplaced_clone.outputs<block_start><while_stmt>new_outputs[idx]<block_start>idx<augadd>1<block_end>new_outputs[idx]=nonreplaced_output<block_end>root_func=C.combine(new_outputs)<block_end><block_end><block_end># replace all Function instances under root_func that pass the specified 'filter' functions_to_convert=C.logging.graph.depth_first_search(root_func filter depth=0)<for_stmt>i range(len(functions_to_convert))# The graph could be modified already by this function, so we need to rescan to the new set. <block_start>functions_to_convert1=C.logging.graph.depth_first_search(root_func filter depth=0)<line_sep># We are using a filter passed in by the caller. So once a function is converted, we may not # get the same number of functions again, so we need to use correct index depending on the new size. index=0<if_stmt>len(functions_to_convert)<g>len(functions_to_convert1)<block_start><assert_stmt>(len(functions_to_convert)-len(functions_to_convert1)<eq>i)# Only one conversion at a time. 
# index = 0 will work for this case, we are picking the first function from the new list. <block_end><elif_stmt>len(functions_to_convert)<eq>len(functions_to_convert1)<block_start>index=i# here we pick the current index of the for loop. <block_end><else_stmt><block_start><raise>RuntimeError("The conversion adds another possible conversion(s). Stopping infinite conversions.")<block_end>function_to_convert=functions_to_convert1[index]<line_sep>converted=converter(function_to_convert)<if_stmt><not>function_to_convert.output<in>root_func.outputs<block_start>root_func=root_func.clone(C.CloneMethod.share {function_to_convert.output:converted.output})<block_end><else_stmt># if cudnn_rnn output is the root_func output, just use converted as root_func and no clone needed <block_start><if_stmt>len(root_func.outputs)<g>1<block_start>root_func=C.combine([converted<if>x<eq>function_to_convert.output<else>x<for>x root_func.outputs])<block_end><else_stmt><block_start>root_func=converted<block_end><block_end><block_end><return>root_func<block_end>
<import_stmt>os<import_stmt>pytest<line_sep># Custom pytest mark decorator for unit tests that belong to the incubator. skip_incubator=pytest.mark.skipif('RUN_INCUBATOR_TESTS'<not><in>os.environ reason="Add environment variable RUN_INCUBATOR_TESTS to run this test since \ modules and libraries in the incubator may change abruptly without notice.")<line_sep>
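# Editor's illustrative sketch (hypothetical test, not part of the original file): how the marker above is applied. The import path 'conftest' is an assumption; import the marker from wherever this module actually lives. <import_from_stmt>conftest skip_incubator<line_sep>@skip_incubator<def_stmt>test_incubator_only_feature <block_start><assert_stmt>1+1<eq>2<block_end>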
<class_stmt>QueryDeviceGroupsInDTO(object)<block_start><def_stmt>__init__ self<block_start>self.accessAppId=<none><line_sep>self.pageNo=<none><line_sep>self.pageSize=<none><line_sep>self.name=<none><block_end><def_stmt>getAccessAppId self<block_start><return>self.accessAppId<block_end><def_stmt>setAccessAppId self accessAppId<block_start>self.accessAppId=accessAppId<block_end><def_stmt>getPageNo self<block_start><return>self.pageNo<block_end><def_stmt>setPageNo self pageNo<block_start>self.pageNo=pageNo<block_end><def_stmt>getPageSize self<block_start><return>self.pageSize<block_end><def_stmt>setPageSize self pageSize<block_start>self.pageSize=pageSize<block_end><def_stmt>getName self<block_start><return>self.name<block_end><def_stmt>setName self name<block_start>self.name=name<block_end><block_end>
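# Editor's illustrative sketch (placeholder values, not from the original file): populating the DTO before passing it to a device-group query call. dto=QueryDeviceGroupsInDTO()<line_sep>dto.setAccessAppId('yourAppId')<line_sep>dto.setPageNo(1)<line_sep>dto.setPageSize(50)<line_sep>dto.setName('gateway-group')<line_sep>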
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** <import_stmt>warnings<import_stmt>pulumi<import_stmt>pulumi.runtime<import_from_stmt>typing Any Mapping Optional Sequence Union overload<import_from_stmt>.. _utilities<import_from_stmt>. outputs<import_from_stmt>._inputs *<line_sep>__all__=['InboundSamlConfigArgs' 'InboundSamlConfig']<line_sep>@pulumi.input_type<class_stmt>InboundSamlConfigArgs<block_start><def_stmt>__init__ __self__ * display_name:pulumi.Input[str] idp_config:pulumi.Input['InboundSamlConfigIdpConfigArgs'] sp_config:pulumi.Input['InboundSamlConfigSpConfigArgs'] enabled:Optional[pulumi.Input[bool]]=<none> name:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[str]]=<none><block_start>""" The set of arguments for constructing a InboundSamlConfig resource. :param pulumi.Input[str] display_name: Human friendly display name. :param pulumi.Input['InboundSamlConfigIdpConfigArgs'] idp_config: SAML IdP configuration when the project acts as the relying party Structure is documented below. :param pulumi.Input['InboundSamlConfigSpConfigArgs'] sp_config: SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. :param pulumi.Input[bool] enabled: If this config allows users to sign in with the provider. :param pulumi.Input[str] name: The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """<line_sep>pulumi.set(__self__ "display_name" display_name)<line_sep>pulumi.set(__self__ "idp_config" idp_config)<line_sep>pulumi.set(__self__ "sp_config" sp_config)<if_stmt>enabled<is><not><none><block_start>pulumi.set(__self__ "enabled" enabled)<block_end><if_stmt>name<is><not><none><block_start>pulumi.set(__self__ "name" name)<block_end><if_stmt>project<is><not><none><block_start>pulumi.set(__self__ "project" project)<block_end><block_end>@[email protected](name="displayName")<def_stmt>display_name self<arrow>pulumi.Input[str]<block_start>""" Human friendly display name. """<line_sep><return>pulumi.get(self "display_name")<block_end>@display_name.setter<def_stmt>display_name self value:pulumi.Input[str]<block_start>pulumi.set(self "display_name" value)<block_end>@[email protected](name="idpConfig")<def_stmt>idp_config self<arrow>pulumi.Input['InboundSamlConfigIdpConfigArgs']<block_start>""" SAML IdP configuration when the project acts as the relying party Structure is documented below. """<line_sep><return>pulumi.get(self "idp_config")<block_end>@idp_config.setter<def_stmt>idp_config self value:pulumi.Input['InboundSamlConfigIdpConfigArgs']<block_start>pulumi.set(self "idp_config" value)<block_end>@[email protected](name="spConfig")<def_stmt>sp_config self<arrow>pulumi.Input['InboundSamlConfigSpConfigArgs']<block_start>""" SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. 
"""<line_sep><return>pulumi.get(self "sp_config")<block_end>@sp_config.setter<def_stmt>sp_config self value:pulumi.Input['InboundSamlConfigSpConfigArgs']<block_start>pulumi.set(self "sp_config" value)<block_end>@[email protected]<def_stmt>enabled self<arrow>Optional[pulumi.Input[bool]]<block_start>""" If this config allows users to sign in with the provider. """<line_sep><return>pulumi.get(self "enabled")<block_end>@enabled.setter<def_stmt>enabled self value:Optional[pulumi.Input[bool]]<block_start>pulumi.set(self "enabled" value)<block_end>@[email protected]<def_stmt>name self<arrow>Optional[pulumi.Input[str]]<block_start>""" The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. """<line_sep><return>pulumi.get(self "name")<block_end>@name.setter<def_stmt>name self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "name" value)<block_end>@[email protected]<def_stmt>project self<arrow>Optional[pulumi.Input[str]]<block_start>""" The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """<line_sep><return>pulumi.get(self "project")<block_end>@project.setter<def_stmt>project self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "project" value)<block_end><block_end>@pulumi.input_type<class_stmt>_InboundSamlConfigState<block_start><def_stmt>__init__ __self__ * display_name:Optional[pulumi.Input[str]]=<none> enabled:Optional[pulumi.Input[bool]]=<none> idp_config:Optional[pulumi.Input['InboundSamlConfigIdpConfigArgs']]=<none> name:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[str]]=<none> sp_config:Optional[pulumi.Input['InboundSamlConfigSpConfigArgs']]=<none><block_start>""" Input properties used for looking up and filtering InboundSamlConfig resources. :param pulumi.Input[str] display_name: Human friendly display name. :param pulumi.Input[bool] enabled: If this config allows users to sign in with the provider. :param pulumi.Input['InboundSamlConfigIdpConfigArgs'] idp_config: SAML IdP configuration when the project acts as the relying party Structure is documented below. :param pulumi.Input[str] name: The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input['InboundSamlConfigSpConfigArgs'] sp_config: SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. 
"""<if_stmt>display_name<is><not><none><block_start>pulumi.set(__self__ "display_name" display_name)<block_end><if_stmt>enabled<is><not><none><block_start>pulumi.set(__self__ "enabled" enabled)<block_end><if_stmt>idp_config<is><not><none><block_start>pulumi.set(__self__ "idp_config" idp_config)<block_end><if_stmt>name<is><not><none><block_start>pulumi.set(__self__ "name" name)<block_end><if_stmt>project<is><not><none><block_start>pulumi.set(__self__ "project" project)<block_end><if_stmt>sp_config<is><not><none><block_start>pulumi.set(__self__ "sp_config" sp_config)<block_end><block_end>@[email protected](name="displayName")<def_stmt>display_name self<arrow>Optional[pulumi.Input[str]]<block_start>""" Human friendly display name. """<line_sep><return>pulumi.get(self "display_name")<block_end>@display_name.setter<def_stmt>display_name self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "display_name" value)<block_end>@[email protected]<def_stmt>enabled self<arrow>Optional[pulumi.Input[bool]]<block_start>""" If this config allows users to sign in with the provider. """<line_sep><return>pulumi.get(self "enabled")<block_end>@enabled.setter<def_stmt>enabled self value:Optional[pulumi.Input[bool]]<block_start>pulumi.set(self "enabled" value)<block_end>@[email protected](name="idpConfig")<def_stmt>idp_config self<arrow>Optional[pulumi.Input['InboundSamlConfigIdpConfigArgs']]<block_start>""" SAML IdP configuration when the project acts as the relying party Structure is documented below. """<line_sep><return>pulumi.get(self "idp_config")<block_end>@idp_config.setter<def_stmt>idp_config self value:Optional[pulumi.Input['InboundSamlConfigIdpConfigArgs']]<block_start>pulumi.set(self "idp_config" value)<block_end>@[email protected]<def_stmt>name self<arrow>Optional[pulumi.Input[str]]<block_start>""" The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. """<line_sep><return>pulumi.get(self "name")<block_end>@name.setter<def_stmt>name self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "name" value)<block_end>@[email protected]<def_stmt>project self<arrow>Optional[pulumi.Input[str]]<block_start>""" The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """<line_sep><return>pulumi.get(self "project")<block_end>@project.setter<def_stmt>project self value:Optional[pulumi.Input[str]]<block_start>pulumi.set(self "project" value)<block_end>@[email protected](name="spConfig")<def_stmt>sp_config self<arrow>Optional[pulumi.Input['InboundSamlConfigSpConfigArgs']]<block_start>""" SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. 
"""<line_sep><return>pulumi.get(self "sp_config")<block_end>@sp_config.setter<def_stmt>sp_config self value:Optional[pulumi.Input['InboundSamlConfigSpConfigArgs']]<block_start>pulumi.set(self "sp_config" value)<block_end><block_end><class_stmt>InboundSamlConfig(pulumi.CustomResource)<block_start>@overload<def_stmt>__init__ __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> display_name:Optional[pulumi.Input[str]]=<none> enabled:Optional[pulumi.Input[bool]]=<none> idp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigIdpConfigArgs']]]=<none> name:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[str]]=<none> sp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigSpConfigArgs']]]=<none> __props__=<none><block_start>""" Inbound SAML configuration for a Identity Toolkit project. You must enable the [Google Identity Platform](https://console.cloud.google.com/marketplace/details/google-cloud-platform/customer-identity) in the marketplace prior to using this resource. ## Example Usage ### Identity Platform Inbound Saml Config Basic ```python import pulumi import pulumi_gcp as gcp saml_config = gcp.identityplatform.InboundSamlConfig("samlConfig", display_name="<NAME>", idp_config=gcp.identityplatform.InboundSamlConfigIdpConfigArgs( idp_entity_id="tf-idp", sign_request=True, sso_url="https://example.com", idp_certificates=[gcp.identityplatform.InboundSamlConfigIdpConfigIdpCertificateArgs( x509_certificate=(lambda path: open(path).read())("test-fixtures/rsa_cert.pem"), )], ), sp_config=gcp.identityplatform.InboundSamlConfigSpConfigArgs( sp_entity_id="tf-sp", callback_uri="https://example.com", )) ``` ## Import InboundSamlConfig can be imported using any of these accepted formats ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default projects/{{project}}/inboundSamlConfigs/{{name}} ``` ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default {{project}}/{{name}} ``` ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default {{name}} ``` :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] display_name: Human friendly display name. :param pulumi.Input[bool] enabled: If this config allows users to sign in with the provider. :param pulumi.Input[pulumi.InputType['InboundSamlConfigIdpConfigArgs']] idp_config: SAML IdP configuration when the project acts as the relying party Structure is documented below. :param pulumi.Input[str] name: The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[pulumi.InputType['InboundSamlConfigSpConfigArgs']] sp_config: SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. """<line_sep><ellipsis><block_end>@overload<def_stmt>__init__ __self__ resource_name:str args:InboundSamlConfigArgs opts:Optional[pulumi.ResourceOptions]=<none><block_start>""" Inbound SAML configuration for a Identity Toolkit project. 
You must enable the [Google Identity Platform](https://console.cloud.google.com/marketplace/details/google-cloud-platform/customer-identity) in the marketplace prior to using this resource. ## Example Usage ### Identity Platform Inbound Saml Config Basic ```python import pulumi import pulumi_gcp as gcp saml_config = gcp.identityplatform.InboundSamlConfig("samlConfig", display_name="<NAME>", idp_config=gcp.identityplatform.InboundSamlConfigIdpConfigArgs( idp_entity_id="tf-idp", sign_request=True, sso_url="https://example.com", idp_certificates=[gcp.identityplatform.InboundSamlConfigIdpConfigIdpCertificateArgs( x509_certificate=(lambda path: open(path).read())("test-fixtures/rsa_cert.pem"), )], ), sp_config=gcp.identityplatform.InboundSamlConfigSpConfigArgs( sp_entity_id="tf-sp", callback_uri="https://example.com", )) ``` ## Import InboundSamlConfig can be imported using any of these accepted formats ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default projects/{{project}}/inboundSamlConfigs/{{name}} ``` ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default {{project}}/{{name}} ``` ```sh $ pulumi import gcp:identityplatform/inboundSamlConfig:InboundSamlConfig default {{name}} ``` :param str resource_name: The name of the resource. :param InboundSamlConfigArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """<line_sep><ellipsis><block_end><def_stmt>__init__ __self__ resource_name:str *args **kwargs<block_start>resource_args,opts=_utilities.get_resource_args_opts(InboundSamlConfigArgs pulumi.ResourceOptions *args **kwargs)<if_stmt>resource_args<is><not><none><block_start>__self__._internal_init(resource_name opts **resource_args.__dict__)<block_end><else_stmt><block_start>__self__._internal_init(resource_name *args **kwargs)<block_end><block_end><def_stmt>_internal_init __self__ resource_name:str opts:Optional[pulumi.ResourceOptions]=<none> display_name:Optional[pulumi.Input[str]]=<none> enabled:Optional[pulumi.Input[bool]]=<none> idp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigIdpConfigArgs']]]=<none> name:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[str]]=<none> sp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigSpConfigArgs']]]=<none> __props__=<none><block_start><if_stmt>opts<is><none><block_start>opts=pulumi.ResourceOptions()<block_end><if_stmt><not>isinstance(opts pulumi.ResourceOptions)<block_start><raise>TypeError('Expected resource options to be a ResourceOptions instance')<block_end><if_stmt>opts.version<is><none><block_start>opts.version=_utilities.get_version()<block_end><if_stmt>opts.id<is><none><block_start><if_stmt>__props__<is><not><none><block_start><raise>TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')<block_end>__props__=InboundSamlConfigArgs.__new__(InboundSamlConfigArgs)<if_stmt>display_name<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 'display_name'")<block_end>__props__.__dict__["display_name"]=display_name<line_sep>__props__.__dict__["enabled"]=enabled<if_stmt>idp_config<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 
'idp_config'")<block_end>__props__.__dict__["idp_config"]=idp_config<line_sep>__props__.__dict__["name"]=name<line_sep>__props__.__dict__["project"]=project<if_stmt>sp_config<is><none><and><not>opts.urn<block_start><raise>TypeError("Missing required property 'sp_config'")<block_end>__props__.__dict__["sp_config"]=sp_config<block_end>super(InboundSamlConfig __self__).__init__('gcp:identityplatform/inboundSamlConfig:InboundSamlConfig' resource_name __props__ opts)<block_end>@staticmethod<def_stmt>get resource_name:str id:pulumi.Input[str] opts:Optional[pulumi.ResourceOptions]=<none> display_name:Optional[pulumi.Input[str]]=<none> enabled:Optional[pulumi.Input[bool]]=<none> idp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigIdpConfigArgs']]]=<none> name:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[str]]=<none> sp_config:Optional[pulumi.Input[pulumi.InputType['InboundSamlConfigSpConfigArgs']]]=<none><arrow>'InboundSamlConfig'<block_start>""" Get an existing InboundSamlConfig resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] display_name: Human friendly display name. :param pulumi.Input[bool] enabled: If this config allows users to sign in with the provider. :param pulumi.Input[pulumi.InputType['InboundSamlConfigIdpConfigArgs']] idp_config: SAML IdP configuration when the project acts as the relying party Structure is documented below. :param pulumi.Input[str] name: The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. :param pulumi.Input[str] project: The ID of the project in which the resource belongs. If it is not provided, the provider project is used. :param pulumi.Input[pulumi.InputType['InboundSamlConfigSpConfigArgs']] sp_config: SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. """<line_sep>opts=pulumi.ResourceOptions.merge(opts pulumi.ResourceOptions(id=id))<line_sep>__props__=_InboundSamlConfigState.__new__(_InboundSamlConfigState)<line_sep>__props__.__dict__["display_name"]=display_name<line_sep>__props__.__dict__["enabled"]=enabled<line_sep>__props__.__dict__["idp_config"]=idp_config<line_sep>__props__.__dict__["name"]=name<line_sep>__props__.__dict__["project"]=project<line_sep>__props__.__dict__["sp_config"]=sp_config<line_sep><return>InboundSamlConfig(resource_name opts=opts __props__=__props__)<block_end>@[email protected](name="displayName")<def_stmt>display_name self<arrow>pulumi.Output[str]<block_start>""" Human friendly display name. """<line_sep><return>pulumi.get(self "display_name")<block_end>@[email protected]<def_stmt>enabled self<arrow>pulumi.Output[Optional[bool]]<block_start>""" If this config allows users to sign in with the provider. 
"""<line_sep><return>pulumi.get(self "enabled")<block_end>@[email protected](name="idpConfig")<def_stmt>idp_config self<arrow>pulumi.Output['outputs.InboundSamlConfigIdpConfig']<block_start>""" SAML IdP configuration when the project acts as the relying party Structure is documented below. """<line_sep><return>pulumi.get(self "idp_config")<block_end>@[email protected]<def_stmt>name self<arrow>pulumi.Output[str]<block_start>""" The name of the InboundSamlConfig resource. Must start with 'saml.' and can only have alphanumeric characters, hyphens, underscores or periods. The part after 'saml.' must also start with a lowercase letter, end with an alphanumeric character, and have at least 2 characters. """<line_sep><return>pulumi.get(self "name")<block_end>@[email protected]<def_stmt>project self<arrow>pulumi.Output[str]<block_start>""" The ID of the project in which the resource belongs. If it is not provided, the provider project is used. """<line_sep><return>pulumi.get(self "project")<block_end>@[email protected](name="spConfig")<def_stmt>sp_config self<arrow>pulumi.Output['outputs.InboundSamlConfigSpConfig']<block_start>""" SAML SP (Service Provider) configuration when the project acts as the relying party to receive and accept an authentication assertion issued by a SAML identity provider. Structure is documented below. """<line_sep><return>pulumi.get(self "sp_config")<block_end><block_end>
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # cf. https://github.com/pypa/manylinux/issues/53 GOOD_SSL="https://google.com"<line_sep>BAD_SSL="https://self-signed.badssl.com"<import_stmt>sys<line_sep>print("Testing SSL certificate checking for Python:" sys.version)<if_stmt>(sys.version_info[:2]<l>(2 7)<or>sys.version_info[:2]<l>(3 4))<block_start>print("This version never checks SSL certs; skipping tests")<line_sep>sys.exit(0)<block_end><if_stmt>sys.version_info[0]<ge>3<block_start><import_from_stmt>urllib.request urlopen<line_sep>EXC=OSError<block_end><else_stmt><block_start><import_from_stmt>urllib urlopen<line_sep>EXC=IOError<block_end>print("Connecting to %s should work"%(GOOD_SSL ))<line_sep>urlopen(GOOD_SSL)<line_sep>print("...it did, yay.")<line_sep>print("Connecting to %s should fail"%(BAD_SSL ))<try_stmt><block_start>urlopen(BAD_SSL)<line_sep># If we get here then we failed: print("...it DIDN'T!!!!!11!!1one!")<line_sep>sys.exit(1)<block_end><except_stmt>EXC<block_start>print("...it did, yay.")<block_end>