import numpy as np
import pandas as pd
from itertools import product

from tensortrade.feed import Stream
from tests.utils.ops import assert_op

arrays = [
    [-1.5, 2.2, -3.3, 4.7, -5.1, 7.45, 8.8],
    [-1.2, 2.3, np.nan, 4.4, -5.5, np.nan, np.nan],
]


def test_ceil():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.ceil().rename("w")
        expected = list(pd.Series(array).apply(np.ceil))
        assert_op([w], expected)


def test_floor():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.floor().rename("w")
        expected = list(pd.Series(array).apply(np.floor))
        assert_op([w], expected)


def test_sqrt():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.sqrt().rename("w")
        expected = list(pd.Series(array).apply(np.sqrt))
        assert_op([w], expected)


def test_square():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.square().rename("w")
        expected = list(pd.Series(array).apply(np.square))
        assert_op([w], expected)


def test_log():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.log().rename("w")
        expected = list(pd.Series(array).apply(np.log))
        assert_op([w], expected)


def test_pct_change():
    configs = [
        {"periods": 1, "fill_method": None},
        {"periods": 1, "fill_method": "pad"},
        {"periods": 1, "fill_method": "ffill"},
        {"periods": 2, "fill_method": None},
        {"periods": 2, "fill_method": "pad"},
        {"periods": 2, "fill_method": "ffill"},
    ]

    for array, config in product(arrays, configs):
        s = Stream.source(array, dtype="float")
        w = s.pct_change(**config).rename("w")
        expected = list(pd.Series(array).pct_change(**config))
        print(config)
        assert_op([w], expected)


def test_diff():
    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.diff(periods=1).rename("w")
        expected = list(pd.Series(array).diff(periods=1))
        assert_op([w], expected)

    for array in arrays:
        s = Stream.source(array, dtype="float")
        w = s.diff(periods=2).rename("w")
        expected = list(pd.Series(array).diff(periods=2))
        assert_op([w], expected)
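# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test file): one plausible
# implementation of an `assert_op`-style helper, pushing the streams through
# a DataFeed and comparing with NaN-aware equality. DataFeed, compile() and
# next() are tensortrade feed APIs; the helper's name and exact semantics
# here are assumptions based on how the tests above use it.
import numpy as np
from tensortrade.feed import DataFeed


def _assert_op_sketch(streams, expected):
    feed = DataFeed(streams)
    feed.compile()
    for e in expected:
        a = feed.next()["w"]  # the streams above are all renamed to "w"
        assert (np.isnan(a) and np.isnan(e)) or np.isclose(a, e)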
import redis

# r = redis.StrictRedis(host='localhost', port=6379, db=0)
# r.set('foo', 'bar')
# assert r.get('foo') == 'bar'
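# Note (added): with redis-py on Python 3, GET returns bytes, so the
# commented assertion above would compare against b'bar' unless the client
# is created with decode_responses=True:
#
# r = redis.StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
# r.set('foo', 'bar')
# assert r.get('foo') == 'bar'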
'''
Copyright 2017 Dell Inc. or its subsidiaries. All Rights Reserved.
Author(s): <NAME>

This script tests the minimum-payload base case of the RackHD API 2.0 OS
bootstrap workflows using the NFS mount or local repo method.

This routine runs OS bootstrap jobs simultaneously on multiple nodes.
For 12 tests to run, 12 nodes are required in the stack. If there are fewer
than that, tests will be skipped.

This test takes 15-20 minutes to run.

OS bootstrap tests require the following entries in config/install_default.json.
If an entry is missing, then that test will be skipped.
The order of entries determines the priority of the tests. The first one runs
on the first available node, etc.

    "os-install": [
        {
            "kvm": false,
            "path": "/repo/esxi/5.5",
            "version": "5.5",
            "workflow": "Graph.InstallESXi"
        },
        {
            "kvm": false,
            "path": "/repo/esxi/6.0",
            "version": "6.0",
            "workflow": "Graph.InstallESXi"
        },
        {
            "kvm": false,
            "path": "/repo/centos/6.5",
            "version": "6.5",
            "workflow": "Graph.InstallCentOS"
        },
        {
            "kvm": false,
            "path": "/repo/centos/7.0",
            "version": "7.0",
            "workflow": "Graph.InstallCentOS"
        },
        {
            "kvm": false,
            "path": "/repo/rhel/7.0",
            "version": "7.0",
            "workflow": "Graph.InstallRHEL"
        },
        {
            "kvm": false,
            "path": "/repo/suse/42.1",
            "version": "42.1",
            "workflow": "Graph.InstallSUSE"
        },
        {
            "kvm": false,
            "path": "/repo/ubuntu",
            "version": "trusty",
            "workflow": "Graph.InstallUbuntu"
        },
        {
            "kvm": false,
            "path": "/repo/coreos",
            "version": "899.17.0",
            "workflow": "Graph.InstallCoreOS"
        },
        {
            "kvm": true,
            "path": "/repo/rhel/7.0",
            "version": "7.0",
            "workflow": "Graph.InstallRHEL"
        },
        {
            "kvm": true,
            "path": "/repo/centos/6.5",
            "version": "6.5",
            "workflow": "Graph.InstallCentOS"
        },
        {
            "kvm": false,
            "path": "/repo/winpe",
            "productkey": "<KEY>",
            "smbPassword": "<PASSWORD>",
            "smbRepo": "\\windowsServer2012",
            "smbUser": "onrack",
            "version": "2012",
            "workflow": "Graph.InstallWindowsServer"
        }
    ],

The OS repos are to be installed under the 'on-http/static/http' directory,
reflecting the paths above. These can be files, links, or NFS mounts to remote
repos in the following dirs:

    on-http/static/http/windowsServer2012 -- requires Samba share on RackHD server
    on-http/static/http/repo/centos/6.5
    on-http/static/http/repo/centos/7.0
    on-http/static/http/repo/rhel/7.0
    on-http/static/http/repo/suse/42.1
    on-http/static/http/repo/esxi/5.5
    on-http/static/http/repo/esxi/6.0
    on-http/static/http/repo/winpe
    on-http/static/http/repo/coreos/899.17.0
'''

import fit_path  # NOQA: unused import
from nose.plugins.attrib import attr
import fit_common
import flogging
import sys

log = flogging.get_loggers()

# This gets the list of nodes
NODECATALOG = fit_common.node_select()

# dict containing bootstrap workflow IDs and states
NODE_STATUS = {}

# global timer
START_TIME = fit_common.time.time()

# collect repo information from config files
OSLIST = fit_common.fitcfg()["install-config"]["os-install"]

# download RackHD config from host
rackhdresult = fit_common.rackhdapi('/api/2.0/config')
if rackhdresult['status'] != 200:
    log.error(" Unable to contact host, exiting. ")
    sys.exit(255)
rackhdconfig = rackhdresult['json']
statichost = "http://" + str(rackhdconfig['fileServerAddress']) + ":" + str(rackhdconfig['fileServerPort'])


# this routine polls a workflow task ID for completion
def wait_for_workflow_complete(taskid):
    result = None
    while fit_common.time.time() - START_TIME < 1800 or result is None:  # limit test to 30 minutes
        result = fit_common.rackhdapi("/api/2.0/workflows/" + taskid)
        if result['status'] != 200:
            log.error(" HTTP error: " + result['text'])
            return False
        if result['json']['status'] == 'running' or result['json']['status'] == 'pending':
            log.info_5("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
            fit_common.time.sleep(30)
        elif result['json']['status'] == 'succeeded':
            log.info_5("{} workflow status: {}".format(result['json']['injectableName'], result['json']['status']))
            return True
        else:
            log.error(" Workflow failed: " + result['text'])
            return False
    log.error(" Workflow Timeout: " + result['text'])
    return False


# helper routine to return the task ID associated with the running bootstrap workflow
def node_taskid(workflow, version, kvm):
    for entry in NODE_STATUS:
        if NODE_STATUS[entry]['workflow'] == workflow \
                and str(version) in NODE_STATUS[entry]['version'] \
                and NODE_STATUS[entry]['kvm'] == kvm:
            return NODE_STATUS[entry]['id']
    return ""


# Match up tests to node IDs to feed skip decorators
index = 0  # node index
for item in OSLIST:
    if index < len(NODECATALOG):
        NODE_STATUS[NODECATALOG[index]] = \
            {"workflow": item['workflow'], "version": item['version'], "kvm": item['kvm'], "id": "Pending"}
    index += 1


# ------------------------ Tests -------------------------------------


@attr(all=False)
class api20_bootstrap_base(fit_common.unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # run all OS install workflows first
        nodeindex = 0
        for item in OSLIST:
            # if OS proxy entry exists in RackHD config, run bootstrap against selected node
            if nodeindex < len(NODECATALOG):
                # delete active workflows for specified node
                fit_common.cancel_active_workflows(NODECATALOG[nodeindex])
                # base payload common to all Linux
                payload_data = {"options": {"defaults": {
                    "version": item['version'],
                    "kvm": item['kvm'],
                    "repo": statichost + item['path'],
                    "rootPassword": "<PASSWORD>",
                    "hostname": "rackhdnode",
                    "users": [{"name": "rackhduser",
                               "password": "<PASSWORD>!",
                               "uid": 1010}]}}}
                # OS specific payload requirements
                if item['workflow'] == "Graph.InstallUbuntu":
                    payload_data["options"]["defaults"]["baseUrl"] = "install/netboot/ubuntu-installer/amd64"
                    payload_data["options"]["defaults"]["kargs"] = \
                        {"live-installer/net-image": statichost + item['path'] + "/ubuntu/install/filesystem.squashfs"}
                if item['workflow'] == "Graph.InstallWindowsServer":
                    payload_data["options"]["defaults"]["productkey"] = item['productkey']
                    payload_data["options"]["defaults"]["smbUser"] = item['smbUser']
                    payload_data["options"]["defaults"]["smbPassword"] = item['smbPassword']
                    payload_data["options"]["defaults"]["smbRepo"] = \
                        "\\\\" + str(rackhdconfig['apiServerAddress']) + item['smbRepo']
                    payload_data["options"]["defaults"]["username"] = "rackhduser"
                    payload_data["options"]["defaults"]["password"] = "RackHDRocks!"
                    payload_data["options"]["defaults"].pop('rootPassword', None)
                    payload_data["options"]["defaults"].pop('users', None)
                    payload_data["options"]["defaults"].pop('kvm', None)
                    payload_data["options"]["defaults"].pop('version', None)
                # run workflow
                result = fit_common.rackhdapi('/api/2.0/nodes/'
                                              + NODECATALOG[nodeindex]
                                              + '/workflows?name=' + item['workflow'],
                                              action='post', payload=payload_data)
                if result['status'] == 201:
                    # this saves the task and node IDs
                    NODE_STATUS[NODECATALOG[nodeindex]] = \
                        {"workflow": item['workflow'],
                         "version": item['version'],
                         "kvm": item['kvm'],
                         "id": result['json']['instanceId']}
                    log.info_5(" TaskID: " + result['json']['instanceId'])
                    log.info_5(" Payload: " + fit_common.json.dumps(payload_data))
                else:
                    # if no task ID is returned put 'failed' in ID field
                    NODE_STATUS[NODECATALOG[nodeindex]] = \
                        {"workflow": item['workflow'],
                         "version": item['version'],
                         "kvm": item['kvm'],
                         'id': "failed"}
                    log.error(" OS install " + item['workflow'] + " on node " + NODECATALOG[nodeindex] + " failed! ")
                    log.error(" Error text: " + result['text'])
                    log.error(" Payload: " + fit_common.json.dumps(payload_data))
                # increment node index to run next bootstrap
                nodeindex += 1

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallESXi", "5.", False) != '',
                                    "Skipping ESXi5.5, repo not configured or node unavailable")
    def test_api20_bootstrap_esxi5(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallESXi", "5.", False)), "ESXi5.5 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallESXi", "6.", False) != '',
                                    "Skipping ESXi6.0, repo not configured or node unavailable")
    def test_api20_bootstrap_esxi6(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallESXi", "6.", False)), "ESXi6.0 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "6.", False) != '',
                                    "Skipping Centos 6.5, repo not configured or node unavailable")
    def test_api20_bootstrap_centos6(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "6.", False)),
                        "Centos 6.5 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "6.", True) != '',
                                    "Skipping Centos 6.5 KVM, repo not configured or node unavailable")
    def test_api20_bootstrap_centos6_kvm(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "6.", True)),
                        "Centos 6.5 KVM failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "7.", False) != '',
                                    "Skipping Centos 7.0, repo not configured or node unavailable")
    def test_api20_bootstrap_centos7(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "7.", False)),
                        "Centos 7.0 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallCentOS", "7.", True) != '',
                                    "Skipping Centos 7.0 KVM, repo not configured or node unavailable")
    def test_api20_bootstrap_centos7_kvm(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCentOS", "7.", True)),
                        "Centos 7.0 KVM failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallRHEL", "7.", False) != '',
                                    "Skipping Redhat 7.0, repo not configured or node unavailable")
    def test_api20_bootstrap_rhel7(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallRHEL", "7.", False)),
                        "RHEL 7.0 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallRHEL", "7.", True) != '',
                                    "Skipping Redhat 7.0 KVM, repo not configured or node unavailable")
    def test_api20_bootstrap_rhel7_kvm(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallRHEL", "7.", True)),
                        "RHEL 7.0 KVM failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallUbuntu", "trusty", False) != '',
                                    "Skipping Ubuntu 14, repo not configured or node unavailable")
    def test_api20_bootstrap_ubuntu14(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallUbuntu", "trusty", False)),
                        "Ubuntu 14 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallCoreOS", "899.", False) != '',
                                    "Skipping CoreOS 899.17.0, repo not configured or node unavailable")
    def test_api20_bootstrap_coreos899(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallCoreOS", "899.", False)),
                        "CoreOS 899.17 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallSUSE", "42.", False) != '',
                                    "Skipping SuSe 42, repo not configured or node unavailable")
    def test_api20_bootstrap_suse(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallSUSE", "42.", False)),
                        "SuSe 42 failed.")

    @fit_common.unittest.skipUnless(node_taskid("Graph.InstallWindowsServer", "2012", False) != '',
                                    "Skipping Windows 2012, repo not configured or node unavailable")
    def test_api20_bootstrap_windows(self):
        self.assertTrue(wait_for_workflow_complete(node_taskid("Graph.InstallWindowsServer", "2012", False)),
                        "Win2012 failed.")


if __name__ == '__main__':
    fit_common.unittest.main()
from util import get_data_from_id, read_kpt_file
import glob
import os
import numpy as np
from skimage.io import imread, imsave
from skimage.transform import resize

root_dir = os.environ['DIR_3DFAW']


def prepare_train():
    ids = glob.glob("%s/train_img/*.jpg" % root_dir)
    ids = [os.path.basename(id_).replace(".jpg", "") for id_ in ids]
    y_keypts, z_keypts = get_keypts_from_ids(ids, "train")
    np.savez(file="%s/train" % root_dir,
             y_keypts=y_keypts,
             z_keypts=z_keypts)


def get_keypts_from_ids(ids, mode):
    y_keypts = []
    z_keypts = []
    x_keypts = []
    meta = []
    for k, id_ in enumerate(ids):
        print("%i / %i" % (k, len(ids)))
        _, b, c = get_data_from_id(root=root_dir, mode=mode, id_=id_)
        # a is f64, let's make it uint8 to save some space.
        # a = (a*256.).astype("uint8")
        # imgs.append(a)
        y_keypts.append(b.astype("float32"))
        z_keypts.append(c.astype("float32"))
    # imgs = np.asarray(imgs)
    y_keypts = np.asarray(y_keypts)
    z_keypts = np.asarray(z_keypts)
    return y_keypts, z_keypts


def prepare_valid():
    ids = []
    with open("%s/list_valid_test.txt" % root_dir) as f:
        for line in f:
            line = line.rstrip().split(",")
            if line[1] == "valid":
                ids.append(line[0])
    y_keypts, z_keypts = get_keypts_from_ids(ids, "valid")
    np.savez(file="%s/valid" % root_dir,
             y_keypts=y_keypts,
             z_keypts=z_keypts,
             ids=ids)


def prepare_test():
    ids = []
    orientations = []
    with open("%s/list_valid_test.txt" % root_dir) as f:
        for line in f:
            line = line.rstrip().split(",")
            if line[1] == "test":
                ids.append(line[0])
                orientations.append(line[2])
    y_keypts, z_keypts = get_keypts_from_ids(ids, "valid")  # yes, valid
    np.savez(file="%s/test" % root_dir,
             y_keypts=y_keypts,
             z_keypts=z_keypts,
             ids=ids,
             orientations=orientations)


def prepare_valid_imgs_downsized():
    ids = glob.glob("%s/valid_img/*.jpg" % root_dir)
    ids = [os.path.basename(id_).replace(".jpg", "") for id_ in ids]
    output_folder = "%s/valid_img_cropped_80x80" % root_dir
    if not os.path.exists(output_folder):
        os.makedirs(output_folder)
    for id_ in ids:
        kpts = read_kpt_file("%s/valid_lm/%s_lm.csv" % (root_dir, id_))
        img = imread("%s/valid_img/%s.jpg" % (root_dir, id_))
        # crop to the keypoint bounding box, then downsize to 80x80
        img = img[int(np.min(kpts[:, 1])):int(np.max(kpts[:, 1])),
                  int(np.min(kpts[:, 0])):int(np.max(kpts[:, 0]))]
        img = resize(img, (80, 80))
        imsave(arr=img, fname="%s/%s.jpg" % (output_folder, id_))


if __name__ == '__main__':
    prepare_train()
    prepare_valid()
    prepare_test()
    prepare_valid_imgs_downsized()
# -*- coding: utf-8 -*-
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import logging
import warnings

import mock

from datadog_checks import log
from datadog_checks.base import AgentCheck
from datadog_checks.base.log import DEFAULT_FALLBACK_LOGGER, get_check_logger, init_logging


def test_get_py_loglevel():
    # default value for invalid input
    assert log._get_py_loglevel(None) == logging.INFO
    # default value for valid unicode input encoding into an invalid key
    assert log._get_py_loglevel(u'dèbùg') == logging.INFO
    # check unicode works
    assert log._get_py_loglevel(u'crit') == logging.CRITICAL
    # check string works
    assert log._get_py_loglevel('crit') == logging.CRITICAL


def test_logging_capture_warnings():
    with mock.patch('logging.Logger.warning') as log_warning:
        warnings.warn("hello-world")
        log_warning.assert_not_called()  # warnings are NOT yet captured

        init_logging()  # from here warnings are captured as logs

        warnings.warn("hello-world")
        assert log_warning.call_count == 1
        msg = log_warning.mock_calls[0].args[1]
        assert "hello-world" in msg


def test_get_check_logger(caplog):
    class FooConfig(object):
        def __init__(self):
            self.log = get_check_logger()

        def do_something(self):
            self.log.warning("This is a warning")

    class MyCheck(AgentCheck):
        def __init__(self, *args, **kwargs):
            super(MyCheck, self).__init__(*args, **kwargs)
            self._config = FooConfig()

        def check(self, _):
            self._config.do_something()

    check = MyCheck()
    check.check({})

    assert check.log is check._config.log
    assert "This is a warning" in caplog.text


def test_get_check_logger_fallback(caplog):
    log = get_check_logger()
    log.warning("This is a warning")
    assert log is DEFAULT_FALLBACK_LOGGER
    assert "This is a warning" in caplog.text


def test_get_check_logger_argument_fallback(caplog):
    logger = logging.getLogger()
    log = get_check_logger(default_logger=logger)
    log.warning("This is a warning")
    assert log is logger
    assert "This is a warning" in caplog.text
import unittest

from dojo import remove_word, main


class DojoTest(unittest.TestCase):
    def test_remove_word_1(self):
        self.assertEqual(remove_word("bananauva", "banana"), "uva")

    def test_remove_word_2(self):
        self.assertEqual(remove_word("catdog", "dog"), "catdog")

    def test_remove_word_3(self):
        self.assertEqual(remove_word("pão", "pão"), "")

    def test_main_1(self):
        words = ["leet", "code"]
        self.assertEqual(main("leetcode", words), True)

    def test_main_2(self):
        words = ["leet", "code", "apple"]
        self.assertEqual(main("leetcodeapple", words), True)


if __name__ == '__main__':
    unittest.main()

# Sami - Elen - Allan - Tiago - Mateus - Juan
# s = "leetcode", wordDict = ["leet", "code"]
# FncOne(s, oneWord)
# "bananaaçaimaça" = ["banana", "açai", "maça"]
# "maçabanana"
# {
#   banana
#   acai
#   maca
# }
# bananaaçaimaça
# naaçaimaça { - - - }, açaimaça { - - - }
# solbabanana / [uva, sol, solba, banana]
#
# sol_babanana
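# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original kata): a minimal `dojo`
# module that would satisfy the tests above, reconstructed from the expected
# values only -- the real implementation may differ.

def _remove_word_sketch(s, word):
    # Strip `word` only when it is a prefix of `s` ("catdog" stays "catdog").
    return s[len(word):] if s.startswith(word) else s


def _main_sketch(s, words):
    # Word-break: can `s` be fully segmented into words from `words`?
    if s == "":
        return True
    return any(_main_sketch(_remove_word_sketch(s, w), words)
               for w in words if s.startswith(w))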
import numpy as np
import cv2 as cv

from preprocessing import binary_otsus, deskew
from utilities import projection, save_image

from glob import glob


def preprocess(image):
    # Maybe we end up using only gray level image.
    gray_img = cv.cvtColor(image, cv.COLOR_BGR2GRAY)
    gray_img = cv.bitwise_not(gray_img)

    binary_img = binary_otsus(gray_img, 0)
    # cv.imwrite('origin.png', gray_img)

    # deskewed_img = deskew(binary_img)
    deskewed_img = deskew(binary_img)
    # cv.imwrite('output.png', deskewed_img)

    # binary_img = binary_otsus(deskewed_img, 0)
    # breakpoint()

    # Visualize
    # breakpoint()
    return deskewed_img


def projection_segmentation(clean_img, axis, cut=3):
    segments = []
    start = -1
    cnt = 0

    projection_bins = projection(clean_img, axis)
    for idx, projection_bin in enumerate(projection_bins):
        if projection_bin != 0:
            cnt = 0
        if projection_bin != 0 and start == -1:
            start = idx
        if projection_bin == 0 and start != -1:
            cnt += 1
            if cnt >= cut:
                if axis == 'horizontal':
                    segments.append(clean_img[max(start - 1, 0):idx, :])
                elif axis == 'vertical':
                    segments.append(clean_img[:, max(start - 1, 0):idx])
                cnt = 0
                start = -1
    return segments


# Line Segmentation
# ----------------------------------------------------------------------------------------

def line_horizontal_projection(image, cut=3):
    # Preprocess input image
    clean_img = preprocess(image)

    # Segmentation
    lines = projection_segmentation(clean_img, axis='horizontal', cut=cut)
    return lines


# Word Segmentation
# ----------------------------------------------------------------------------------------

def word_vertical_projection(line_image, cut=3):
    line_words = projection_segmentation(line_image, axis='vertical', cut=cut)
    line_words.reverse()
    return line_words


def extract_words(img, visual=0):
    lines = line_horizontal_projection(img)
    words = []

    for idx, line in enumerate(lines):
        if visual:
            save_image(line, 'lines', f'line{idx}')

        line_words = word_vertical_projection(line)
        for w in line_words:
            # if len(words) == 585:
            #     print(idx)
            words.append((w, line))
        # words.extend(line_words)

    # breakpoint()
    if visual:
        for idx, word in enumerate(words):
            save_image(word[0], 'words', f'word{idx}')

    return words


if __name__ == "__main__":
    img = cv.imread('../Dataset/scanned/capr196.png')
    extract_words(img, 1)
_empty = []

_simple = [1, 2, 3]

_complex = [{"value": 1}, {"value": 2}, {"value": 3}]

_locations = [
    ("Scotland", "Edinburgh", "Branch1", 20000),
    ("Scotland", "Glasgow", "Branch1", 12500),
    ("Scotland", "Glasgow", "Branch2", 12000),
    ("Wales", "Cardiff", "Branch1", 29700),
    ("Wales", "Cardiff", "Branch2", 30000),
    ("Wales", "Bangor", "Branch1", 12800),
    ("England", "London", "Branch1", 90000),
    ("England", "London", "Branch2", 80000),
    ("England", "London", "Branch3", 70000),
    ("England", "Manchester", "Branch1", 45600),
    ("England", "Manchester", "Branch2", 50000),
    ("England", "Liverpool", "Branch1", 29700),
    ("England", "Liverpool", "Branch2", 25000),
]
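# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original fixture file): the kind of
# grouping these rows support, written with plain stdlib tools so it stands
# alone. The "total sales per country" aggregation is an assumption; rows
# are (country, city, branch, sales).
from itertools import groupby
from operator import itemgetter


def _sales_by_country(locations):
    ordered = sorted(locations, key=itemgetter(0))
    return {country: sum(row[3] for row in rows)
            for country, rows in groupby(ordered, key=itemgetter(0))}

# e.g. _sales_by_country(_locations)["Scotland"] == 44500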
# coding=utf-8
import json

from django.contrib import messages
from django.shortcuts import render, get_object_or_404
from wi_model_util.imodel import get_object_or_none
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from app.iclass.utils import redefine_item_pos
from app.iclass.models import *
from base.core.dateutils import *
import datetime
import random


@login_required
def dashboard_index(request):
    """Display the application usage statistics for the system."""
    qd = request.GET
    current_user = request.user
    now = datetime.datetime.now()
    start_time = qd.get("start_time", days_ago(30).strftime("%Y-%m-%d %H:%M"))
    end_time = qd.get("end_time", zero_date().strftime("%Y-%m-%d %H:%M"))
    if type(start_time) == str or type(start_time) == unicode:
        start_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M')
    if type(end_time) == str or type(end_time) == unicode:
        end_time = datetime.datetime.strptime(end_time, '%Y-%m-%d %H:%M')
    _start_time = datetime_to_timestamp(start_time)
    _end_time = datetime_to_timestamp(end_time)
    context = {
        "course": {
            "course_count": random.randint(10, 100),
            "course_guake_count": random.randint(10, 100),
        },
        "subject": {
            "subject_count": random.randint(10, 100),
            "mock_page_count": random.randint(10, 100),
        },
        "paper": {
            "paper_count": random.randint(10, 100),
        },
        "question": {
            "question_count": random.randint(10, 100),
        },
        "app": 'fastor',
    }
    context['user_incr_datas'] = (
        (u'11-23', 238L), (u'11-24', 747L), (u'11-25', 632L), (u'11-26', 470L),
        (u'11-27', 408L), (u'11-28', 408L), (u'11-29', 318L), (u'11-30', 248L),
        (u'12-01', 269L), (u'12-02', 358L), (u'12-03', 401L), (u'12-04', 343L),
        (u'12-05', 422L), (u'12-06', 299L), (u'12-07', 236L), (u'12-08', 317L),
        (u'12-09', 436L), (u'12-10', 484L), (u'12-11', 351L), (u'12-12', 287L),
        (u'12-13', 279L), (u'12-14', 301L), (u'12-15', 266L), (u'12-16', 336L),
        (u'12-17', 374L), (u'12-18', 404L), (u'12-19', 357L), (u'12-20', 279L),
        (u'12-21', 218L), (u'12-22', 264L))
    context['user_incr_success_datas'] = (
        (u'11-23', 238L), (u'11-24', 747L), (u'11-25', 632L), (u'11-26', 470L),
        (u'11-27', 408L), (u'11-28', 408L), (u'11-29', 318L), (u'11-30', 248L),
        (u'12-01', 269L), (u'12-02', 357L), (u'12-03', 401L), (u'12-04', 343L),
        (u'12-05', 422L), (u'12-06', 299L), (u'12-07', 235L), (u'12-08', 317L),
        (u'12-09', 436L), (u'12-10', 484L), (u'12-11', 351L), (u'12-12', 287L),
        (u'12-13', 279L), (u'12-14', 301L), (u'12-15', 266L), (u'12-16', 336L),
        (u'12-17', 374L), (u'12-18', 404L), (u'12-19', 357L), (u'12-20', 279L),
        (u'12-21', 218L), (u'12-22', 264L))
    context["sql_area_count"] = (
        (u'\u5e7f\u4e1c\u7701', 387L), (u'\u5317\u4eac', 376L),
        (u'\u6c5f\u82cf\u7701', 316L), (u'\u9ed1\u9f99\u6c5f\u7701', 310L),
        (u'\u5e7f\u4e1c', 300L), (u'\u6d59\u6c5f', 282L))
    context["order_time_datas"] = (
        (u'00', 35L), (u'01', 10L), (u'02', 8L), (u'05', 2L), (u'06', 8L),
        (u'07', 18L), (u'08', 47L), (u'09', 35L), (u'10', 108L), (u'11', 65L),
        (u'12', 61L), (u'13', 50L), (u'14', 54L), (u'15', 65L), (u'16', 39L),
        (u'17', 43L), (u'18', 20L), (u'19', 43L), (u'20', 48L), (u'21', 77L),
        (u'22', 34L), (u'23', 34L))
    context["start_time"] = start_time
    context["end_time"] = end_time
    context["now"] = now.strftime("%Y-%m-%d")
    context["device_data"] = (
        (u'iPhon', 78425L), (u'phone', 69710L), (u'HUAWE', 30187L),
        (u'Xiaom', 17106L), (u'OPPO-', 16214L), (u'vivo-', 16134L),
        (u'iPad1', 13548L), (u'Meizu', 4509L), (u'samsu', 3361L),
        (u'OnePl', 1110L))
    return render(request, 'cms_index/basecontent.html', context)
# coding: utf-8


class DataBatch:
    def __init__(self, torch_module):
        self._data = []
        self._label = []
        self.torch_module = torch_module

    def append_data(self, new_data):
        self._data.append(self.__as_tensor(new_data))

    def append_label(self, new_label):
        self._label.append(self.__as_tensor(new_label))

    def __as_tensor(self, in_data):
        return self.torch_module.from_numpy(in_data)

    @property
    def data(self):
        return self._data

    @property
    def label(self):
        return self._label
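# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file): the class takes
# the torch module itself, keeping the dependency injectable; arrays are
# converted with torch.from_numpy. The array shapes here are arbitrary.
import numpy as np
import torch

batch = DataBatch(torch)
batch.append_data(np.zeros((4, 3), dtype=np.float32))
batch.append_label(np.array([0, 1, 1, 0], dtype=np.int64))
assert batch.data[0].shape == (4, 3)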
""" Usage: python3 setup.py build Created on Apr 19, 2016 @author: paulross """<import_stmt>ScList<def_stmt>test <block_start>s=ScList.ScList()<line_sep>s.append(8)<line_sep>print(s.appends)<line_sep>print(s)<block_end>
"""Component to integrate with garbage_colection."""<import_stmt>logging<import_from_stmt>datetime timedelta<import_stmt>homeassistant.helpers.config_validation<as>cv<import_stmt>homeassistant.util.dt<as>dt_util<import_stmt>voluptuous<as>vol<import_from_stmt>homeassistant config_entries<import_from_stmt>homeassistant.const CONF_ENTITY_ID CONF_NAME<import_from_stmt>homeassistant.helpers discovery<import_from_stmt>.const ATTR_LAST_COLLECTION CONF_FREQUENCY CONF_SENSORS DOMAIN SENSOR_PLATFORM configuration <line_sep>MIN_TIME_BETWEEN_UPDATES=timedelta(seconds=30)<line_sep>_LOGGER=logging.getLogger(__name__)<line_sep>config_definition=configuration()<line_sep>SENSOR_SCHEMA=vol.Schema(config_definition.compile_schema())<line_sep>CONFIG_SCHEMA=vol.Schema({DOMAIN:vol.Schema({vol.Optional(CONF_SENSORS):vol.All(cv.ensure_list [SENSOR_SCHEMA])})} extra=vol.ALLOW_EXTRA )<line_sep>COLLECT_NOW_SCHEMA=vol.Schema({vol.Required(CONF_ENTITY_ID):cv.string vol.Optional(ATTR_LAST_COLLECTION):cv.datetime })<async_keyword><def_stmt>async_setup hass config<block_start>"""Set up this component using YAML."""<def_stmt>handle_collect_garbage call<block_start>"""Handle the service call."""<line_sep>entity_id=call.data.get(CONF_ENTITY_ID)<line_sep>last_collection=call.data.get(ATTR_LAST_COLLECTION)<line_sep>_LOGGER.debug("called collect_garbage for %s" entity_id)<try_stmt><block_start>entity=hass.data[DOMAIN][SENSOR_PLATFORM][entity_id]<if_stmt>last_collection<is><none><block_start>entity.last_collection=dt_util.now()<block_end><else_stmt><block_start>entity.last_collection=dt_util.as_local(last_collection)<block_end><block_end><except_stmt>Exception<as>err<block_start>_LOGGER.error("Failed setting last collection for %s - %s" entity_id err)<block_end>hass.services.call("homeassistant" "update_entity" {"entity_id":entity_id})<block_end><if_stmt>DOMAIN<not><in>hass.services.async_services()<block_start>hass.services.async_register(DOMAIN "collect_garbage" handle_collect_garbage schema=COLLECT_NOW_SCHEMA)<block_end><else_stmt><block_start>_LOGGER.debug("Service already registered")<block_end><if_stmt>config.get(DOMAIN)<is><none># We get here if the integration is set up using config flow <block_start><return><true><block_end>platform_config=config[DOMAIN].get(CONF_SENSORS {})<line_sep># If platform is not enabled, skip. <if_stmt><not>platform_config<block_start><return><false><block_end><for_stmt>entry platform_config<block_start>_LOGGER.debug("Setting %s(%s) from YAML configuration" entry[CONF_NAME] entry[CONF_FREQUENCY] )<line_sep># If entry is not enabled, skip. # if not entry[CONF_ENABLED]: # continue hass.async_create_task(discovery.async_load_platform(hass SENSOR_PLATFORM DOMAIN entry config))<block_end>hass.async_create_task(hass.config_entries.flow.async_init(DOMAIN context={"source":config_entries.SOURCE_IMPORT} data={}))<line_sep><return><true><block_end><async_keyword><def_stmt>async_setup_entry hass config_entry<block_start>"""Set up this integration using UI."""<if_stmt>config_entry.source<eq>config_entries.SOURCE_IMPORT# We get here if the integration is set up using YAML <block_start>hass.async_create_task(hass.config_entries.async_remove(config_entry.entry_id))<line_sep><return><false><block_end>_LOGGER.debug("Setting %s (%s) from ConfigFlow" config_entry.title config_entry.data[CONF_FREQUENCY] )<line_sep># Backward compatibility - clean-up (can be removed later?) 
config_entry.options={}<line_sep>config_entry.add_update_listener(update_listener)<line_sep># Add sensor hass.async_add_job(hass.config_entries.async_forward_entry_setup(config_entry SENSOR_PLATFORM))<line_sep><return><true><block_end><async_keyword><def_stmt>async_remove_entry hass config_entry<block_start>"""Handle removal of an entry."""<try_stmt><block_start><await>hass.config_entries.async_forward_entry_unload(config_entry SENSOR_PLATFORM)<line_sep>_LOGGER.info("Successfully removed sensor from the garbage_collection integration")<block_end><except_stmt>ValueError<block_start><pass><block_end><block_end><async_keyword><def_stmt>update_listener hass entry<block_start>"""Update listener."""<line_sep># The OptionsFlow saves data to options. # Move them back to data and clean options (dirty, but not sure how else to do that) <if_stmt>len(entry.options)<g>0<block_start>entry.data=entry.options<line_sep>entry.options={}<block_end><await>hass.config_entries.async_forward_entry_unload(entry SENSOR_PLATFORM)<line_sep>hass.async_add_job(hass.config_entries.async_forward_entry_setup(entry SENSOR_PLATFORM))<block_end>
# Generated by Django 2.2.9 on 2020-02-26 14:33

import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Realm',
            fields=[
                ('uuid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=255)),
                ('enabled_for_login', models.BooleanField(default=False)),
                ('backend', models.CharField(editable=False, max_length=255)),
                ('config', django.contrib.postgres.fields.jsonb.JSONField(default=dict, editable=False)),
                ('username_claim', models.CharField(max_length=255)),
                ('email_claim', models.CharField(blank=True, max_length=255)),
                ('first_name_claim', models.CharField(blank=True, max_length=255)),
                ('last_name_claim', models.CharField(blank=True, max_length=255)),
                ('full_name_claim', models.CharField(blank=True, max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='RealmUser',
            fields=[
                ('uuid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('claims', django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
                ('username', models.CharField(max_length=255)),
                ('email', models.EmailField(blank=True, max_length=254)),
                ('first_name', models.CharField(blank=True, max_length=255)),
                ('last_name', models.CharField(blank=True, max_length=255)),
                ('full_name', models.CharField(blank=True, max_length=255)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='realms.Realm')),
            ],
            options={
                'unique_together': {('realm', 'username')},
            },
        ),
        migrations.CreateModel(
            name='RealmAuthenticationSession',
            fields=[
                ('uuid', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('callback', models.CharField(max_length=255)),
                ('callback_kwargs', django.contrib.postgres.fields.jsonb.JSONField(default=dict)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='realms.Realm')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT,
                                           to='realms.RealmUser')),
            ],
        ),
    ]
import FWCore.ParameterSet.Config as cms

tracker_apv_vfp30_2016 = cms.Modifier()
import unittest

import pandas as pd

from snorkel.slicing import slicing_function
from snorkel.slicing.monitor import slice_dataframe

DATA = [5, 10, 19, 22, 25]


@slicing_function()
def sf(x):
    return x.num < 20


class PandasSlicerTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.df = pd.DataFrame(dict(num=DATA))

    def test_slice(self):
        self.assertEqual(len(self.df), 5)

        # Should return a subset
        sliced_df = slice_dataframe(self.df, sf)
        self.assertEqual(len(sliced_df), 3)
"""Script to generate baseline values from PyTorch initialization algorithms"""<import_stmt>sys<import_stmt>torch<line_sep>HEADER=""" #include <torch/types.h> #include <vector> namespace expected_parameters { """<line_sep>FOOTER="} // namespace expected_parameters"<line_sep>PARAMETERS="inline std::vector<std::vector<torch::Tensor>> {}() {{"<line_sep>INITIALIZERS={"Xavier_Uniform":<lambda>w:torch.nn.init.xavier_uniform(w) "Xavier_Normal":<lambda>w:torch.nn.init.xavier_normal(w) "Kaiming_Normal":<lambda>w:torch.nn.init.kaiming_normal(w) "Kaiming_Uniform":<lambda>w:torch.nn.init.kaiming_uniform(w)}<def_stmt>emit initializer_parameter_map# Don't write generated with an @ in front, else this file is recognized as generated. <block_start>print("// @{} from {}".format('generated' __file__))<line_sep>print(HEADER)<for_stmt>initializer_name,weights initializer_parameter_map.items()<block_start>print(PARAMETERS.format(initializer_name))<line_sep>print(" return {")<for_stmt>sample weights<block_start>print(" {")<for_stmt>parameter sample<block_start>parameter_values="{{{}}}".format(", ".join(map(str parameter)))<line_sep>print(" torch::tensor({}),".format(parameter_values))<block_end>print(" },")<block_end>print(" };")<line_sep>print("}\n")<block_end>print(FOOTER)<block_end><def_stmt>run initializer<block_start>torch.manual_seed(0)<line_sep>layer1=torch.nn.Linear(7 15)<line_sep>INITIALIZERS[initializer](layer1.weight)<line_sep>layer2=torch.nn.Linear(15 15)<line_sep>INITIALIZERS[initializer](layer2.weight)<line_sep>layer3=torch.nn.Linear(15 2)<line_sep>INITIALIZERS[initializer](layer3.weight)<line_sep>weight1=layer1.weight.data.numpy()<line_sep>weight2=layer2.weight.data.numpy()<line_sep>weight3=layer3.weight.data.numpy()<line_sep><return>[weight1 weight2 weight3]<block_end><def_stmt>main <block_start>initializer_parameter_map={}<for_stmt>initializer INITIALIZERS.keys()<block_start>sys.stderr.write('Evaluating {} ...\n'.format(initializer))<line_sep>initializer_parameter_map[initializer]=run(initializer)<block_end>emit(initializer_parameter_map)<block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
from .config import parseConfig
from .engine import TradingEngine


def main() -> None:
    # Parse the command line config
    config = parseConfig()

    # Instantiate trading engine
    #
    # The engine is responsible for managing the different components,
    # including the strategies, the bank/risk engine, and the
    # exchange/backtest engine.
    engine = TradingEngine(**config)

    # Run the live trading engine
    engine.start()


if __name__ == "__main__":
    main()
import pytest


@pytest.fixture(params=["real", "wsgi"])
def app_type(request):
    return request.param


@pytest.fixture
def cli_args(request, openapi_version, app_type):
    if app_type == "real":
        schema_url = request.getfixturevalue("schema_url")
        args = (schema_url,)
    else:
        app_path = request.getfixturevalue("loadable_flask_app")
        args = (f"--app={app_path}", "/schema.yaml")
    return args
# Copyright (c) 2019 - now, Eggroll Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from eggroll.core.meta_model import ErPartition


def get_db_path(partition: ErPartition):
    store_locator = partition._store_locator
    db_path_prefix = '/tmp/eggroll/'
    return db_path_prefix + "/".join(
        [store_locator._store_type,
         store_locator._namespace,
         store_locator._name,
         str(partition._id)])
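# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original file): the path shape that
# get_db_path produces. The SimpleNamespace stand-ins and the "ROLLPAIR_LMDB"
# store type are assumptions for demonstration only; a real caller would pass
# an ErPartition.
from types import SimpleNamespace

_fake_partition = SimpleNamespace(
    _store_locator=SimpleNamespace(_store_type="ROLLPAIR_LMDB",
                                   _namespace="demo",
                                   _name="table"),
    _id=0)
assert get_db_path(_fake_partition) == '/tmp/eggroll/ROLLPAIR_LMDB/demo/table/0'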
import pandas as pd

# NOTE: `df` is assumed to be a pre-loaded, event-level DataFrame (one row per
# user interaction, with columns such as "ts", "userId", "page", "sessionId",
# "length", "artist", "song", "registration") before this script runs.

## convert to time
df["date"] = pd.to_datetime(df["ts"], unit="ms")
df["ts_year"] = df["date"].dt.year
df["ts_month"] = df["date"].dt.month
df["ts_week"] = df["date"].dt.week
df["ts_day"] = df["date"].dt.day
df["ts_dow"] = df["date"].dt.weekday
df["ts_hour"] = df["date"].dt.hour
df["ts_date_day"] = df["date"].dt.date
df["ts_is_weekday"] = [1 if x in [0, 1, 2, 3, 4] else 0 for x in df["ts_dow"]]
df["registration_ts"] = pd.to_datetime(df["registration"], unit="ms").dt.date

## create label
df["churned_event"] = [1 if x == "Cancellation Confirmation" else 0 for x in df["page"]]
df["user_churned"] = df.groupby("userId")["churned_event"].transform("max")

## convert categorical page features
events_list = [
    "NextSong",
    "Thumbs Down",
    "Thumbs Up",
    "Add to Playlist",
    "Roll Advert",
    "Add Friend",
    "Downgrade",
    "Upgrade",
    "Error",
]
usage_column_name = []
for event in events_list:
    event_name = "_".join(event.split()).lower()
    usage_column_name.append(event_name)
    df[event_name] = [1 if x == event else 0 for x in df["page"]]

## feature engineering
base_df = (
    df.groupby(["userId", "ts_date_day", "ts_is_weekday"])
    .agg({"page": "count"})
    .groupby(["userId", "ts_is_weekday"])["page"]
    .mean()
    .unstack(fill_value=0)
    .reset_index()
    .rename(columns={0: "average_events_weekend", 1: "average_events_weekday"})
)

# num_ads_7d, num_songs_played_7d, num_songs_played_30d, num_songs_played_90d
base_df_daily = (
    df.groupby(["userId", "ts_date_day"])
    .agg({"page": "count", "nextsong": "sum", "roll_advert": "sum", "error": "sum"})
    .reset_index()
)
feature34 = (
    base_df_daily.groupby(["userId", "ts_date_day"])
    .tail(7)
    .groupby(["userId"])
    .agg({"nextsong": "sum", "roll_advert": "sum", "error": "sum"})
    .reset_index()
    .rename(
        columns={
            "nextsong": "num_songs_played_7d",
            "roll_advert": "num_ads_7d",
            "error": "num_error_7d",
        }
    )
)
feature5 = (
    base_df_daily.groupby(["userId", "ts_date_day"])
    .tail(30)
    .groupby(["userId"])
    .agg({"nextsong": "sum"})
    .reset_index()
    .rename(columns={"nextsong": "num_songs_played_30d"})
)
feature6 = (
    base_df_daily.groupby(["userId", "ts_date_day"])
    .tail(90)
    .groupby(["userId"])
    .agg({"nextsong": "sum"})
    .reset_index()
    .rename(columns={"nextsong": "num_songs_played_90d"})
)

# num_artists, num_songs, num_ads, num_thumbsup, num_thumbsdown, num_playlist,
# num_addfriend, num_error, user_downgrade, user_upgrade, percentage_ad,
# days_since_active
base_df_user = (
    df.groupby(["userId"])
    .agg(
        {
            "page": "count",
            "nextsong": "sum",
            "artist": "nunique",
            "song": "nunique",
            "thumbs_down": "sum",
            "thumbs_up": "sum",
            "add_to_playlist": "sum",
            "roll_advert": "sum",
            "add_friend": "sum",
            "downgrade": "max",
            "upgrade": "max",
            "error": "sum",
            "ts_date_day": "max",
            "registration_ts": "min",
            "user_churned": "max",
        }
    )
    .reset_index()
)
base_df_user["percentage_ad"] = base_df_user["roll_advert"] / base_df_user["page"]
base_df_user["days_since_active"] = (base_df_user["ts_date_day"] - base_df_user["registration_ts"]).dt.days

# repeats ratio
base_df_user["repeats_ratio"] = 1 - base_df_user["song"] / base_df_user["nextsong"]

# num_sessions, avg_time_per_session, avg_events_per_session,
base_df_session = (
    df.groupby(["userId", "sessionId"])
    .agg({"length": "sum", "page": "count", "date": "min"})
    .reset_index()
)
base_df_session["prev_session_ts"] = base_df_session.groupby(["userId"])["date"].shift(1)
base_df_session["gap_session"] = (base_df_session["date"] - base_df_session["prev_session_ts"]).dt.days
user_sessions = (
    base_df_session.groupby("userId")
    .agg({"sessionId": "count", "length": "mean", "page": "mean", "gap_session": "mean"})
    .reset_index()
    .rename(
        columns={
            "sessionId": "num_sessions",
            "length": "avg_time_per_session",
            "page": "avg_events_per_session",
            "gap_session": "avg_gap_between_session",
        }
    )
)

# merge features together
base_df["userId"] = base_df["userId"].astype("int")
final_feature_df = base_df.merge(feature34, how="left", on="userId")
final_feature_df = final_feature_df.merge(feature5, how="left", on="userId")
final_feature_df = final_feature_df.merge(feature6, how="left", on="userId")
final_feature_df = final_feature_df.merge(user_sessions, how="left", on="userId")
df = final_feature_df.merge(base_df_user, how="left", on="userId")
df = df.fillna(0)
df.columns = [
    "userId",
    "average_events_weekend",
    "average_events_weekday",
    "num_songs_played_7d",
    "num_ads_7d",
    "num_error_7d",
    "num_songs_played_30d",
    "num_songs_played_90d",
    "num_sessions",
    "avg_time_per_session",
    "avg_events_per_session",
    "avg_gap_between_session",
    "num_events",
    "num_songs",
    "num_artists",
    "num_unique_songs",
    "num_thumbs_down",
    "num_thumbs_up",
    "num_add_to_playlist",
    "num_ads",
    "num_add_friend",
    "num_downgrade",
    "num_upgrade",
    "num_error",
    "ts_date_day",
    "registration_ts",
    "user_churned",
    "percentage_ad",
    "days_since_active",
    "repeats_ratio",
]
df = df[
    [
        "userId",
        "user_churned",
        "average_events_weekend",
        "average_events_weekday",
        "num_songs_played_7d",
        "num_ads_7d",
        "num_error_7d",
        "num_songs_played_30d",
        "num_songs_played_90d",
        "num_sessions",
        "avg_time_per_session",
        "avg_events_per_session",
        "avg_gap_between_session",
        "num_events",
        "num_songs",
        "num_artists",
        "num_thumbs_down",
        "num_thumbs_up",
        "num_add_to_playlist",
        "num_ads",
        "num_add_friend",
        "num_downgrade",
        "num_upgrade",
        "num_error",
        "percentage_ad",
        "days_since_active",
        "repeats_ratio",
    ]
]
# Autogenerated file, do not edit, this file provides stubs for builtins autocomplete in VSCode, PyCharm, etc <import_from_stmt>typing Any<import_from_stmt>typing Tuple<import_from_stmt>typing Callable<import_from_stmt>typing overload<import_from_stmt>warp.types array array2d array3d array4d constant<import_from_stmt>warp.types int8 uint8 int16 uint16 int32 uint32 int64 uint64 float32 float64<import_from_stmt>warp.types vec2 vec3 vec4 mat22 mat33 mat44 quat transform spatial_vector spatial_matrix<import_from_stmt>warp.types mesh_query_aabb_t hash_grid_query_t<line_sep>@overload<def_stmt>min x:int32 y:int32<arrow>int<block_start>""" Return the minimum of two integers. """<line_sep><ellipsis><block_end>@overload<def_stmt>min x:float32 y:float32<arrow>float<block_start>""" Return the minimum of two floats. """<line_sep><ellipsis><block_end>@overload<def_stmt>max x:int32 y:int32<arrow>int<block_start>""" Return the maximum of two integers. """<line_sep><ellipsis><block_end>@overload<def_stmt>max x:float32 y:float32<arrow>float<block_start>""" Return the maximum of two floats. """<line_sep><ellipsis><block_end>@overload<def_stmt>clamp x:int32 a:int32 b:int32<arrow>int<block_start>""" Clamp the value of x to the range [a, b]. """<line_sep><ellipsis><block_end>@overload<def_stmt>clamp x:float32 a:float32 b:float32<arrow>float<block_start>""" Clamp the value of x to the range [a, b]. """<line_sep><ellipsis><block_end>@overload<def_stmt>abs x:int32<arrow>int<block_start>""" Return the absolute value of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>abs x:float32<arrow>float<block_start>""" Return the absolute value of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>sign x:int32<arrow>int<block_start>""" Return -1 if x < 0, return 1 otherwise. """<line_sep><ellipsis><block_end>@overload<def_stmt>sign x:float32<arrow>float<block_start>""" Return -1.0 if x < 0.0, return 1.0 otherwise. """<line_sep><ellipsis><block_end>@overload<def_stmt>step x:float32<arrow>float<block_start>""" Return 1.0 if x < 0.0, return 0.0 otherwise. """<line_sep><ellipsis><block_end>@overload<def_stmt>nonzero x:float32<arrow>float<block_start>""" Return 1.0 if x is not equal to zero, return 0.0 otherwise. """<line_sep><ellipsis><block_end>@overload<def_stmt>sin x:float32<arrow>float<block_start>""" Return the sine of x in radians. """<line_sep><ellipsis><block_end>@overload<def_stmt>cos x:float32<arrow>float<block_start>""" Return the cosine of x in radians. """<line_sep><ellipsis><block_end>@overload<def_stmt>acos x:float32<arrow>float<block_start>""" Return arccos of x in radians. Inputs are automatically clamped to [-1.0, 1.0]. """<line_sep><ellipsis><block_end>@overload<def_stmt>asin x:float32<arrow>float<block_start>""" Return arcsin of x in radians. Inputs are automatically clamped to [-1.0, 1.0]. """<line_sep><ellipsis><block_end>@overload<def_stmt>sqrt x:float32<arrow>float<block_start>""" Return the sqrt of x, where x is positive. """<line_sep><ellipsis><block_end>@overload<def_stmt>tan x:float32<arrow>float<block_start>""" Return tangent of x in radians. """<line_sep><ellipsis><block_end>@overload<def_stmt>atan x:float32<arrow>float<block_start>""" Return arctan of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>atan2 y:float32 x:float32<arrow>float<block_start>""" Return atan2 of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>sinh x:float32<arrow>float<block_start>""" Return the sinh of x. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>cosh x:float32<arrow>float<block_start>""" Return the cosh of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>tanh x:float32<arrow>float<block_start>""" Return the tanh of x. """<line_sep><ellipsis><block_end>@overload<def_stmt>log x:float32<arrow>float<block_start>""" Return the natural log (base-e) of x, where x is positive. """<line_sep><ellipsis><block_end>@overload<def_stmt>exp x:float32<arrow>float<block_start>""" Return base-e exponential, e^x. """<line_sep><ellipsis><block_end>@overload<def_stmt>pow x:float32 y:float32<arrow>float<block_start>""" Return the result of x raised to power of y. """<line_sep><ellipsis><block_end>@overload<def_stmt>round x:float32<arrow>float<block_start>""" Calculate the nearest integer value, rounding halfway cases away from zero. This is the most intuitive form of rounding in the colloquial sense, but can be slower than other options like ``warp.rint()``. Differs from ``numpy.round()``, which behaves the same way as ``numpy.rint()``. """<line_sep><ellipsis><block_end>@overload<def_stmt>rint x:float32<arrow>float<block_start>""" Calculate the nearest integer value, rounding halfway cases to nearest even integer. It is generally faster than ``warp.round()``. Equivalent to ``numpy.rint()``. """<line_sep><ellipsis><block_end>@overload<def_stmt>trunc x:float32<arrow>float<block_start>""" Calculate the nearest integer that is closer to zero than x. In other words, it discards the fractional part of x. It is similar to casting ``float(int(x))``, but preserves the negative sign when x is in the range [-0.0, -1.0). Equivalent to ``numpy.trunc()`` and ``numpy.fix()``. """<line_sep><ellipsis><block_end>@overload<def_stmt>floor x:float32<arrow>float<block_start>""" Calculate the largest integer that is less than or equal to x. """<line_sep><ellipsis><block_end>@overload<def_stmt>ceil x:float32<arrow>float<block_start>""" Calculate the smallest integer that is greater than or equal to x. """<line_sep><ellipsis><block_end>@overload<def_stmt>dot x:vec2 y:vec2<arrow>float<block_start>""" Compute the dot product between two 2d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>dot x:vec3 y:vec3<arrow>float<block_start>""" Compute the dot product between two 3d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>dot x:vec4 y:vec4<arrow>float<block_start>""" Compute the dot product between two 4d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>dot x:quat y:quat<arrow>float<block_start>""" Compute the dot product between two quaternions. """<line_sep><ellipsis><block_end>@overload<def_stmt>outer x:vec2 y:vec2<arrow>mat22<block_start>""" Compute the outer product x*y^T for two vec2 objects. """<line_sep><ellipsis><block_end>@overload<def_stmt>outer x:vec3 y:vec3<arrow>mat33<block_start>""" Compute the outer product x*y^T for two vec3 objects. """<line_sep><ellipsis><block_end>@overload<def_stmt>cross x:vec3 y:vec3<arrow>vec3<block_start>""" Compute the cross product of two 3d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>skew x:vec3<arrow>mat33<block_start>""" Compute the skew symmetric matrix for a 3d vector. """<line_sep><ellipsis><block_end>@overload<def_stmt>length x:vec2<arrow>float<block_start>""" Compute the length of a 2d vector. """<line_sep><ellipsis><block_end>@overload<def_stmt>length x:vec3<arrow>float<block_start>""" Compute the length of a 3d vector. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>length x:vec4<arrow>float<block_start>""" Compute the length of a 4d vector. """<line_sep><ellipsis><block_end>@overload<def_stmt>normalize x:vec2<arrow>vec2<block_start>""" Compute the normalized value of x, if length(x) is 0 then the zero vector is returned. """<line_sep><ellipsis><block_end>@overload<def_stmt>normalize x:vec3<arrow>vec3<block_start>""" Compute the normalized value of x, if length(x) is 0 then the zero vector is returned. """<line_sep><ellipsis><block_end>@overload<def_stmt>normalize x:vec4<arrow>vec4<block_start>""" Compute the normalized value of x, if length(x) is 0 then the zero vector is returned. """<line_sep><ellipsis><block_end>@overload<def_stmt>normalize x:quat<arrow>quat<block_start>""" Compute the normalized value of x, if length(x) is 0 then the zero quat is returned. """<line_sep><ellipsis><block_end>@overload<def_stmt>transpose m:mat22<arrow>mat22<block_start>""" Return the transpose of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>transpose m:mat33<arrow>mat33<block_start>""" Return the transpose of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>transpose m:mat44<arrow>mat44<block_start>""" Return the transpose of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>transpose m:spatial_matrix<arrow>spatial_matrix<block_start>""" Return the transpose of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>inverse m:mat22<arrow>mat22<block_start>""" Return the inverse of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>inverse m:mat33<arrow>mat33<block_start>""" Return the inverse of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>inverse m:mat44<arrow>mat44<block_start>""" Return the inverse of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>determinant m:mat22<arrow>float<block_start>""" Return the determinant of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>determinant m:mat33<arrow>float<block_start>""" Return the determinant of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>determinant m:mat44<arrow>float<block_start>""" Return the determinant of the matrix m """<line_sep><ellipsis><block_end>@overload<def_stmt>diag d:vec2<arrow>mat22<block_start>""" Returns a matrix with the components of the vector d on the diagonal """<line_sep><ellipsis><block_end>@overload<def_stmt>diag d:vec3<arrow>mat33<block_start>""" Returns a matrix with the components of the vector d on the diagonal """<line_sep><ellipsis><block_end>@overload<def_stmt>diag d:vec4<arrow>mat44<block_start>""" Returns a matrix with the components of the vector d on the diagonal """<line_sep><ellipsis><block_end>@overload<def_stmt>cw_mul x:vec2 y:vec2<arrow>vec2<block_start>""" Component wise multiply of two 2d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>cw_mul x:vec3 y:vec3<arrow>vec3<block_start>""" Component wise multiply of two 3d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>cw_mul x:vec4 y:vec4<arrow>vec4<block_start>""" Component wise multiply of two 4d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>cw_div x:vec2 y:vec2<arrow>vec2<block_start>""" Component wise division of two 2d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>cw_div x:vec3 y:vec3<arrow>vec3<block_start>""" Component wise division of two 3d vectors. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>cw_div x:vec4 y:vec4<arrow>vec4<block_start>""" Component wise division of two 4d vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>svd3 A:mat33 U:mat33 sigma:vec3 V:mat33<block_start>""" Compute the SVD of a 3x3 matrix. The singular values are returned in sigma, while the left and right basis vectors are returned in U and V. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_identity <arrow>quat<block_start>""" Construct an identity quaternion with zero imaginary part and real part of 1.0 """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_from_axis_angle axis:vec3 angle:float32<arrow>quat<block_start>""" Construct a quaternion representing a rotation of angle radians around the given axis. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_from_matrix m:mat33<arrow>quat<block_start>""" Construct a quaternion from a 3x3 matrix. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_rpy roll:float32 pitch:float32 yaw:float32<arrow>quat<block_start>""" Construct a quaternion representing a combined roll (z), pitch (x), yaw rotations (y) in radians. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_inverse q:quat<arrow>quat<block_start>""" Compute quaternion conjugate. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_rotate q:quat p:vec3<arrow>vec3<block_start>""" Rotate a vector by a quaternion. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_rotate_inv q:quat p:vec3<arrow>vec3<block_start>""" Rotate a vector the inverse of a quaternion. """<line_sep><ellipsis><block_end>@overload<def_stmt>quat_to_matrix q:quat<arrow>mat33<block_start>""" Convert a quaternion to a 3x3 rotation matrix. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_identity <arrow>transform<block_start>""" Construct an identity transform with zero translation and identity rotation. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_get_translation t:transform<arrow>vec3<block_start>""" Return the translational part of a transform. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_get_rotation t:transform<arrow>quat<block_start>""" Return the rotational part of a transform. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_multiply a:transform b:transform<arrow>transform<block_start>""" Multiply two rigid body transformations together. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_point t:transform p:vec3<arrow>vec3<block_start>""" Apply the transform to a point p treating the homogenous coordinate as w=1 (translation and rotation). """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_point m:mat44 p:vec3<arrow>vec3<block_start>""" Apply the transform to a point ``p`` treating the homogenous coordinate as w=1. The transformation is applied treating ``p`` as a column vector, e.g.: ``y = M*p`` note this is in contrast to some libraries, notably USD, which applies transforms to row vectors, ``y^T = p^T*M^T``. If the transform is coming from a library that uses row-vectors then users should transpose the tranformation matrix before calling this method. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_vector t:transform v:vec3<arrow>vec3<block_start>""" Apply the transform to a vector v treating the homogenous coordinate as w=0 (rotation only). 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>transform_vector m:mat44 v:vec3<arrow>vec3<block_start>""" Apply the transform to a vector ``v`` treating the homogenous coordinate as w=0. The transformation is applied treating ``v`` as a column vector, e.g.: ``y = M*v`` note this is in contrast to some libraries, notably USD, which applies transforms to row vectors, ``y^T = v^T*M^T``. If the transform is coming from a library that uses row-vectors then users should transpose the tranformation matrix before calling this method. """<line_sep><ellipsis><block_end>@overload<def_stmt>transform_inverse t:transform<arrow>transform<block_start>""" Compute the inverse of the transform. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_dot a:spatial_vector b:spatial_vector<arrow>float<block_start>""" Compute the dot product of two 6d screw vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_cross a:spatial_vector b:spatial_vector<arrow>spatial_vector<block_start>""" Compute the cross-product of two 6d screw vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_cross_dual a:spatial_vector b:spatial_vector<arrow>spatial_vector<block_start>""" Compute the dual cross-product of two 6d screw vectors. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_top a:spatial_vector<arrow>vec3<block_start>""" Return the top (first) part of a 6d screw vector. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_bottom a:spatial_vector<arrow>vec3<block_start>""" Return the bottom (second) part of a 6d screw vector. """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_jacobian S:array[spatial_vector] joint_parents:array[int32] joint_qd_start:array[int32] joint_start:int32 joint_count:int32 J_start:int32 J_out:array[float32]<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>spatial_mass I_s:array[spatial_matrix] joint_start:int32 joint_count:int32 M_start:int32 M:array[float32]<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mlp weights:array[float32] bias:array[float32] activation:Callable index:int32 x:array[float32] out:array[float32]<block_start>""" Evaluate a multi-layer perceptron (MLP) layer in the form: ``out = act(weights*x + bias)``. :param weights: A layer's network weights with dimensions ``(m, n)``. :param bias: An array with dimensions ``(n)``. :param activation: A ``wp.func`` function that takes a single scalar float as input and returns a scalar float as output :param index: The batch item to process, typically each thread will process 1 item in the batch, in this case index should be ``wp.tid()`` :param x: The feature matrix with dimensions ``(n, b)`` :param out: The network output with dimensions ``(m, b)`` :note: Feature and output matrices are transposed compared to some other frameworks such as PyTorch. All matrices are assumed to be stored in flattened row-major memory layout (NumPy default). """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_query_point id:uint64 point:vec3 max_dist:float32 inside:float32 face:int32 bary_u:float32 bary_v:float32<arrow>bool<block_start>""" Computes the closest point on the mesh with identifier `id` to the given point in space. Returns ``True`` if a point < ``max_dist`` is found. :param id: The mesh identifier :param point: The point in space to query :param max_dist: Mesh faces above this distance will not be considered by the query :param inside: Returns a value < 0 if query point is inside the mesh, >=0 otherwise. 
Note that the mesh must be watertight for this to be robust :param face: Returns the index of the closest face :param bary_u: Returns the barycentric u coordinate of the closest point :param bary_v: Returns the barycentric v coordinate of the closest point """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_query_ray id:uint64 start:vec3 dir:vec3 max_t:float32 t:float32 bary_u:float32 bary_v:float32 sign:float32 normal:vec3 face:int32<arrow>bool<block_start>""" Computes the closest ray hit on the mesh with identifier `id`, returns ``True`` if a point < ``max_t`` is found. :param id: The mesh identifier :param start: The start point of the ray :param dir: The ray direction (should be normalized) :param max_t: The maximum distance along the ray to check for intersections :param t: Returns the distance of the closest hit along the ray :param bary_u: Returns the barycentric u coordinate of the closest hit :param bary_v: Returns the barycentric v coordinate of the closest hit :param sign: Returns a value > 0 if the ray hit the front of the face, returns < 0 otherwise :param normal: Returns the face normal :param face: Returns the index of the hit face """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_query_aabb id:uint64 lower:vec3 upper:vec3<arrow>mesh_query_aabb_t<block_start>""" Construct an axis-aligned bounding box query against a mesh object. This query can be used to iterate over all triangles inside a volume. Returns an object that is used to track state during mesh traversal. :param id: The mesh identifier :param lower: The lower bound of the bounding box in mesh space :param upper: The upper bound of the bounding box in mesh space """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_query_aabb_next query:mesh_query_aabb_t index:int32<arrow>bool<block_start>""" Move to the next triangle overlapping the query bounding box. The index of the current face is stored in ``index``, returns ``False`` if there are no more overlapping triangles. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_eval_position id:uint64 face:int32 bary_u:float32 bary_v:float32<arrow>vec3<block_start>""" Evaluates the position on the mesh given a face index, and barycentric coordinates. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_eval_velocity id:uint64 face:int32 bary_u:float32 bary_v:float32<arrow>vec3<block_start>""" Evaluates the velocity on the mesh given a face index, and barycentric coordinates. """<line_sep><ellipsis><block_end>@overload<def_stmt>hash_grid_query id:uint64 point:vec3 max_dist:float32<arrow>hash_grid_query_t<block_start>""" Construct a point query against a hash grid. This query can be used to iterate over all neighboring points within a fixed radius from the query point. Returns an object that is used to track state during neighbor traversal. """<line_sep><ellipsis><block_end>@overload<def_stmt>hash_grid_query_next query:hash_grid_query_t index:int32<arrow>bool<block_start>""" Move to the next point in the hash grid query. The index of the current neighbor is stored in ``index``, returns ``False`` if there are no more neighbors. """<line_sep><ellipsis><block_end>@overload<def_stmt>hash_grid_point_id id:uint64 index:int32<arrow>int<block_start>""" Return the index of a point in the grid, this can be used to re-order threads such that grid traversal occurs in a spatially coherent order. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>intersect_tri_tri v0:vec3 v1:vec3 v2:vec3 u0:vec3 u1:vec3 u2:vec3<arrow>int<block_start>""" Tests for intersection between two triangles (v0, v1, v2) and (u0, u1, u2) using Moller's method. Returns > 0 if triangles intersect. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_eval_face_normal id:uint64 face:int32<arrow>vec3<block_start>""" Evaluates the face normal the mesh given a face index. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_get_point id:uint64 index:int32<arrow>vec3<block_start>""" Returns the point of the mesh given a index. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_get_velocity id:uint64 index:int32<arrow>vec3<block_start>""" Returns the velocity of the mesh given a index. """<line_sep><ellipsis><block_end>@overload<def_stmt>mesh_get_index id:uint64 index:int32<arrow>int<block_start>""" Returns the point-index of the mesh given a face-vertex index. """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_sample_f id:uint64 uvw:vec3 sampling_mode:int32<arrow>float<block_start>""" Sample the volume given by ``id`` at the volume local-space point ``uvw``. Interpolation should be ``wp.Volume.CLOSEST``, or ``wp.Volume.LINEAR.`` """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_lookup_f id:uint64 i:int32 j:int32 k:int32<arrow>float<block_start>""" Returns the value of voxel with coordinates ``i``, ``j``, ``k``, if the voxel at this index does not exist this function returns the background value """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_sample_v id:uint64 uvw:vec3 sampling_mode:int32<arrow>vec3<block_start>""" Sample the vector volume given by ``id`` at the volume local-space point ``uvw``. Interpolation should be ``wp.Volume.CLOSEST``, or ``wp.Volume.LINEAR.`` """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_lookup_v id:uint64 i:int32 j:int32 k:int32<arrow>vec3<block_start>""" Returns the vector value of voxel with coordinates ``i``, ``j``, ``k``, if the voxel at this index does not exist this function returns the background value """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_sample_i id:uint64 uvw:vec3<arrow>int<block_start>""" Sample the int32 volume given by ``id`` at the volume local-space point ``uvw``. """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_lookup_i id:uint64 i:int32 j:int32 k:int32<arrow>int<block_start>""" Returns the int32 value of voxel with coordinates ``i``, ``j``, ``k``, if the voxel at this index does not exist this function returns the background value """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_index_to_world id:uint64 uvw:vec3<arrow>vec3<block_start>""" Transform a point defined in volume index space to world space given the volume's intrinsic affine transformation. """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_world_to_index id:uint64 xyz:vec3<arrow>vec3<block_start>""" Transform a point defined in volume world space to the volume's index space, given the volume's intrinsic affine transformation. """<line_sep><ellipsis><block_end>@overload<def_stmt>volume_index_to_world_dir id:uint64 uvw:vec3<arrow>vec3<block_start>""" Transform a direction defined in volume index space to world space given the volume's intrinsic affine transformation. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>volume_world_to_index_dir id:uint64 xyz:vec3<arrow>vec3<block_start>""" Transform a direction defined in volume world space to the volume's index space, given the volume's intrinsic affine transformation. """<line_sep><ellipsis><block_end>@overload<def_stmt>rand_init seed:int32<arrow>uint32<block_start>""" Initialize a new random number generator given a user-defined seed. Returns a 32-bit integer representing the RNG state. """<line_sep><ellipsis><block_end>@overload<def_stmt>rand_init seed:int32 offset:int32<arrow>uint32<block_start>""" Initialize a new random number generator given a user-defined seed and an offset. This alternative constructor can be useful in parallel programs, where a kernel as a whole should share a seed, but each thread should generate uncorrelated values. In this case usage should be ``r = rand_init(seed, tid)`` """<line_sep><ellipsis><block_end>@overload<def_stmt>randi state:uint32<arrow>int<block_start>""" Return a random integer between [0, 2^32) """<line_sep><ellipsis><block_end>@overload<def_stmt>randi state:uint32 min:int32 max:int32<arrow>int<block_start>""" Return a random integer between [min, max) """<line_sep><ellipsis><block_end>@overload<def_stmt>randf state:uint32<arrow>float<block_start>""" Return a random float between [0.0, 1.0) """<line_sep><ellipsis><block_end>@overload<def_stmt>randf state:uint32 min:float32 max:float32<arrow>float<block_start>""" Return a random float between [min, max) """<line_sep><ellipsis><block_end>@overload<def_stmt>randn state:uint32<arrow>float<block_start>""" Sample a normal distribution """<line_sep><ellipsis><block_end>@overload<def_stmt>noise state:uint32 x:float32<arrow>float<block_start>""" Non-periodic Perlin-style noise in 1d. """<line_sep><ellipsis><block_end>@overload<def_stmt>noise state:uint32 xy:vec2<arrow>float<block_start>""" Non-periodic Perlin-style noise in 2d. """<line_sep><ellipsis><block_end>@overload<def_stmt>noise state:uint32 xyz:vec3<arrow>float<block_start>""" Non-periodic Perlin-style noise in 3d. """<line_sep><ellipsis><block_end>@overload<def_stmt>noise state:uint32 xyzt:vec4<arrow>float<block_start>""" Non-periodic Perlin-style noise in 4d. """<line_sep><ellipsis><block_end>@overload<def_stmt>pnoise state:uint32 x:float32 px:int32<arrow>float<block_start>""" Periodic Perlin-style noise in 1d. """<line_sep><ellipsis><block_end>@overload<def_stmt>pnoise state:uint32 xy:vec2 px:int32 py:int32<arrow>float<block_start>""" Periodic Perlin-style noise in 2d. """<line_sep><ellipsis><block_end>@overload<def_stmt>pnoise state:uint32 xyz:vec3 px:int32 py:int32 pz:int32<arrow>float<block_start>""" Periodic Perlin-style noise in 3d. """<line_sep><ellipsis><block_end>@overload<def_stmt>pnoise state:uint32 xyzt:vec4 px:int32 py:int32 pz:int32 pt:int32<arrow>float<block_start>""" Periodic Perlin-style noise in 4d. """<line_sep><ellipsis><block_end>@overload<def_stmt>curlnoise state:uint32 xy:vec2<arrow>vec2<block_start>""" Divergence-free vector field based on the gradient of a Perlin noise function. """<line_sep><ellipsis><block_end>@overload<def_stmt>curlnoise state:uint32 xyz:vec3<arrow>vec3<block_start>""" Divergence-free vector field based on the curl of three Perlin noise functions. """<line_sep><ellipsis><block_end>@overload<def_stmt>curlnoise state:uint32 xyzt:vec4<arrow>vec3<block_start>""" Divergence-free vector field based on the curl of three Perlin noise functions. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>printf <block_start>""" Allows printing formatted strings, using C-style format specifiers. """<line_sep><ellipsis><block_end>@overload<def_stmt>tid <arrow>int<block_start>""" Return the current thread index. Note that this is the *global* index of the thread in the range [0, dim) where dim is the parameter passed to kernel launch. """<line_sep><ellipsis><block_end>@overload<def_stmt>tid <arrow>Tuple[int int]<block_start>""" Return the current thread indices for a 2d kernel launch. Use ``i,j = wp.tid()`` syntax to retrieve the coordinates inside the kernel thread grid. """<line_sep><ellipsis><block_end>@overload<def_stmt>tid <arrow>Tuple[int int int]<block_start>""" Return the current thread indices for a 3d kernel launch. Use ``i,j,k = wp.tid()`` syntax to retrieve the coordinates inside the kernel thread grid. """<line_sep><ellipsis><block_end>@overload<def_stmt>tid <arrow>Tuple[int int int int]<block_start>""" Return the current thread indices for a 4d kernel launch. Use ``i,j,k,l = wp.tid()`` syntax to retrieve the coordinates inside the kernel thread grid. """<line_sep><ellipsis><block_end>@overload<def_stmt>select cond:bool arg1:Any arg2:Any<block_start>""" Select between two arguments, if cond is false then return ``arg1``, otherwise return ``arg2`` """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_add a:array[Any] i:int32 value:Any<block_start>""" Atomically add ``value`` onto the array at location given by index. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_add a:array[Any] i:int32 j:int32 value:Any<block_start>""" Atomically add ``value`` onto the array at location given by indices. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_add a:array[Any] i:int32 j:int32 k:int32 value:Any<block_start>""" Atomically add ``value`` onto the array at location given by indices. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_add a:array[Any] i:int32 j:int32 k:int32 l:int32 value:Any<block_start>""" Atomically add ``value`` onto the array at location given by indices. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_sub a:array[Any] i:int32 value:Any<block_start>""" Atomically subtract ``value`` onto the array at location given by index. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_sub a:array[Any] i:int32 j:int32 value:Any<block_start>""" Atomically subtract ``value`` onto the array at location given by indices. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_sub a:array[Any] i:int32 j:int32 k:int32 value:Any<block_start>""" Atomically subtract ``value`` onto the array at location given by indices. """<line_sep><ellipsis><block_end>@overload<def_stmt>atomic_sub a:array[Any] i:int32 j:int32 k:int32 l:int32 value:Any<block_start>""" Atomically subtract ``value`` onto the array at location given by indices. 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:int8 arg2:int8<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:uint8 arg2:uint8<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:int16 arg2:int16<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:uint16 arg2:uint16<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:int32 arg2:int32<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:uint32 arg2:uint32<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:int64 arg2:int64<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:uint64 arg2:uint64<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:float32 arg2:float32<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:float64 arg2:float64<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:vec2 arg2:vec2<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:vec3 arg2:vec3<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:vec4 arg2:vec4<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:mat22 arg2:mat22<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:mat33 arg2:mat33<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:mat44 arg2:mat44<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:quat arg2:quat<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:transform arg2:transform<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:spatial_vector arg2:spatial_vector<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_eq arg1:spatial_matrix arg2:spatial_matrix<block_start>""" Prints an error to stdout if arg1 and arg2 are not equal """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:float32 b:float32 t:float32<arrow>float32<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:float64 b:float64 t:float32<arrow>float64<block_start>""" Linearly interpolate two 
values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:vec2 b:vec2 t:float32<arrow>vec2<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:vec3 b:vec3 t:float32<arrow>vec3<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:vec4 b:vec4 t:float32<arrow>vec4<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:mat22 b:mat22 t:float32<arrow>mat22<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:mat33 b:mat33 t:float32<arrow>mat33<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:mat44 b:mat44 t:float32<arrow>mat44<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:quat b:quat t:float32<arrow>quat<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:transform b:transform t:float32<arrow>transform<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:spatial_vector b:spatial_vector t:float32<arrow>spatial_vector<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>lerp a:spatial_matrix b:spatial_matrix t:float32<arrow>spatial_matrix<block_start>""" Linearly interpolate two values a and b using factor t, computed as ``a*(1-t) + b*t`` """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_near arg1:float32 arg2:float32 tolerance:float32<block_start>""" Prints an error to stdout if arg1 and arg2 are not closer than tolerance in magnitude """<line_sep><ellipsis><block_end>@overload<def_stmt>expect_near arg1:vec3 arg2:vec3 tolerance:float32<block_start>""" Prints an error to stdout if any element of arg1 and arg2 are not closer than tolerance in magnitude """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:vec2 y:vec2<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:vec3 y:vec3<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:vec4 y:vec4<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:quat y:quat<arrow>quat<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:mat22 y:mat22<arrow>mat22<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:mat33 y:mat33<arrow>mat33<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:mat44 y:mat44<arrow>mat44<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>add x:spatial_vector y:spatial_vector<arrow>spatial_vector<block_start>""" 
"""<line_sep><ellipsis><block_end>@overload<def_stmt>add x:spatial_matrix y:spatial_matrix<arrow>spatial_matrix<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:vec2 y:vec2<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:vec3 y:vec3<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:vec4 y:vec4<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:mat22 y:mat22<arrow>mat22<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:mat33 y:mat33<arrow>mat33<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:mat44 y:mat44<arrow>mat44<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:spatial_vector y:spatial_vector<arrow>spatial_vector<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>sub x:spatial_matrix y:spatial_matrix<arrow>spatial_matrix<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:float32 y:vec2<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:float32 y:vec3<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:float32 y:vec4<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:float32 y:quat<arrow>quat<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:vec2 y:float32<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:vec3 y:float32<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:vec4 y:float32<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:quat y:float32<arrow>quat<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:quat y:quat<arrow>quat<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat22 y:float32<arrow>mat22<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat22 y:vec2<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat22 y:mat22<arrow>mat22<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat33 y:float32<arrow>mat33<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat33 y:vec3<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat33 y:mat33<arrow>mat33<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat44 y:float32<arrow>mat44<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat44 y:vec4<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:mat44 y:mat44<arrow>mat44<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:spatial_vector y:float32<arrow>spatial_vector<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:spatial_matrix y:spatial_matrix<arrow>spatial_matrix<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:spatial_matrix 
y:spatial_vector<arrow>spatial_vector<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mul x:transform y:transform<arrow>transform<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mod x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>mod x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>div x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>div x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>div x:vec2 y:float32<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>div x:vec3 y:float32<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>div x:vec4 y:float32<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>floordiv x:int32 y:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>floordiv x:float32 y:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:int32<arrow>int<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:float32<arrow>float<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:vec2<arrow>vec2<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:vec3<arrow>vec3<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:vec4<arrow>vec4<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:quat<arrow>quat<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:mat33<arrow>mat33<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>neg x:mat44<arrow>mat44<block_start>""" """<line_sep><ellipsis><block_end>@overload<def_stmt>unot b:bool<arrow>bool<block_start>""" """<line_sep><ellipsis><block_end>
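# Hedged usage sketch for the stub declarations above. The stubs only describe signatures, so this assumes NVIDIA Warp's public kernel API (``wp.kernel``, ``wp.launch``, ``wp.zeros``, ``wp.init``), which is not part of this file; a minimal sketch, not the library's own example. <import_stmt>warp<as>wp<line_sep>wp.init()<line_sep>@wp.kernel<def_stmt>ramp out:wp.array(dtype=float)<block_start>i=wp.tid()# 1d launch, so tid() is the global thread index <line_sep>out[i]=wp.lerp(0.0 1.0 float(i)/63.0)# lerp as declared in the stubs above <block_end>buf=wp.zeros(64 dtype=float device="cpu")<line_sep>wp.launch(kernel=ramp dim=64 inputs=[buf] device="cpu")<line_sep>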
# - Generated by tools/entrypoint_compiler.py: do not edit by hand """ Models.Schema """<import_from_stmt>..utils.entrypoints EntryPoint<import_from_stmt>..utils.utils try_set unlist<def_stmt>models_schema model schema=<none> **params<block_start>""" **Description** Retrieve output model schema :param model: The transform model. (inputs). :param schema: The model schema (outputs). """<line_sep>entrypoint_name='Models.Schema'<line_sep>inputs={}<line_sep>outputs={}<if_stmt>model<is><not><none><block_start>inputs['Model']=try_set(obj=model none_acceptable=<false> is_of_type=str)<block_end><if_stmt>schema<is><not><none><block_start>outputs['Schema']=try_set(obj=schema none_acceptable=<false> is_of_type=str)<block_end>input_variables={x<for>x unlist(inputs.values())<if>isinstance(x str)<and>x.startswith("$")}<line_sep>output_variables={x<for>x unlist(outputs.values())<if>isinstance(x str)<and>x.startswith("$")}<line_sep>entrypoint=EntryPoint(name=entrypoint_name inputs=inputs outputs=outputs input_variables=input_variables output_variables=output_variables)<line_sep><return>entrypoint<block_end>
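# Hedged usage sketch for models_schema above: "$model" and "$schema" are hypothetical graph-variable names, chosen only because the function treats "$"-prefixed strings as graph variables. The call just builds an EntryPoint node; running it needs the surrounding nimbusml experiment graph, which is not shown here. schema_node=models_schema(model="$model" schema="$schema")<line_sep>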
# Copyright ClusterHQ Inc. See LICENSE file for details. """ Tests for ``flocker.node._diffing``. """<import_from_stmt>json dumps<import_from_stmt>uuid uuid4<import_from_stmt>eliot.testing capture_logging assertHasMessage<import_from_stmt>hypothesis given<import_stmt>hypothesis.strategies<as>st<import_from_stmt>pyrsistent PClass field pmap pset InvariantException<import_from_stmt>twisted.python.monkey MonkeyPatcher<import_from_stmt>.._diffing create_diff compose_diffs DIFF_COMMIT_ERROR _TransformProxy <import_from_stmt>.._persistence wire_encode wire_decode<import_from_stmt>.._model Node Port<import_from_stmt>..testtools application_strategy deployment_strategy node_strategy related_deployments_strategy <import_from_stmt>...testtools TestCase<import_from_stmt>testtools.matchers Equals LessThan<class_stmt>DiffTestObj(PClass)<block_start>""" Simple pyrsistent object for testing. """<line_sep>a=field()<block_end><class_stmt>DeploymentDiffTest(TestCase)<block_start>""" Tests for creating and applying diffs between deployments. """<line_sep>@given(related_deployments_strategy(2))<def_stmt>test_deployment_diffing self deployments<block_start>""" Diffing two arbitrary deployments, then applying the diff to the first deployment yields the second even after the diff has been serialized and re-created. """<line_sep>deployment_a,deployment_b=deployments<line_sep>diff=create_diff(deployment_a deployment_b)<line_sep>serialized_diff=wire_encode(diff)<line_sep>newdiff=wire_decode(serialized_diff)<line_sep>should_b_b=newdiff.apply(deployment_a)<line_sep>self.assertThat(should_b_b Equals(deployment_b))<block_end>@given(st.lists(deployment_strategy() min_size=3 max_size=10))<def_stmt>test_deployment_diffing_composable self deployments<block_start>""" Diffs should compose to create an aggregate diff. Create a bunch of deployments and compute the incremental diffs from one to the next. Compose all diffs together and apply the resulting diff to the first deployment. Verify that the final deployment is the result. """<line_sep>reserialize=<lambda>x:wire_decode(wire_encode(x))<line_sep>deployment_diffs=list(reserialize(create_diff(a b))<for>a,b zip(deployments[:-1] deployments[1:]))<line_sep>full_diff=reserialize(compose_diffs(deployment_diffs))<line_sep>self.assertThat(full_diff.apply(deployments[0]) Equals(deployments[-1]))<block_end><def_stmt>test_deployment_diffing_smart self<block_start>""" Small modifications to a deployment have diffs that are small. Their reverse is also small. """<line_sep># Any large deployment will do, just use hypothesis for convenience of # generating a large deployment. deployment=deployment_strategy(min_number_of_nodes=90).example()<line_sep>new_nodes=list(Node(uuid=uuid4())<for>_ xrange(4))<line_sep>d=reduce(<lambda>x y:x.update_node(y) new_nodes deployment)<line_sep>encoded_deployment=wire_encode(deployment)<line_sep>diff=create_diff(deployment d)<line_sep>encoded_diff=wire_encode(diff)<line_sep>self.assertThat(len(encoded_diff) LessThan(len(encoded_deployment)/2))<line_sep>self.assertThat(wire_decode(encoded_diff).apply(deployment) Equals(d))<line_sep>removal_diff=create_diff(d deployment)<line_sep>encoded_removal_diff=wire_encode(removal_diff)<line_sep>self.assertThat(len(encoded_removal_diff) LessThan(len(encoded_deployment)/2))<line_sep>self.assertThat(wire_decode(encoded_removal_diff).apply(d) Equals(deployment))<block_end><def_stmt>test_set_diffing_smart self<block_start>""" Small modifications to sets have diffs that are small. Their reverse is also small. 
"""<line_sep># Any Application with a large set of ports will do, just use # hypothesis for convenience of generating a large number of ports on # an application. application=application_strategy(min_number_of_ports=1000).example()<line_sep>new_ports=list(Port(internal_port=i external_port=i)<for>i xrange(4))<line_sep>a=reduce(<lambda>x y:x.transform(['ports'] <lambda>x:x.add(y)) new_ports application)<line_sep>encoded_application=wire_encode(application)<line_sep>diff=create_diff(application a)<line_sep>encoded_diff=wire_encode(diff)<line_sep>self.assertThat(len(encoded_diff) LessThan(len(encoded_application)/2))<line_sep>self.assertThat(wire_decode(encoded_diff).apply(application) Equals(a))<line_sep>removal_diff=create_diff(a application)<line_sep>encoded_removal_diff=wire_encode(removal_diff)<line_sep>self.assertThat(len(encoded_removal_diff) LessThan(len(encoded_application)/2))<line_sep>self.assertThat(wire_decode(encoded_removal_diff).apply(a) Equals(application))<block_end><def_stmt>test_equal_objects self<block_start>""" Diffing objects that are equal results in an object that is smaller than the object. """<line_sep>baseobj=frozenset(xrange(1000))<line_sep>object_a=DiffTestObj(a=baseobj)<line_sep>object_b=DiffTestObj(a=baseobj)<line_sep>diff=create_diff(object_a object_b)<line_sep>serialized_diff=wire_encode(diff)<line_sep>self.assertThat(len(serialized_diff) LessThan(len(dumps(list(baseobj)))))<line_sep>self.assertThat(wire_decode(serialized_diff).apply(object_a) Equals(object_b))<block_end><def_stmt>test_different_objects self<block_start>""" Diffing objects that are entirely different results in a diff that can be applied. """<line_sep>object_a=DiffTestObj(a=pset(xrange(1000)))<line_sep>object_b=pmap({'1':34})<line_sep>diff=create_diff(object_a object_b)<line_sep>self.assertThat(wire_decode(wire_encode(diff)).apply(object_a) Equals(object_b))<block_end><def_stmt>test_different_uuids self<block_start>""" Diffing objects that have parts that are simply not equal can be applied to turn the first object into the second. """<line_sep>object_a=DiffTestObj(a=uuid4())<line_sep>object_b=DiffTestObj(a=uuid4())<line_sep>diff=create_diff(object_a object_b)<line_sep>self.assertThat(wire_decode(wire_encode(diff)).apply(object_a) Equals(object_b))<block_end><block_end><class_stmt>DiffTestObjInvariant(PClass)<block_start>""" Simple pyrsistent object with an invariant that spans multiple fields. Diffs which swap the values of the fields will trigger ``InvariantError` unless ``_perform_invariant_check`` is set to ``False`` or the diff is applied to an evolver object. """<line_sep>_perform_invariant_check=<true><line_sep>a=field()<line_sep>b=field()<def_stmt>__invariant__ self<block_start><if_stmt>self._perform_invariant_check<and>self.a<eq>self.b<block_start><return>(<false> "a must not equal b")<block_end><else_stmt><block_start><return>(<true> "")<block_end><block_end><block_end><class_stmt>InvariantDiffTests(TestCase)<block_start>""" Tests for creating and applying diffs to objects with invariant checks. """<def_stmt>test_straight_swap self<block_start>""" A diff composed of two separate ``set`` operations can be applied to an object without triggering an invariant exception. 
"""<line_sep>o1=DiffTestObjInvariant(a=1 b=2 )<line_sep>o2=DiffTestObjInvariant(a=2 b=1 )<line_sep>diff=create_diff(o1 o2)<line_sep>self.expectThat(len(diff.changes) Equals(2))<line_sep>self.assertEqual(o2 diff.apply(o1))<block_end><def_stmt>test_deep_swap self<block_start>""" A diff composed of two separate ``set`` operations can be applied to a nested object without triggering an invariant exception. """<line_sep>a=DiffTestObjInvariant(a=1 b=2 )<line_sep>b=DiffTestObjInvariant(a=3 b=4 )<line_sep>o1=DiffTestObjInvariant(a=a b=b )<line_sep>o2=o1.transform(['a'] DiffTestObjInvariant(a=2 b=1 ))<line_sep>diff=create_diff(o1 o2)<line_sep>self.expectThat(len(diff.changes) Equals(2))<line_sep>self.assertEqual(o2 diff.apply(o1))<block_end>@capture_logging(assertHasMessage DIFF_COMMIT_ERROR)<def_stmt>test_error_logging self logger<block_start>""" Failures while applying a diff emit a log message containing the full diff. """<line_sep>o1=DiffTestObjInvariant(a=1 b=2 )<line_sep>patcher=MonkeyPatcher()<line_sep>patcher.addPatch(DiffTestObjInvariant '_perform_invariant_check' <false>)<line_sep>patcher.patch()<try_stmt><block_start>o2=o1.set('b' 1)<block_end><finally_stmt><block_start>patcher.restore()<block_end>diff=create_diff(o1 o2)<line_sep>self.assertRaises(InvariantException diff.apply o1 )<block_end><def_stmt>test_application_add self<block_start>""" A diff on a Node, which *adds* and application with a volume *and* the manifestation for the volume, can be applied without triggering an invariant error on the Node. """<line_sep>node2=node_strategy(min_number_of_applications=1 stateful_applications=<true> ).example()<line_sep>application=node2.applications.values()[0]<line_sep>node1=node2.transform(['applications'] <lambda>o:o.remove(application.name)).transform(['manifestations'] <lambda>o:o.remove(application.volume.manifestation.dataset_id))<line_sep>diff=create_diff(node1 node2)<line_sep>self.assertEqual(node2 diff.apply(node1) )<block_end><def_stmt>test_application_modify self<block_start>""" A diff on a Node, which adds a volume to an *existing* application volume *and* the manifestation for the volume, can be applied without triggering an invariant error on the Node. """<line_sep>node2=node_strategy(min_number_of_applications=1 stateful_applications=<true> ).example()<line_sep>application=node2.applications.values()[0]<line_sep>volume=application.volume<line_sep>node1=node2.transform(['applications' application.name] <lambda>o:o.set('volume' <none>)).transform(['manifestations'] <lambda>o:o.remove(volume.manifestation.dataset_id))<line_sep>diff=create_diff(node1 node2)<line_sep>self.assertEqual(node2 diff.apply(node1) )<block_end><block_end><class_stmt>TransformProxyTests(TestCase)<block_start>""" Tests for ``_TransformProxy``. """<def_stmt>test_type_error self<block_start>""" The wrapped object must provide _IEvolvable. """<line_sep>e=self.assertRaises(TypeError _TransformProxy 1)<line_sep>self.assertEqual('1 does not provide _IEvolvable' e.message )<block_end><def_stmt>test_commit_no_change self<block_start>""" ``commit`` returns the original object if no changes have been performed. """<line_sep>original=pmap()<line_sep>self.assertIs(original _TransformProxy(original).commit())<block_end><def_stmt>test_transform_keyerror self<block_start>""" ``transform`` raises ``KeyError`` if the supplied ``path`` is not found. 
"""<line_sep>e=self.assertRaises(KeyError _TransformProxy(pmap()).transform ['a'] 1)<line_sep>self.assertEqual("Attribute or key 'a' not found in pmap({})" e.message )<block_end><def_stmt>test_transform_typeerror self<block_start>""" ``transform`` raises ``TypeError`` if the object at the supplied ``path`` does not provide ``_IEvolvable``. """<line_sep>proxy=_TransformProxy(pmap({'a':1}))<line_sep>e=self.assertRaises(TypeError proxy.transform ['a'] 2 )<line_sep>self.assertEqual("1 does not provide _IEvolvable" e.message)<block_end><def_stmt>test_transform_empty_path self<block_start>""" If ``transform`` is supplied with an empty path, the operation is performed on the root object. """<line_sep>proxy=_TransformProxy(pmap({'a':1}))<line_sep>proxy.transform([] <lambda>o:o.set('a' 2))<line_sep>self.assertEqual(pmap({'a':2}) proxy.commit() )<block_end><def_stmt>test_transform_deep_path self<block_start>""" If ``transform`` is supplied with a path containing multiple segments, the operation is performed on the object corresponding to the last segment. """<line_sep>proxy=_TransformProxy(pmap({'a':pmap({'b':pmap({'c':1})})}))<line_sep>proxy.transform(['a' 'b'] <lambda>o:o.set('c' 2))<line_sep>self.assertEqual(pmap({'a':pmap({'b':pmap({'c':2})})}) proxy.commit() )<block_end><def_stmt>test_transform_deep_evolver self<block_start>""" ``transform`` can perform operations on nested objects that have invariant constraints, without triggering the InvariantException. """<line_sep>proxy=_TransformProxy(pmap({'a':pmap({'b':pmap({'c':DiffTestObjInvariant(a=1 b=2)})})}))<line_sep># If these operations were performed directly on the Pyrsistent # structure it'd trigger InvariantException. proxy.transform(['a' 'b' 'c'] <lambda>o:o.set('a' 2))<line_sep>proxy.transform(['a' 'b' 'c'] <lambda>o:o.set('b' 1))<line_sep>self.assertEqual(pmap({'a':pmap({'b':pmap({'c':DiffTestObjInvariant(a=2 b=1)})})}) proxy.commit() )<block_end><block_end>
<import_from_stmt>typing Tuple Union<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>torchsparse.utils make_ntuple<line_sep>__all__=['get_kernel_offsets']<def_stmt>get_kernel_offsets size:Union[int Tuple[int <ellipsis>]] stride:Union[int Tuple[int <ellipsis>]]=1 dilation:Union[int Tuple[int <ellipsis>]]=1 device:str='cpu'<arrow>torch.Tensor<block_start>size=make_ntuple(size ndim=3)<line_sep>stride=make_ntuple(stride ndim=3)<line_sep>dilation=make_ntuple(dilation ndim=3)<line_sep>offsets=[(np.arange(-size[k]<floordiv>2+1 size[k]<floordiv>2+1)<times>stride[k]<times>dilation[k])<for>k range(3)]<line_sep># This condition check is only to make sure that our weight layout is # compatible with `MinkowskiEngine`. <if_stmt>np.prod(size)%2<eq>1<block_start>offsets=[[x y z]<for>z offsets[2]<for>y offsets[1]<for>x offsets[0]]<block_end><else_stmt><block_start>offsets=[[x y z]<for>x offsets[0]<for>y offsets[1]<for>z offsets[2]]<block_end>offsets=torch.tensor(offsets dtype=torch.int device=device)<line_sep><return>offsets<block_end>
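# Quick sanity check for get_kernel_offsets above; it relies only on the function itself plus torch. An odd-sized 3x3x3 kernel yields 27 offsets centered on the origin, so the middle row is the zero offset. offsets=get_kernel_offsets(3)<line_sep><assert_stmt>offsets.shape<eq>(27 3)<line_sep><assert_stmt>bool((offsets[13]<eq>0).all())# row 13 is the kernel center <line_sep>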
# Copyright (C) 2019 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions # and limitations under the License. <import_stmt>yaml<class_stmt>AttributedDict(dict)<block_start>"""Class to simplify the access to dictionary fields. """<def_stmt>__getattr__ self name<block_start><return>self[name]<block_end><def_stmt>__setattr__ self name value<block_start>self[name]=value<block_end><block_end><def_stmt>load_config config_path<block_start>"""Loads parameters into the dict from the specified path. :param config_path: Path to config file :return: Dictionary with parameters """<with_stmt>open(config_path 'r')<as>config_file<block_start>config_values=AttributedDict(yaml.load(config_file Loader=yaml.FullLoader))<block_end><return>config_values<block_end>
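# Minimal usage sketch for load_config above; the file name and config keys are hypothetical. <with_stmt>open('demo_config.yml' 'w')<as>config_file<block_start>config_file.write('batch_size: 32\nlearning_rate: 0.001\n')<block_end>cfg=load_config('demo_config.yml')<line_sep><assert_stmt>cfg.batch_size<eq>32# AttributedDict allows attribute-style access <line_sep>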
# Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. <import_stmt>logging<import_from_stmt>benchmarks memory<import_from_stmt>core perf_benchmark<import_from_stmt>measurements smoothness<import_from_stmt>telemetry benchmark<import_from_stmt>telemetry story<import_from_stmt>telemetry.timeline chrome_trace_category_filter<import_from_stmt>telemetry.timeline chrome_trace_config<import_from_stmt>telemetry.web_perf timeline_based_measurement<import_from_stmt>contrib.vr_benchmarks vr_browsing_mode_pages<import_from_stmt>contrib.vr_benchmarks webvr_sample_pages<import_from_stmt>contrib.vr_benchmarks webvr_wpr_pages<import_from_stmt>contrib.vr_benchmarks webxr_sample_pages<class_stmt>_BaseVRBenchmark(perf_benchmark.PerfBenchmark)<block_start>@classmethod<def_stmt>AddBenchmarkCommandLineArgs cls parser<block_start>parser.add_option('--shared-prefs-file' help='The path relative to the Chromium source root '<concat>'to a file containing a JSON list of shared '<concat>'preference files to edit and how to do so. '<concat>'See examples in //chrome/android/'<concat>'shared_preference_files/test/')<line_sep>parser.add_option('--disable-screen-reset' action='store_true' default=<false> help='Disables turning screen off and on after each story. '<concat>'This is useful for local testing when turning off the '<concat>'screen leads to locking the phone, which makes Telemetry '<concat>'not produce valid results.')<line_sep>parser.add_option('--recording-wpr' action='store_true' default=<false> help='Modifies benchmark behavior slightly while recording WPR files '<concat>'for it. This largely boils down to adding waits/sleeps in order '<concat>'to ensure that enough streaming data is recorded for the '<concat>'benchmark to run without issues.')<block_end><block_end><class_stmt>_BaseWebVRWebXRBenchmark(_BaseVRBenchmark)<block_start>SUPPORTED_PLATFORMS=[story.expectations.ALL_ANDROID]<def_stmt>CreateCoreTimelineBasedMeasurementOptions self<block_start>memory_categories=['blink.console' 'disabled-by-default-memory-infra']<line_sep>gpu_categories=['gpu']<line_sep>debug_categories=['toplevel' 'viz']<line_sep>category_filter=chrome_trace_category_filter.ChromeTraceCategoryFilter(','.join(['-*']+memory_categories+gpu_categories+debug_categories))<line_sep>options=timeline_based_measurement.Options(category_filter)<line_sep>options.config.enable_android_graphics_memtrack=<true><line_sep>options.config.enable_platform_display_trace=<true><line_sep>options.SetTimelineBasedMetrics(['memoryMetric' 'webvrMetric'])<line_sep>options.config.chrome_trace_config.SetMemoryDumpConfig(chrome_trace_config.MemoryDumpConfig())<line_sep><return>options<block_end>@classmethod<def_stmt>ShouldAddValue cls name from_first_story_run<block_start><del_stmt>from_first_story_run# unused <return>memory.DefaultShouldAddValueForMemoryMeasurement(name)<block_end><block_end><class_stmt>_BaseWebVRBenchmark(_BaseWebVRWebXRBenchmark)<block_start><def_stmt>SetExtraBrowserOptions self options<block_start>memory.SetExtraBrowserOptionsForMemoryMeasurement(options)<line_sep>options.AppendExtraBrowserArgs(['--enable-webvr' ])<block_end><block_end><class_stmt>_BaseWebXRBenchmark(_BaseWebVRWebXRBenchmark)<block_start><def_stmt>SetExtraBrowserOptions self options<block_start>memory.SetExtraBrowserOptionsForMemoryMeasurement(options)<line_sep>options.AppendExtraBrowserArgs(['--enable-features=WebXR' 
])<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])# pylint: disable=too-many-ancestors <class_stmt>XrWebVrStatic(_BaseWebVRBenchmark)<block_start>"""Measures WebVR performance with synthetic sample pages."""<def_stmt>CreateStorySet self options<block_start><return>webvr_sample_pages.WebVrSamplePageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.webvr.static'<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])# pylint: disable=too-many-ancestors <class_stmt>XrWebXrStatic(_BaseWebXRBenchmark)<block_start>"""Measures WebXR performance with synthetic sample pages."""<def_stmt>CreateStorySet self options<block_start><return>webxr_sample_pages.WebXrSamplePageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.webxr.static'<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])# pylint: disable=too-many-ancestors <class_stmt>XrWebVrWprStatic(_BaseWebVRBenchmark)<block_start>"""Measures WebVR performance with WPR copies of live websites."""<def_stmt>CreateStorySet self options<block_start><return>webvr_wpr_pages.WebVrWprPageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.webvr.wpr.static'<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])# pylint: disable=too-many-ancestors <class_stmt>XrWebVrLiveStatic(_BaseWebVRBenchmark)<block_start>"""Measures WebVR performance with live websites. This is a superset of xr.webvr.wpr.static, containing all the pages that it uses plus some that we would like to test with WPR, but behave differently when using WPR compared to the live version. """<def_stmt>CreateStorySet self options<block_start><if_stmt><not>hasattr(options 'use_live_sites')<or><not>options.use_live_sites# We log an error instead of raising an exception here because the # Telemetry presubmit unittests fail if we raise. <block_start>logging.error('Running the live sites benchmark without using live '<concat>'sites. 
Results will likely be incorrect for some sites.')<block_end><return>webvr_wpr_pages.WebVrLivePageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.webvr.live.static'<block_end><block_end><class_stmt>_BaseBrowsingBenchmark(_BaseVRBenchmark)<block_start>SUPPORTED_PLATFORMS=[story.expectations.ALL_ANDROID]<def_stmt>CreateTimelineBasedMeasurementOptions self<block_start>memory_categories=['blink.console' 'disabled-by-default-memory-infra']<line_sep>gpu_categories=['gpu']<line_sep>debug_categories=['toplevel' 'viz']<line_sep>category_filter=chrome_trace_category_filter.ChromeTraceCategoryFilter(','.join(['-*']+memory_categories+gpu_categories+debug_categories))<line_sep>options=timeline_based_measurement.Options(category_filter)<line_sep>options.config.enable_android_graphics_memtrack=<true><line_sep>options.config.enable_platform_display_trace=<true><line_sep>options.SetTimelineBasedMetrics(['frameCycleDurationMetric' 'memoryMetric'])<line_sep>options.config.chrome_trace_config.SetMemoryDumpConfig(chrome_trace_config.MemoryDumpConfig())<line_sep><return>options<block_end><def_stmt>SetExtraBrowserOptions self options<block_start>options.clear_sytem_cache_for_browser_and_profile_on_start=<true><line_sep>options.AppendExtraBrowserArgs(['--enable-gpu-benchmarking' '--touch-events=enabled' '--enable-vr-shell' ])<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>'])<class_stmt>XrBrowsingStatic(_BaseBrowsingBenchmark)<block_start>"""Benchmark for testing the VR Browsing Mode performance on sample pages."""<def_stmt>CreateStorySet self options<block_start><return>vr_browsing_mode_pages.VrBrowsingModePageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.browsing.static'<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])<class_stmt>XrBrowsingWprStatic(_BaseBrowsingBenchmark)<block_start>"""Benchmark for testing the VR Browsing Mode performance on WPR pages."""<def_stmt>CreateStorySet self options<block_start><return>vr_browsing_mode_pages.VrBrowsingModeWprPageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.browsing.wpr.static'<block_end><block_end>@benchmark.Owner(emails=['<EMAIL>' '<EMAIL>'])<class_stmt>XrBrowsingWprSmoothness(_BaseBrowsingBenchmark)<block_start>"""Benchmark for testing VR browser scrolling smoothness and throughput."""<line_sep>test=smoothness.Smoothness<def_stmt>CreateStorySet self options<block_start><return>vr_browsing_mode_pages.VrBrowsingModeWprSmoothnessPageSet()<block_end>@classmethod<def_stmt>Name cls<block_start><return>'xr.browsing.wpr.smoothness'<block_end><block_end>
# This sample tests the handling of the @final method decorator. <import_from_stmt>typing final<class_stmt>ClassA<block_start><def_stmt>func1 self<block_start><pass><block_end>@classmethod<def_stmt>func2 cls<block_start><pass><block_end>@final<def_stmt>func3 self<block_start><pass><block_end>@final@classmethod<def_stmt>func4 cls<block_start><pass><block_end>@final<def_stmt>_func5 self<block_start><pass><block_end>@final<def_stmt>__func6 self<block_start><pass><block_end><block_end># This should generate an error because func3 is final. ClassA.func3=<lambda>self:<none><line_sep># This should generate an error because func4 is final. ClassA.func4=<lambda>cls:<none><line_sep># This should generate an error because _func5 is final. ClassA._func5=<lambda>self:<none><class_stmt>ClassB(ClassA)<block_start><def_stmt>func1 self<block_start><pass><block_end>@classmethod<def_stmt>func2 cls<block_start><pass><block_end># This should generate an error because func3 is # defined as final. <def_stmt>func3 self<block_start><pass><block_end># This should generate an error because func4 is # defined as final. @classmethod<def_stmt>func4 cls<block_start><pass><block_end># This should generate an error because _func5 is # defined as final. <def_stmt>_func5 self<block_start><pass><block_end># This should not generate an error because double # underscore symbols are exempt from this check. <def_stmt>__func6 self<block_start><pass><block_end><block_end><class_stmt>Base4<block_start><ellipsis><block_end><class_stmt>Base5<block_start>@final<def_stmt>__init__ self v:int<arrow><none><block_start><ellipsis><block_end><block_end><class_stmt>C(Base4 Base5)# This should generate an error because it overrides Base5, # and __init__ is marked final there. <block_start><def_stmt>__init__ self<arrow><none><block_start><ellipsis><block_end><block_end>
<def_stmt>binary_search arr target<block_start>low,high=0 len(arr)-1<while_stmt>low<l>high<block_start># floor division keeps mid an integer index (plain / yields a float under Python 3) mid=(low+high)<floordiv>2<if_stmt>arr[mid]<eq>target<block_start><return>mid<block_end><elif_stmt>arr[mid]<g>target<block_start>high=mid-1<block_end><else_stmt><block_start>low=mid+1<block_end><block_end># the loop exits before inspecting the final candidate, so check arr[high] here <try_stmt><block_start><if_stmt>arr[high]<eq>target<block_start><return>high<block_end><else_stmt><block_start><return>-1<block_end><block_end><except_stmt>IndexError<block_start># empty input: arr[high] with high == -1 raises on an empty list <return>-1<block_end><block_end>
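# Sanity checks for the corrected binary_search above; plain asserts with no assumptions beyond the function itself. <assert_stmt>binary_search([1 3 5 7 9] 7)<eq>3<line_sep><assert_stmt>binary_search([1 3 5 7 9] 4)<eq>-1<line_sep><assert_stmt>binary_search([] 1)<eq>-1# empty input is handled by the IndexError guard <line_sep>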
''' Global settings '''<import_stmt>tensorflow<as>tf<line_sep># Default boxes # DEFAULT_BOXES = ((x1_offset, y1_offset, x2_offset, y2_offset), (...), ...) # Offset is relative to upper-left-corner and lower-right-corner of the feature map cell DEFAULT_BOXES=((-0.5 -0.5 0.5 0.5) (0.2 0.2 -0.2 -0.2) (-0.8 -0.2 0.8 0.2) (-0.2 -0.8 0.2 0.8))<line_sep>NUM_DEFAULT_BOXES=len(DEFAULT_BOXES)<line_sep># Constants (TODO: Keep this updated as we go along) NUM_CLASSES=3# 2 signs + 1 background class NUM_CHANNELS=1# grayscale->1, RGB->3 NUM_PRED_CONF=NUM_DEFAULT_BOXES<times>NUM_CLASSES# number of class predictions per feature map cell NUM_PRED_LOC=NUM_DEFAULT_BOXES<times>4# number of localization regression predictions per feature map cell # Bounding box parameters IOU_THRESH=0.5# match ground-truth box to default boxes exceeding this IOU threshold, during data prep NMS_IOU_THRESH=0.2# IOU threshold for non-max suppression # Negatives-to-positives ratio used to filter training data NEG_POS_RATIO=5# negative:positive = NEG_POS_RATIO:1 # Class confidence threshold to count as detection CONF_THRESH=0.9<line_sep># Model selection and dependent parameters MODEL='AlexNet'# AlexNet/VGG16/ResNet50 <if_stmt>MODEL<eq>'AlexNet'#IMG_H, IMG_W = 300, 300 #FM_SIZES = [[36, 36], [17, 17], [9, 9], [5, 5]] # feature map sizes for SSD hooks via TensorBoard visualization (HxW) <block_start>IMG_H,IMG_W=260 400<line_sep>FM_SIZES=[[31 48] [15 23] [8 12] [4 6]]<block_end><else_stmt><block_start><raise>NotImplementedError('Model not implemented')<block_end># Model hyper-parameters OPT=tf.train.AdadeltaOptimizer()<line_sep>REG_SCALE=1e-2# L2 regularization strength LOC_LOSS_WEIGHT=1.# weight of localization loss: loss = conf_loss + LOC_LOSS_WEIGHT * loc_loss # Training process RESUME=<false># resume training from previously saved model? NUM_EPOCH=200<line_sep>BATCH_SIZE=32# batch size for training (relatively small) VALIDATION_SIZE=0.05# fraction of total training set to use as validation set SAVE_MODEL=<true># save trained model to disk? MODEL_SAVE_PATH='./model.ckpt'# where to save trained model
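# Worked example of the arithmetic above, using only constants defined in this file: across the AlexNet feature maps the detector emits (31*48 + 15*23 + 8*12 + 4*6) * NUM_DEFAULT_BOXES = 1953 * 4 = 7812 default boxes in total. TOTAL_DEFAULT_BOXES=sum(h<times>w<for>h,w FM_SIZES)<times>NUM_DEFAULT_BOXES<line_sep>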
# Copyright 2018 Recruit Communications Co., Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_stmt>cpp_pyqubo SubH<import_from_stmt>pyqubo.array Array<import_from_stmt>pyqubo.integer Integer<import_stmt>numpy<as>np<class_stmt>LogEncInteger(Integer)<block_start>"""Log encoded integer. The value that takes :math:`[0, n]` is represented, with :math:`d=\\lfloor\\log_{2}n\\rfloor`, by :math:`\\sum_{i=0}^{d-1}2^{i}x_{i}+(n-2^{d}+1)x_{d}` without any constraint. Args: label (str): Label of the integer. value_range (tuple(int, int)): Lower and upper values of the integer, inclusive. Examples: This example finds the value `a`, `b` such that :math:`a+b=5` and :math:`2a-b=1`. >>> from pyqubo import LogEncInteger >>> import dimod >>> a = LogEncInteger("a", (0, 4)) >>> b = LogEncInteger("b", (0, 4)) >>> M=2.0 >>> H = (2*a-b-1)**2 + M*(a+b-5)**2 >>> model = H.compile() >>> bqm = model.to_bqm() >>> sampleset = dimod.ExactSolver().sample(bqm) >>> decoded_samples = model.decode_sampleset(sampleset) >>> best_sample = min(decoded_samples, key=lambda s: s.energy) >>> print(best_sample.subh['a']) 2.0 >>> print(best_sample.subh['b']) 3.0 """<def_stmt>__init__ self label value_range<block_start>lower,upper=value_range<assert_stmt>upper<g>lower "upper value should be larger than lower value"<assert_stmt>isinstance(lower int)<assert_stmt>isinstance(upper int)<line_sep>span=upper-lower<line_sep>self._num_variables=int(np.log2(span))+1<line_sep>self.array=Array.create(label shape=self._num_variables vartype='BINARY')<line_sep>d=self._num_variables-1<line_sep>express=lower+sum(self.array[i]<times>2<power>i<for>i range(self._num_variables-1))<line_sep>express<augadd>(span-(2<power>d-1))<times>self.array[-1]<line_sep>express=SubH(express label)<line_sep>super().__init__(label=label value_range=value_range express=express)<block_end><block_end>
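A small worked check of the coefficients the constructor above produces for value_range=(0, 4): span=4 yields three binary variables weighted 2**0, 2**1 and span-(2**2-1)=1, so no assignment can leave the range.

import itertools

coeffs = [1, 2, 1]  # 2**0, 2**1, and the clamped top weight 4 - (2**2 - 1)
sums = sorted({sum(c * b for c, b in zip(coeffs, bits))
               for bits in itertools.product([0, 1], repeat=3)})
print(sums)  # [0, 1, 2, 3, 4] -- exactly the encoded range, with no gaps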
"""Decorator to make it cope with two staged computation easily."""<import_from_stmt>typing Any Callable Generator Tuple Union cast<import_stmt>dask<import_from_stmt>.intermediate Intermediate<line_sep>Decoratee=Callable[<ellipsis> Generator[Any Any Intermediate]]<line_sep>Completion=Callable[[Any] Intermediate]<def_stmt>staged func:Decoratee <arrow>Callable[<ellipsis> Union[Tuple[Any Completion] Intermediate]]<block_start>"""Transform a two stage computation into a result and a completion function."""<def_stmt>staged_imp *args:Any _staged:bool=<false> **kwargs:Any<arrow>Union[Tuple[Any Completion] Intermediate]<block_start>gen=func(*args **kwargs)<def_stmt>completion computed:Any<arrow>Intermediate<block_start><try_stmt><block_start>gen.send(computed)<line_sep><raise>RuntimeError("Computation didn't stop.")<block_end><except_stmt>StopIteration<as>stop<block_start><return>cast(Intermediate stop.value)<block_end><block_end><if_stmt>_staged<block_start><return>next(gen) completion<block_end><else_stmt><block_start>(computed )=dask.compute(next(gen))<line_sep><return>completion(computed)<block_end><block_end><return>staged_imp<block_end>
<import_from_stmt>django.conf.urls url<import_from_stmt>dojo.components views<line_sep>urlpatterns=[url(r'^components$' views.components name='components') ]<line_sep>
#! /usr/bin/env python3 # coding=utf-8 <import_stmt>os<import_stmt>subprocess<import_stmt>time<import_stmt>toml<import_from_stmt>jsonrpcclient.http_client HTTPClient<def_stmt>block_number host="127.0.0.1" port=1337<block_start>""" url: str port: int """<line_sep>url="http://"+host+":"+str(port)<try_stmt><block_start>response=HTTPClient(url).request("blockNumber" [])<line_sep><return>int(response 16)<block_end><except_stmt><block_start><return><none><block_end><block_end><def_stmt>run_subprocess cmd shell=<true><block_start>""" cmd: str, style like "ls -al" """<line_sep><return>subprocess.Popen(cmd shell=shell stdout=subprocess.PIPE)<block_end><def_stmt>start node_number log_level=""<block_start>""" node_number: int log_level: str """<for_stmt>i range(node_number+1)<block_start>p=run_subprocess(f'bin/cita bebop setup node/{i}')<line_sep>p.wait()<line_sep>run_subprocess(f'bin/cita bebop start node/{i} {log_level}')<block_end><block_end><def_stmt>stop node_number<block_start>""" node_number: int """<for_stmt>i range(node_number+1)<block_start>p=run_subprocess(f'bin/cita bebop stop node/{i}')<line_sep>p.wait()<block_end><block_end><def_stmt>clean <block_start>run_subprocess("rm node/ -rf")<block_end><def_stmt>modify_forever node_number<block_start>""" node_number: int """<for_stmt>i range(node_number+1)<block_start><with_stmt>open(f"./node/{i}/forever.toml" "r")<as>file<block_start>forever_conf=toml.load(file)<line_sep>forever_conf["process"][-1]["respawn"]=10000<line_sep>forever_conf["process"][-2]["respawn"]=10000<block_end><with_stmt>open(f"./node/{i}/forever.toml" "w")<as>file<block_start>toml.dump(forever_conf file)<block_end><block_end><block_end><def_stmt>remove_statedb node_number<block_start>""" node_number: int """<for_stmt>i range(node_number+1)<block_start>run_subprocess(f'rm ./node/{i}/data/statedb/ -rf')<block_end><block_end><def_stmt>kill_process always occasionally<block_start>""" :param always: path, str :param occasionally: path, str :return: None """<for_stmt>i range(50)<block_start><if_stmt>os.path.exists(always)<block_start><with_stmt>open(always "r")<as>file<block_start>always_kill=file.read()<block_end>run_subprocess(f"kill -9 {always_kill}")<block_end><if_stmt>i%4<eq>0<and>os.path.exists(occasionally)<block_start><with_stmt>open(occasionally "r")<as>file<block_start>occasionally_kill=file.read()<block_end>run_subprocess(f"kill -9 {occasionally_kill}")<block_end>time.sleep(0.3)<block_end><block_end><def_stmt>prepare <block_start>p=run_subprocess("python3 ./scripts/create_cita_config.py create --super_admin '<PASSWORD>' --nodes '127.0.0.1:4000,127.0.0.1:4001,127.0.0.1:4002,127.0.0.1:4003' --chain_name node > /dev/null")<line_sep>p.wait()<line_sep>modify_forever(3)<line_sep>start(3)<line_sep>time.sleep(30)<block_end><def_stmt>test_chain_higher_than_executor <block_start><for_stmt>i range(10)<block_start>point_number=block_number()<line_sep>print(f"point height is {point_number}")<line_sep>stop(0)<line_sep>remove_statedb(0)<line_sep>start(0)<line_sep>start_time=time.time()<while_stmt><true><block_start>new_node_block_height=block_number()<if_stmt>new_node_block_height<and>new_node_block_height<g>point_number+2<block_start>print(f"Current height is {new_node_block_height}, finish {i}")<line_sep><break><block_end><else_stmt><block_start>print(f"Current height is {new_node_block_height}, wait...")<line_sep>time.sleep(3)<line_sep>duration_time=time.time()-start_time<if_stmt>duration_time<g>60<block_start><raise>Exception("robustness test 
failure")<block_end><block_end><block_end><block_end><block_end><def_stmt>test_executor_higher_than_chain <block_start>kill_process('./node/0/.cita-executor.pid' "./node/0/.cita-chain.pid")<line_sep>kill_process("./node/0/.cita-chain.pid" './node/0/.cita-executor.pid')<line_sep>time.sleep(6)<line_sep>point_number=block_number(port=1339)<line_sep>print(f"point height is {point_number}")<line_sep>start_time=time.time()<while_stmt><true><block_start>new_node_block_height=block_number()<if_stmt>new_node_block_height<and>new_node_block_height<g>point_number+10<block_start>print(f"Current height is {new_node_block_height}, finish")<line_sep><break><block_end><else_stmt><block_start>print(f"Current height is {new_node_block_height}, wait...")<line_sep>time.sleep(3)<line_sep>duration_time=time.time()-start_time<if_stmt>duration_time<g>60<block_start><raise>Exception("robustness test failure")<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>pwd=os.getcwd()<line_sep>os.chdir(f'{pwd}/target/install')<line_sep>print("step 0: prepare")<line_sep>clean()<line_sep>prepare()<line_sep>print("step 1: Chain higher than Executor")<line_sep>test_chain_higher_than_executor()<line_sep>print("step 2: Executor higher than Chain")<line_sep>test_executor_higher_than_chain()<line_sep>print("step 3: stop")<line_sep>stop(3)<line_sep>print("step 4: clean up")<line_sep>clean()<block_end>
"""Handler for mediate-request message."""<import_from_stmt>.....messaging.base_handler BaseHandler HandlerException<import_from_stmt>.....messaging.request_context RequestContext<import_from_stmt>.....messaging.responder BaseResponder<import_from_stmt>..manager MediationManager MediationAlreadyExists<import_from_stmt>..messages.mediate_request MediationRequest<import_from_stmt>..messages.problem_report CMProblemReport ProblemReportReason<class_stmt>MediationRequestHandler(BaseHandler)<block_start>"""Handler for mediate-request message."""<async_keyword><def_stmt>handle self context:RequestContext responder:BaseResponder<block_start>"""Handle mediate-request message."""<line_sep>self._logger.debug("%s called with context %s" self.__class__.__name__ context)<assert_stmt>isinstance(context.message MediationRequest)<if_stmt><not>context.connection_ready<block_start><raise>HandlerException("Invalid mediation request: no active connection")<block_end>mgr=MediationManager(context.profile)<try_stmt><block_start>record=<await>mgr.receive_request(context.connection_record.connection_id context.message)<if_stmt>context.settings.get("mediation.open" <false>)<block_start>record,grant=<await>mgr.grant_request(record.mediation_id)<line_sep><await>responder.send_reply(grant)<block_end><block_end><except_stmt>MediationAlreadyExists<block_start>reply=CMProblemReport(description={"en":"Mediation request already exists from this connection" "code":ProblemReportReason.MEDIATION_REQUEST_REPEAT.value })<line_sep>reply.assign_thread_from(context.message)<line_sep><await>responder.send_reply(reply)<block_end><block_end><block_end>
"""Definitions for the primitive `extract_kwarg`."""<import_from_stmt>..lib Inferrer standard_prim<import_from_stmt>. primitives<as>P<line_sep>@standard_prim(P.extract_kwarg)<class_stmt>_ExtractKwArgInferrer(Inferrer)<block_start>"""Infer the return type of primitive `extract_kwarg`."""<async_keyword><def_stmt>normalize_args self args<block_start><return>args<block_end><async_keyword><def_stmt>infer self engine key kwarg<block_start><assert_stmt>key.xvalue()<is>kwarg.key<line_sep><return>kwarg.argument<block_end><block_end>__operation_defaults__={"name":"extract_kwarg" "registered_name":"extract_kwarg" "mapping":P.extract_kwarg "python_implementation":<none> }<line_sep>__primitive_defaults__={"name":"extract_kwarg" "registered_name":"extract_kwarg" "type":"inference" "python_implementation":<none> "inferrer_constructor":_ExtractKwArgInferrer "grad_transform":<none> }<line_sep>
# encoding:utf-8 <import_stmt>tensorflow<as>tf<import_stmt>os<import_from_stmt>tensorflow.python.framework graph_util<import_from_stmt>tensorflow.python.platform gfile<def_stmt>save_mode_pb pb_file_path<block_start>x=tf.placeholder(tf.int32 name='x')<line_sep>y=tf.placeholder(tf.int32 name='y')<line_sep>b=tf.Variable(2 name='b')<line_sep>xy=tf.multiply(x y)<line_sep># the output op needs an explicit name attribute op=tf.add(xy b name='op_to_store')<line_sep>sess=tf.Session()<line_sep>sess.run(tf.global_variables_initializer())<line_sep>path=os.path.dirname(os.path.abspath(pb_file_path))<if_stmt><not>os.path.isdir(path)<block_start>os.makedirs(path)<block_end># convert_variables_to_constants requires output_node_names, a list; multiple names are allowed constant_graph=graph_util.convert_variables_to_constants(sess sess.graph_def ['op_to_store'])<with_stmt>tf.gfile.FastGFile(pb_file_path mode='wb')<as>f<block_start>f.write(constant_graph.SerializeToString())<block_end># test feed_dict={x:2 y:4}<line_sep>print(sess.run(op feed_dict))<block_end><def_stmt>restore_mode_pb pb_file_path<block_start>sess=tf.Session()<with_stmt>gfile.FastGFile(pb_file_path 'rb')<as>f<block_start>graph_def=tf.GraphDef()<line_sep>graph_def.ParseFromString(f.read())<line_sep>sess.graph.as_default()<line_sep>tf.import_graph_def(graph_def name='')<block_end>print(sess.run('b:0'))<line_sep>input_x=sess.graph.get_tensor_by_name('x:0')<line_sep>input_y=sess.graph.get_tensor_by_name('y:0')<line_sep>op=sess.graph.get_tensor_by_name('op_to_store:0')<line_sep>ret=sess.run(op {input_x:5 input_y:5})<line_sep>print(ret)<block_end><if_stmt>__name__<eq>'__main__'# save_mode_pb("Data/Models/generation_model/fajietest.ckpt") <block_start>restore_mode_pb("Data/Models/generation_model/fajietest.ckpt")<block_end>
<class_stmt>Class1(object)<block_start><def_stmt>__init__ self<block_start><pass><block_end><def_stmt>test1 self<block_start><return>5<block_end><block_end><class_stmt>Class2(object)<block_start><def_stmt>test1 self<block_start><return>6<block_end><block_end><class_stmt>Class3(object)<block_start><def_stmt>test1 self x<block_start><return>self.test2(x)-1<block_end><def_stmt>test2 self x<block_start><return>2<times>x<block_end><block_end>a=Class1()<line_sep>print(a.test1())<line_sep>a=Class2()<line_sep>print(a.test1())<line_sep>a=Class3()<line_sep>print(a.test1(3))<line_sep>print(a.test2(3))<line_sep>
<import_from_stmt>ctranslate2.specs.model_spec LayerSpec<import_from_stmt>ctranslate2.specs.model_spec ModelSpec<import_from_stmt>ctranslate2.specs.transformer_spec TransformerSpec<line_sep>
<import_from_future_stmt> unicode_literals<line_sep>default_app_config='dynamic_search.apps.DynamicSearchApp'<line_sep>
<import_stmt>clr<line_sep>clr.AddReference('RevitAPI')<import_from_stmt>Autodesk.Revit.DB *<line_sep>vals=IN[0]<line_sep>dispunit=IN[1]<line_sep>elementlist=[]<for_stmt>val vals<block_start>elementlist.append(UnitUtils.ConvertFromInternalUnits(val dispunit))<block_end>OUT=elementlist<line_sep>
<import_stmt>pandas<as>pd<import_from_stmt>functools reduce<import_stmt>os<line_sep>model_name=context.current_model.name<line_sep>output=""<line_sep>df:pd.DataFrame=ref(model_name)<line_sep>df.columns=df.columns.str.lower()# Snowflake has uppercase columns output<augadd>f"my_float {df.my_float[0]}\n"<line_sep>write_to_source(df "results" "some_source" mode="overwrite")<line_sep>source_size=len(source("results" "some_source"))<line_sep>output<augadd>f"source size {source_size}\n"<line_sep>write_to_source(df "results" "some_source" mode="append")<line_sep>source_size=len(source("results" "some_source"))<line_sep>output<augadd>f"source size {source_size}\n"<line_sep>path=reduce(os.path.join [os.environ["temp_dir"] model_name+".write_to_source_twice.txt"])<with_stmt>open(path "w")<as>file<block_start>file.write(output)<block_end>
<import_stmt>pytest<import_stmt>stan<line_sep>program_code="parameters {real y;} model {y ~ normal(0,1);}"<line_sep>@pytest.fixture(scope="module")<def_stmt>normal_posterior <block_start><return>stan.build(program_code)<block_end><def_stmt>test_normal_stepsize normal_posterior<block_start>fit=normal_posterior.sample(stepsize=0.001)<assert_stmt>fit<is><not><none><block_end>
# -*- coding: utf-8 -*- """ This module contains the Screw6 data structure. """<import_from_stmt>sympy zeros<import_from_stmt>sympy ShapeError<class_stmt>Screw6(object)<block_start>""" Data structure: Represent the data structure (base class) to hold a 6x6 matrix which in turn contains four 3x3 matrices. """<def_stmt>__init__ self *args **kwargs<block_start>""" Constructor period. Usage: >>> # initialise to 0 by default Screw6() >>> # initialise to a given 6x6 matrix Screw6(<value>) >>> # intiialise each of the 4 sub-matrices individually Screw6(<top-left>, <top-right>, <bottom-left>, <bottom-right>) >>> # initialise using keywords Screw6(value=<value>) Screw6( tl=<top-left>, tr=<top-right>, bl=<bottom-left>, br=<bottom-right> ) """<line_sep>self._val=zeros(6 6)<if_stmt>len(args)<eq>1<block_start>self.val=args[0]<block_end><elif_stmt>len(args)<eq>4<block_start>self.topleft=args[0]<line_sep>self.topright=args[1]<line_sep>self.botleft=args[2]<line_sep>self.botright=args[3]<block_end><elif_stmt>len(args)<g>0<block_start><raise>NotImplementedError("""Screw6 Constructor does not accept %s positional arguments. See Usage."""%(str(len(args))))<block_end><if_stmt>len(kwargs)<eq>4<block_start>self.topleft=kwargs['tl']<line_sep>self.topright=kwargs['tr']<line_sep>self.botleft=kwargs['bl']<line_sep>self.botright=kwargs['br']<block_end><elif_stmt>len(kwargs)<eq>1<block_start>self.val=kwargs['value']<block_end><elif_stmt>len(kwargs)<g>0<block_start><raise>NotImplementedError("""Screw6 Constructor does not accept %s keyword arguments. See Usage."""%(str(len(kwargs))))<block_end><block_end><def_stmt>__str__ self<block_start>row_format='['+((('{},'<times>6)+';')<times>6)+']'<line_sep>elements=list()<for_stmt>i range(self._val.rows)<block_start><for_stmt>j range(self._val.cols)<block_start>elements.append(str(self._val[i j]))<block_end><block_end>str_format=row_format.format(*elements)<line_sep><return>str_format<block_end><def_stmt>__repr__ self<block_start>repr_format='Screw6()'<line_sep><return>repr_format<block_end>@property<def_stmt>val self<block_start>""" Get current value. Returns: A 6x6 Matrix with the current value """<line_sep><return>self._val<block_end>@val.setter<def_stmt>val self value<block_start>""" Set the current value. Args: value: A 6x6 Matrix """<if_stmt>value.rows<ne>6<or>value.cols<ne>6<block_start><raise>ShapeError("Matrix size has to be 6x6.")<block_end>self._val=value<block_end>@property<def_stmt>topleft self<block_start>""" Get the top-left part of the 6x6 matrix. Returns: A 3x3 Matrix. """<line_sep><return>self._val[0:3 0:3]<block_end>@property<def_stmt>topright self<block_start>""" Get the top-right part of the 6x6 matrix. Returns: A 3x3 Matrix. """<line_sep><return>self._val[0:3 3:6]<block_end>@property<def_stmt>botleft self<block_start>""" Get the bottom-left part of the 6x6 matrix. Returns: A 3x3 Matrix. """<line_sep><return>self._val[3:6 0:3]<block_end>@property<def_stmt>botright self<block_start>""" Get the bottom-right part of the 6x6 matrix. Returns: A 3x3 Matrix. """<line_sep><return>self._val[3:6 3:6]<block_end>@topleft.setter<def_stmt>topleft self value<block_start>""" Set the top-left part of the 6x6 matrix. Args: value: A 3x3 Matrix - top-left value. """<if_stmt>value.rows<ne>3<or>value.cols<ne>3<block_start><raise>ShapeError("Top-left value size has to be 3x3.")<block_end>self._val[0:3 0:3]=value<block_end>@topright.setter<def_stmt>topright self value<block_start>""" Set the top-right part of the 6x6 matrix. Args: value: A 3x3 Matrix - top-right value. 
"""<if_stmt>value.rows<ne>3<or>value.cols<ne>3<block_start><raise>ShapeError("Top-right value size has to be 3x3.")<block_end>self._val[0:3 3:6]=value<block_end>@botleft.setter<def_stmt>botleft self value<block_start>""" Set the bottom-left part of the 6x6 matrix. Args: value: A 3x3 Matrix - bottom-left value. """<if_stmt>value.rows<ne>3<or>value.cols<ne>3<block_start><raise>ShapeError("Bottom-left value size has to be 3x3.")<block_end>self._val[3:6 0:3]=value<block_end>@botright.setter<def_stmt>botright self value<block_start>""" Set the bottom-right part of the 6x6 matrix. Args: value: A 3x3 Matrix - bottom-right value. """<if_stmt>value.rows<ne>3<or>value.cols<ne>3<block_start><raise>ShapeError("Bottom-right value size has to be 3x3.")<block_end>self._val[3:6 3:6]=value<block_end><def_stmt>__eq__ self other<block_start>"""Check equality between two instances of Screw6."""<if_stmt>type(self)<ne>type(other)<block_start><raise>ValueError("Unable to compare %s with Screw6 type."%str(type(other)))<block_end><return>self.val<eq>other.val<block_end><def_stmt>__ne__ self other<block_start>"""Check non-equality between two instances of Screw6."""<line_sep><return><not>self<eq>other<block_end><block_end>
<import_from_future_stmt> absolute_import<import_from_stmt>pex.marshaller *<line_sep>
<import_from_stmt>.tool.func *<def_stmt>recent_app_submit_2 conn<block_start>curs=conn.cursor()<line_sep>div=''<line_sep>curs.execute(db_change('select data from other where name = "requires_approval"'))<line_sep>requires_approval=curs.fetchall()<if_stmt>requires_approval<and>requires_approval[0][0]<ne>'on'<block_start>div<augadd>load_lang('approval_requirement_disabled')<block_end><if_stmt>flask.request.method<eq>'GET'<block_start>curs.execute(db_change('select data from user_set where name = "application"'))<line_sep>db_data=curs.fetchall()<if_stmt>db_data<block_start>div<augadd>''+load_lang('all_register_num')+' : '+str(len(db_data))+'<hr class="main_hr">'+''<line_sep>div<augadd>''' <table id="main_table_set"> <tr id="main_table_top_tr"> <td id="main_table_width_half">'''+load_lang('id')+'''</td> <td id="main_table_width_half">'''+load_lang('email')+'''</td> </tr> <tr id="main_table_top_tr"> <td>'''+load_lang('approval_question')+'''</td> <td>'''+load_lang('answer')+'''</td> </tr> '''<for_stmt>application db_data<block_start>application=json.loads(application[0])<if_stmt>'question'<in>application<block_start>question=html.escape(application['question'])<line_sep>question=question<if>question<ne>''<else>'<br>'<block_end><else_stmt><block_start>question='<br>'<block_end><if_stmt>'answer'<in>application<block_start>answer=html.escape(application['answer'])<line_sep>answer=answer<if>answer<ne>''<else>'<br>'<block_end><else_stmt><block_start>answer='<br>'<block_end><if_stmt>'email'<in>application<block_start>email=html.escape(application['email'])<line_sep>email=email<if>email<ne>''<else>'<br>'<block_end><else_stmt><block_start>email='<br>'<block_end>div<augadd>''' <form method="post"> <tr> <td>'''+application['id']+'''</td> <td>'''+email+'''</td> </tr> <tr> <td>'''+question+'''</td> <td>'''+answer+'''</td> </tr> <tr> <td colspan="3"> <button type="submit" id="save" name="approve" value="'''+application['id']+'''"> '''+load_lang('approve')+''' </button> <button type="submit" name="decline" value="'''+application['id']+'''"> '''+load_lang('decline')+''' </button> </td> </tr> </form> '''<block_end>div<augadd>'</table>'<block_end><else_stmt><block_start>div<augadd>load_lang('no_applications_now')<block_end><return>easy_minify(flask.render_template(skin_check() imp=[load_lang('application_list') wiki_set() wiki_custom() wiki_css([0 0])] data=div menu=[['other' load_lang('return')]]))<block_end><else_stmt><block_start><if_stmt>admin_check(<none> 'app submit')<ne>1<block_start><return>re_error('/ban')<block_end><if_stmt>flask.request.form.get('approve' '')<ne>''<block_start>curs.execute(db_change('select data from user_set where id = ? and name = "application"') [flask.request.form.get('approve' '')])<line_sep>application=curs.fetchall()<if_stmt><not>application<block_start><return>re_error('/error/26')<block_end><else_stmt><block_start>application=json.loads(application[0][0])<block_end>add_user(application['id'] application['pw'] application['email'] application['encode'])<line_sep>curs.execute(db_change("insert into user_set (name, id, data) values ('approval_question', ?, ?)") [application['id'] application['question']])<line_sep>curs.execute(db_change("insert into user_set (name, id, data) values ('approval_question_answer', ?, ?)") [application['id'] application['answer']])<line_sep>curs.execute(db_change('delete from user_set where id = ? 
and name = "application"') [application['id']])<line_sep>conn.commit()<block_end><elif_stmt>flask.request.form.get('decline' '')<ne>''<block_start>curs.execute(db_change('delete from user_set where id = ? and name = "application"') [flask.request.form.get('decline' '')])<line_sep>conn.commit()<block_end><return>redirect('/app_submit')<block_end><block_end>
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. <import_stmt>argparse<import_from_stmt>typing Dict Type<import_stmt>glob<import_stmt>os<import_stmt>pathlib<import_from_stmt>runstats Statistics<def_stmt>main <block_start>parser=argparse.ArgumentParser(description='NAS E2E Runs')<line_sep>parser.add_argument('--logdir' type=str default='D:\\logdir\\azure\\random_cifar_test' help='folder with logs')<line_sep>args,extra_args=parser.parse_known_args()<line_sep>lines=[]<line_sep>top1s=[]<for_stmt>filepath pathlib.Path(args.logdir).rglob('logs.log')<block_start>epoch=0<for_stmt>line pathlib.Path(filepath).read_text().splitlines()<block_start><if_stmt>'[eval_test] Epoch: [ 1/1] '<in>line<block_start>top1s.append(Statistics())<line_sep>top1=float(line.strip().split('(')[-1].split(',')[0].split('%')[0].strip())/100.0<line_sep>lines.append(f'{epoch}\t{top1}\t{str(filepath)}')<line_sep>top1s[epoch].push(top1)<line_sep>epoch<augadd>1<block_end><block_end><block_end>pathlib.Path(os.path.join(args.logdir 'summary.tsv')).write_text('\n'.join(lines))<line_sep>stat_lines=['epoch\tmean\tstddev\tcount']<for_stmt>i,top1 enumerate(top1s)<block_start>stat_lines.append(f'{i}\t{top1.mean()}\t{top1.stddev()<if>len(top1)<g>1<else>float("NaN")}\t{len(top1)}')<block_end>pathlib.Path(os.path.join(args.logdir 'summary_stats.tsv')).write_text('\n'.join(stat_lines))<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
# Copyright (c) 2019 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom # the Software is furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. <import_stmt>argparse<import_stmt>re<import_stmt>sys<import_from_stmt>uxy base<def_stmt>_linux_args args<block_start>parser=argparse.ArgumentParser("__main__.py w" add_help=<false>)<line_sep>parser.add_argument("-h" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--no-header" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-s" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--short" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-f" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--from" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-o" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--old-style" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--help" action="store_true" default=argparse.SUPPRESS)<line_sep>base.check_args(args parser)<line_sep><return>args+[]<block_end><def_stmt>_osx_args args<block_start>parser=argparse.ArgumentParser("__main__.py w" add_help=<false>)<line_sep>parser.add_argument("-h" action="store_true" default=argparse.SUPPRESS)<line_sep>base.check_args(args parser)<line_sep><return>args+[]<block_end><def_stmt>_bsd_args args<block_start><return>args+[]<block_end><def_stmt>w args uxy_args# Launch the underlying binary. <block_start><if_stmt>uxy_args.platform.startswith("linux")<block_start>args=_linux_args(args)<block_end><elif_stmt>uxy_args.platform.startswith("darwin")<block_start>args=_osx_args(args)<block_end><else_stmt><block_start>args=_bsd_args(args)<block_end>proc=base.launch(uxy_args ['w']+args[1:])<line_sep># Ignore status line. proc.readline()<line_sep># Process the header line. hdr=proc.readline()<line_sep>parser=base.FmtParser(hdr)<line_sep>fmt=base.Format(hdr)<line_sep>base.writeline(fmt.render())<line_sep># Process data lines. <for_stmt>ln proc<block_start>base.writeline(fmt.render(parser.extract(ln)))<block_end><return>proc.wait()<block_end>
#------------------------------------------------------------------------------- # htmlize: htmlize_main.py # # Main user-facing program. Usage: pipe some input text to its stdin. # # <NAME> (<EMAIL>) # This code is in the public domain #------------------------------------------------------------------------------- <import_from_stmt>datetime datetime<import_stmt>os sys<import_from_stmt>htmlize.core htmlize<import_from_stmt>htmlize.db DB Post<import_from_stmt>htmlize.iplugin discover_plugins<if_stmt>__name__<eq>'__main__'# Look for plugins in the plugins/ directory which lives in the same # place with this program. <block_start>mydir=os.path.dirname(sys.argv[0])<line_sep>plugins=discover_plugins([os.path.join(mydir 'plugins')])<line_sep>contents=sys.stdin.read()<line_sep>db=DB()<line_sep>post=db.create_new_post(author='eliben' date=datetime.today() title='Hello world' contents=contents)<line_sep>print(htmlize(post db plugins))<block_end>
""" Remainder problem in cpmpy. ''' 11. Is there a number which when divided by 3 gives a remainder of 1; when divided by 4, gives a remainder of 2; when divided by 5, gives a remainder of 3; and when divided by 6, gives a remainder of 4? (Kordemsky) ''' Model created by <NAME>, <EMAIL> See also my CPMpy page: http://www.hakank.org/cpmpy/ """<import_from_stmt>cpmpy *<import_stmt>numpy<as>np<import_from_stmt>cpmpy_hakank *<def_stmt>remainder_problem <block_start>Max=10000<line_sep>v=intvar(1 Max shape=5 name="v")<line_sep>X,A,B,C,D=v<line_sep>model=Model([X<eq>A<times>3+1 X<eq>B<times>4+2 X<eq>C<times>5+3 X<eq>D<times>6+4 ])<line_sep>ss=CPM_ortools(model)<line_sep>num_solutions=0<line_sep>xs=[]<while_stmt>ss.solve()<block_start>num_solutions<augadd>1<line_sep># print(v.value()) xs.append(v[0].value())<line_sep>get_different_solution(ss v)<block_end>print(xs)<line_sep>print("len:" len(xs))<block_end># Another approach <def_stmt>remainder_problem2 <block_start>Max=10000<line_sep>v=intvar(1 Max shape=5 name="v")<line_sep>X,A,B,C,D=v<line_sep>model=Model()<for_stmt>(i k) zip(range(1 4+1) [A B C D])<block_start>model<augadd>(X<eq>k<times>(i+2)+i)<block_end>ss=CPM_ortools(model)<line_sep>num_solutions=0<line_sep>xs=[]<while_stmt>ss.solve()<block_start>num_solutions<augadd>1<line_sep># print(v.value()) xs.append(v[0].value())<line_sep>get_different_solution(ss v)<block_end>print(xs)<line_sep>print("len:" len(xs))<block_end>remainder_problem()<line_sep>print("Another approach")<line_sep>remainder_problem2()<line_sep>
# -*- coding: utf-8 -*- # Copyright (c) 2015 Ericsson AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_stmt>collections defaultdict<import_from_stmt>calvin.actor.actor Actor manage condition stateguard<import_from_stmt>calvin.runtime.north.calvin_token EOSToken<import_from_stmt>calvin.utilities.calvinlogger get_logger<line_sep>_log=get_logger(__name__)<class_stmt>WordCount(Actor)<block_start>""" Count occurances of words in a stream of words. Inputs: in : a word Outputs: out : count for each word """<line_sep>@manage([])<def_stmt>init self<block_start>self.word_counts=defaultdict(int)<line_sep>self.finished=<false><block_end><def_stmt>exception_handler self action args<block_start>self.finished=<true><block_end>@condition(['in'] [])<def_stmt>count_word self word<block_start>self.word_counts[word]=self.word_counts[word]+1<block_end>@stateguard(<lambda>self:self.finished<is><true>)@condition(action_output=['out'])<def_stmt>output_counts self<block_start>self.finished=<false><line_sep><return>(self.word_counts )<block_end>action_priority=(count_word output_counts)<line_sep>test_set=[{'inports':{'in':['a' 'b' 'a' EOSToken()]} 'outports':{'out':[{'a':2 'b':1}]}}]<block_end>
<import_stmt>logging<import_stmt>os<import_stmt>shutil<import_stmt>pytest<import_stmt>salt.config<import_stmt>salt.loader<import_stmt>salt.modules.cmdmod<as>cmdmod<import_stmt>salt.modules.config<as>configmod<import_stmt>salt.modules.file<as>filemod<import_stmt>salt.utils.data<import_stmt>salt.utils.files<import_stmt>salt.utils.platform<import_stmt>salt.utils.stringutils<import_from_stmt>tests.support.mock MagicMock call patch<line_sep>log=logging.getLogger(__name__)<line_sep>@pytest.fixture<def_stmt>configure_loader_modules <block_start><return>{filemod:{"__salt__":{"config.manage_mode":configmod.manage_mode "cmd.run":cmdmod.run "cmd.run_all":cmdmod.run_all } "__opts__":{"test":<false> "file_roots":{"base":"tmp"} "pillar_roots":{"base":"tmp"} "cachedir":"tmp" "grains":{} } "__grains__":{"kernel":"Linux"} }}<block_end>@pytest.fixture<def_stmt>tmp_sub_dir tmp_path<block_start>directory=tmp_path/"file-basics-test-dir"<line_sep>directory.mkdir()<line_sep><yield>directory<line_sep>shutil.rmtree(str(directory))<block_end>@pytest.fixture<def_stmt>tfile tmp_sub_dir<block_start>filename=str(tmp_sub_dir/"file-basics-test-file")<with_stmt>salt.utils.files.fopen(filename "w+")<as>fp<block_start>fp.write("Hi hello! I am a file.")<block_end><yield>filename<line_sep>os.remove(filename)<block_end>@pytest.fixture<def_stmt>myfile tmp_sub_dir<block_start>filename=str(tmp_sub_dir/"myfile")<with_stmt>salt.utils.files.fopen(filename "w+")<as>fp<block_start>fp.write(salt.utils.stringutils.to_str("Hello\n"))<block_end><yield>filename<line_sep>os.remove(filename)<block_end>@pytest.fixture<def_stmt>a_link tmp_sub_dir<block_start>path=tmp_sub_dir/"a_link"<line_sep>linkname=str(path)<line_sep><yield>linkname<if_stmt>path.exists()<block_start>os.remove(linkname)<block_end><block_end>@pytest.fixture<def_stmt>a_hardlink tmp_sub_dir<block_start>path=tmp_sub_dir/"a_hardlink"<line_sep>linkname=str(path)<line_sep><yield>linkname<if_stmt>path.exists()<block_start>os.remove(linkname)<block_end><block_end>@pytest.mark.skip_on_windows(reason="os.symlink is not available on Windows")<def_stmt>test_symlink_already_in_desired_state tfile a_link<block_start>os.symlink(tfile a_link)<line_sep>result=filemod.symlink(tfile a_link)<assert_stmt>result<block_end>@pytest.mark.skip_on_windows(reason="os.link is not available on Windows")<def_stmt>test_hardlink_sanity tfile a_hardlink<block_start>target=a_hardlink<line_sep>result=filemod.link(tfile target)<assert_stmt>result<block_end>@pytest.mark.skip_on_windows(reason="os.link is not available on Windows")<def_stmt>test_hardlink_numlinks tfile a_hardlink<block_start>target=a_hardlink<line_sep>result=filemod.link(tfile target)<line_sep>name_i=os.stat(tfile).st_nlink<assert_stmt>name_i<g>1<block_end>@pytest.mark.skip_on_windows(reason="os.link is not available on Windows")<def_stmt>test_hardlink_working tfile a_hardlink<block_start>target=a_hardlink<line_sep>result=filemod.link(tfile target)<line_sep>name_i=os.stat(tfile).st_ino<line_sep>target_i=os.stat(target).st_ino<assert_stmt>name_i<eq>target_i<block_end><def_stmt>test_source_list_for_list_returns_file_from_dict_via_http <block_start><with_stmt>patch("salt.modules.file.os.remove")<as>remove<block_start>remove.return_value=<none><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) "cp.cache_file":MagicMock(return_value="/tmp/http.conf") } 
)<block_start><with_stmt>patch("salt.utils.http.query")<as>http_query<block_start>http_query.return_value={}<line_sep>ret=filemod.source_list([{"http://t.est.com/http/httpd.conf":"filehash"}] "" "base")<assert_stmt>list(ret)<eq>["http://t.est.com/http/httpd.conf" "filehash"]<block_end><block_end><block_end><block_end><def_stmt>test_source_list_use_requests <block_start><with_stmt>patch("salt.modules.file.os.remove")<as>remove<block_start>remove.return_value=<none><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) "cp.cache_file":MagicMock(return_value="/tmp/http.conf") } )<block_start>expected_call=call("http://t.est.com/http/file1" decode_body=<false> method="HEAD" )<with_stmt>patch("salt.utils.http.query" MagicMock(return_value={}))<as>http_query<block_start>ret=filemod.source_list([{"http://t.est.com/http/file1":"filehash"}] "" "base")<assert_stmt>list(ret)<eq>["http://t.est.com/http/file1" "filehash"]<assert_stmt>expected_call<in>http_query.mock_calls<block_end><block_end><block_end><block_end><def_stmt>test_source_list_for_list_returns_existing_file <block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=["http/httpd.conf.fallback"]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list(["salt://http/httpd.conf" "salt://http/httpd.conf.fallback"] "filehash" "base" )<assert_stmt>list(ret)<eq>["salt://http/httpd.conf.fallback" "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_file_from_other_env <block_start><def_stmt>list_master env<block_start>dct={"base":[] "dev":["http/httpd.conf"]}<line_sep><return>dct[env]<block_end><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(side_effect=list_master) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list(["salt://http/httpd.conf?saltenv=dev" "salt://http/httpd.conf.fallback" ] "filehash" "base" )<assert_stmt>list(ret)<eq>["salt://http/httpd.conf?saltenv=dev" "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_file_from_dict <block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=["http/httpd.conf"]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list([{"salt://http/httpd.conf":""}] "filehash" "base")<assert_stmt>list(ret)<eq>["salt://http/httpd.conf" "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_existing_local_file_slash myfile<block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list([myfile+"-foo" myfile] "filehash" "base")<assert_stmt>list(ret)<eq>[myfile "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_existing_local_file_proto myfile<block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list(["file://"+myfile+"-foo" "file://"+myfile] "filehash" "base" )<assert_stmt>list(ret)<eq>["file://"+myfile "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_local_file_slash_from_dict myfile<block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list([{myfile:""}] 
"filehash" "base")<assert_stmt>list(ret)<eq>[myfile "filehash"]<block_end><block_end><def_stmt>test_source_list_for_list_returns_local_file_proto_from_dict myfile<block_start><with_stmt>patch.dict(filemod.__salt__ {"cp.list_master":MagicMock(return_value=[]) "cp.list_master_dirs":MagicMock(return_value=[]) } )<block_start>ret=filemod.source_list([{"file://"+myfile:""}] "filehash" "base")<assert_stmt>list(ret)<eq>["file://"+myfile "filehash"]<block_end><block_end>
<import_from_stmt>slugify slugify<line_sep>filepath='GRSI.dat'<line_sep>GRSI=[]<with_stmt>open(filepath)<as>fp<block_start>GRSI=fp.readlines()<line_sep>print(GRSI)<block_end>
<import_from_stmt>IccCalibrator *<import_stmt>IccUtil<as>util<import_stmt>IccPlots<as>plots<import_stmt>IccSensors<as>sens<line_sep>
<import_stmt>multiprocessing<as>mp<import_stmt>pub_foo<def_stmt>hello q<block_start>print('module name: %s'%__name__)<line_sep>q.put('hello')<block_end><if_stmt>__name__<eq>'__main__'<block_start><try_stmt><block_start>ctx=mp.get_context('spawn')<block_end><except_stmt>Exception<block_start>ctx=mp<block_end>q=ctx.Queue()<line_sep>p=ctx.Process(target=pub_foo.proxy_hello args=(q ))<line_sep>p.start()<line_sep>print(q.get())<line_sep>p.join()<block_end>
<import_from_stmt>io BytesIO<import_from_stmt>scratch.application Application<import_from_stmt>scratch.headers Headers<import_from_stmt>scratch.request Request<import_from_stmt>scratch.response Response<line_sep>app=Application()<line_sep>@app.route("/")<def_stmt>static_handler request<block_start><return>Response(content="static")<block_end>@app.route("/people/{name}/{age}")<def_stmt>dynamic_handler request name age<block_start><return>Response(content=f"{name} is {age} years old!")<block_end><def_stmt>test_applications_can_route_requests # Given that I have an application # When I request the static_handler <block_start>response=app(Request(method="GET" path="/" headers=Headers() body=BytesIO()))<line_sep># Then I should get back a valid response <assert_stmt>response.body.read()<eq>b"static"<block_end><def_stmt>test_applications_can_route_requests_to_dynamic_paths # Given that I have an application # When I request the dynamic_handler <block_start>response=app(Request(method="GET" path="/people/Jim/32" headers=Headers() body=BytesIO()))<line_sep># Then I should get back a valid response <assert_stmt>response.body.read()<eq>b"Jim is 32 years old!"<block_end><def_stmt>test_applications_can_fail_to_route_invalid_paths # Given that I have an application # When I request a path that isn't registered <block_start>response=app(Request(method="GET" path="/invalid" headers=Headers() body=BytesIO()))<line_sep># Then I should get back a 404 response <assert_stmt>response.status<eq>b"404 Not Found"<block_end>
"""Vocal pitch contour transcription PatchCNN ver. Transcribes monophonic pitch contour of vocal in the given polyphonic audio by using the PatchCNN approach. Re-implementation of the repository `VocalMelodyExtPatchCNN <https://github.com/leo-so/VocalMelodyExtPatchCNN>`_. Feature Storage Format ---------------------- Processed feature and label will be stored in ``.hdf`` format, one file per piece. Columns contained in each file are: * **feature**: Patch CFP feature. * **label**: Binary classes of each patch. * **Z**: The original CFP feature. * **mapping**: Records the original frequency and time indexes of each patch. References ########## Publication of this module can be found in [1]_. .. [1] <NAME>, "Vocal Melody Extraction Using Patch-based CNN," in IEEE International Conference of Acoustics, Speech, and Signal Processing (ICASSP), 2018. """<import_from_stmt>omnizart.patch_cnn.app PatchCNNTranscription<line_sep>app=PatchCNNTranscription()<line_sep>
"""Helper library to get environment variables for absltest helper binaries."""<import_stmt>os<line_sep>_INHERITED_ENV_KEYS=frozenset({# This is needed to correctly use the Python interpreter determined by # bazel. 'PATH' # This is used by the random module on Windows to locate crypto # libraries. 'SYSTEMROOT' })<def_stmt>inherited_env <block_start>"""Returns the environment variables that should be inherited from parent. Reason why using an explict list of environment variables instead of inheriting all from parent: the absltest module itself interprets a list of environment variables set by bazel, e.g. XML_OUTPUT_FILE, TESTBRIDGE_TEST_ONLY. While testing absltest's own behavior, we should remove them when invoking the helper subprocess. Using an explicit list is safer. """<line_sep>env={}<for_stmt>key _INHERITED_ENV_KEYS<block_start><if_stmt>key<in>os.environ<block_start>env[key]=os.environ[key]<block_end><block_end><return>env<block_end>
# MIT License # # Copyright (c) 2021 <NAME> and EASE lab # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. <import_stmt>unittest<import_stmt>tracing<import_stmt>os<import_stmt>time<class_stmt>MyTest(unittest.TestCase)<block_start><def_stmt>test self<block_start>os.system("docker run -d --name zipkin-test -p 9411:9411 openzipkin/zipkin")<line_sep>time.sleep(5)<line_sep>tracing.initTracer("test tracer" debug=<true>)<with_stmt>tracing.Span("test parent span")<block_start><with_stmt>tracing.Span("test child span")<block_start>self.assertTrue(<true>)<block_end><block_end>time.sleep(5)<line_sep>os.system("docker rm -f zipkin-test")<block_end><block_end>
<import_from_stmt>os.path dirname join<import_from_stmt>.printer prt<line_sep>description="Dataset: Create a chained dataset."<line_sep># Files are stored in same directory as this python file, # see comment below. path=dirname(__file__)<def_stmt>main urd<block_start>prt.source(__file__)<line_sep>prt()<line_sep>prt('Create a chain of datasets using csvimport.')<line_sep>imp=<none><for_stmt>filename ('data.csv' 'data2.csv' 'data3.csv')# Ideally, you'd set "input_directory" to the location of the # input files in "accelerator.conf" to avoid an absolute path # in the input filename. <block_start>filename=join(path filename)<line_sep>imp=urd.build('csvimport' filename=filename previous=imp)<block_end>prt()<line_sep>prt('Try this to investigate the chain.')<line_sep>prt.command('ax ds -c -S' imp)<line_sep>prt()<line_sep>prt('To go back in chain and investigate datasets, try')<line_sep>prt.command('ax ds %s'%(imp ))<line_sep>prt.command('ax ds %s~'%(imp ))<line_sep>prt.command('ax ds %s~~'%(imp ))<line_sep>prt('Note that ~~ can also be written ~2 etc.')<line_sep>prt()<line_sep>prt('This method will iterate over the whole chain.')<line_sep>job=urd.build('dsexample_iteratechain' source=imp)<line_sep>prt()<line_sep>prt('To see its output, try')<line_sep>prt.command('ax job -O' job)<block_end>
<class_stmt>DockablePane(object IDisposable)<block_start>""" A user interface pane that participates in Revit's docking window system. DockablePane(other: DockablePane) DockablePane(id: DockablePaneId) """<def_stmt>Dispose self<block_start>""" Dispose(self: DockablePane) """<line_sep><pass><block_end><def_stmt>GetTitle self<block_start>""" GetTitle(self: DockablePane) -> str Returns the current title (a.k.a. window caption) of the dockable pane. """<line_sep><pass><block_end><def_stmt>Hide self<block_start>""" Hide(self: DockablePane) If the pane is on screen,hide it. Has no effect on built-in Revit dockable panes. """<line_sep><pass><block_end><def_stmt>IsShown self<block_start>""" IsShown(self: DockablePane) -> bool Identify the pane is currently visible or in a tab. """<line_sep><pass><block_end>@staticmethod<def_stmt>PaneExists id<block_start>""" PaneExists(id: DockablePaneId) -> bool Returns true if %id% refers to a dockable pane window that currently exists in the Revit user interface,whether it's hidden or shown. """<line_sep><pass><block_end>@staticmethod<def_stmt>PaneIsBuiltIn id<block_start>""" PaneIsBuiltIn(id: DockablePaneId) -> bool Returns true if %id% refers to a built-in Revit dockable pane,rather than one created by an add-in. """<line_sep><pass><block_end>@staticmethod<def_stmt>PaneIsRegistered id<block_start>""" PaneIsRegistered(id: DockablePaneId) -> bool Returns true if %id% refers to a built-in Revit dockable pane,or an add-in pane that has been properly registered with %Autodesk.Revit.UI.UIApplication.RegisterDockablePane%. """<line_sep><pass><block_end><def_stmt>ReleaseUnmanagedResources self *args<block_start>""" ReleaseUnmanagedResources(self: DockablePane,disposing: bool) """<line_sep><pass><block_end><def_stmt>Show self<block_start>""" Show(self: DockablePane) If the pane is not currently visible or in a tab,display the pane in the Revit user interface at its last docked location. """<line_sep><pass><block_end><def_stmt>__enter__ self *args<block_start>""" __enter__(self: IDisposable) -> object """<line_sep><pass><block_end><def_stmt>__exit__ self *args<block_start>""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """<line_sep><pass><block_end><def_stmt>__init__ self *args<block_start>""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """<line_sep><pass><block_end>@staticmethod<def_stmt>__new__ self *__args<block_start>""" __new__(cls: type,other: DockablePane) __new__(cls: type,id: DockablePaneId) """<line_sep><pass><block_end><def_stmt>__repr__ self *args<block_start>""" __repr__(self: object) -> str """<line_sep><pass><block_end>Id=property(<lambda>self:object() <lambda>self v:<none> <lambda>self:<none>)<line_sep>"""The unique identifier for this dockable pane. Get: Id(self: DockablePane) -> DockablePaneId """<line_sep>IsValidObject=property(<lambda>self:object() <lambda>self v:<none> <lambda>self:<none>)<line_sep>"""Specifies whether the .NET object represents a valid Revit entity. Get: IsValidObject(self: DockablePane) -> bool """<block_end>
""" Source: https://github.com/zsef123/MixNet-PyTorch """<line_sep>
<import_from_stmt>algo.asset Algo Asset<import_from_stmt>algo.config_algo localconfig<import_from_stmt>common.make_tx make_borrow_tx make_deposit_collateral_tx make_excluded_tx make_income_tx make_liquidate_tx make_lp_deposit_tx make_lp_stake_tx make_lp_unstake_tx make_lp_withdraw_tx make_repay_tx make_reward_tx make_spend_tx make_stake_tx make_swap_tx make_transfer_in_tx make_transfer_out_tx make_unstake_tx make_withdraw_collateral_tx <line_sep>lp_tickers={}<def_stmt>_ingest_row exporter row fee_amount=0 comment=<none><block_start><if_stmt>fee_amount<block_start>fee=Algo(fee_amount)<line_sep>row.fee=fee.amount<block_end><if_stmt>comment<block_start>row.comment=comment<block_end>exporter.ingest_row(row)<block_end><def_stmt>_should_exclude_tx asset_list<block_start><for_stmt>asset asset_list<block_start>ticker=asset.ticker<if>isinstance(asset Asset)<else>asset<if_stmt>ticker.lower()<in>localconfig.exclude_asas<block_start><return><true><block_end><block_end><return><false><block_end><def_stmt>export_exclude_tx exporter txinfo<block_start>row=make_excluded_tx(txinfo)<line_sep>_ingest_row(exporter row)<block_end><def_stmt>exclude_tx func<block_start><def_stmt>inner *args **kwargs<block_start>asset_list=[arg<for>arg args[2:]<if>isinstance(arg Asset)]<line_sep>exporter=args[0]<line_sep>txinfo=args[1]<if_stmt>_should_exclude_tx(asset_list)<block_start><return>export_exclude_tx(exporter txinfo)<block_end><return>func(*args **kwargs)<block_end><return>inner<block_end><def_stmt>exclude_lp_tx func<block_start><def_stmt>inner *args **kwargs<block_start>exporter=args[0]<line_sep>txinfo=args[1]<line_sep>asset=args[2]<line_sep>asset_currency=lp_tickers.get(asset.id asset.ticker)<if_stmt>asset_currency.startswith("LP_")<block_start>tokens=asset_currency.split("_")<if_stmt>_should_exclude_tx(tokens[2:])<block_start><return>export_exclude_tx(exporter txinfo)<block_end><block_end><return>func(*args **kwargs)<block_end><return>inner<block_end>@exclude_tx<def_stmt>export_send_tx exporter txinfo send_asset fee_amount=0 dest_address=<none> comment=<none> z_index=0<block_start><if_stmt><not>send_asset.zero()<block_start>row=make_transfer_out_tx(txinfo send_asset.amount send_asset.ticker dest_address z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end><block_end>@exclude_tx<def_stmt>export_receive_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start><if_stmt><not>receive_asset.zero()<block_start>row=make_transfer_in_tx(txinfo receive_asset.amount receive_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end><block_end>@exclude_tx<def_stmt>export_reward_tx exporter txinfo reward_asset fee_amount=0 comment=<none> z_index=0<block_start><if_stmt><not>reward_asset.zero()<block_start>row=make_reward_tx(txinfo reward_asset.amount reward_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end><block_end>@exclude_tx<def_stmt>export_spend_tx exporter txinfo send_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_spend_tx(txinfo send_asset.amount send_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx@exclude_lp_tx<def_stmt>export_income_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start>receive_asset_currency=lp_tickers.get(receive_asset.id receive_asset.ticker)<line_sep>row=make_income_tx(txinfo receive_asset.amount receive_asset_currency z_index=z_index)<line_sep>_ingest_row(exporter row 
fee_amount comment)<block_end>@exclude_tx<def_stmt>export_swap_tx exporter txinfo send_asset receive_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_swap_tx(txinfo send_asset.amount send_asset.ticker receive_asset.amount receive_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end><def_stmt>export_lp_deposit_tx exporter txinfo amm_symbol send_asset_1 send_asset_2 lp_asset fee_amount=0 comment=<none> z_index=0<block_start>lp_asset_currency=f"LP_{amm_symbol}_{send_asset_1.ticker}_{send_asset_2.ticker}"<line_sep>lp_tickers[lp_asset.id]=lp_asset_currency<if_stmt>_should_exclude_tx([send_asset_1 send_asset_2 lp_asset])<block_start><return>export_exclude_tx(exporter txinfo)<block_end>row=make_lp_deposit_tx(txinfo send_asset_1.amount send_asset_1.ticker lp_asset.amount/2 lp_asset_currency z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount/2 comment)<line_sep>row=make_lp_deposit_tx(txinfo send_asset_2.amount send_asset_2.ticker lp_asset.amount/2 lp_asset_currency z_index=z_index+1)<line_sep>_ingest_row(exporter row fee_amount/2 comment)<block_end><def_stmt>export_lp_withdraw_tx exporter txinfo amm_symbol lp_asset receive_asset_1 receive_asset_2 fee_amount=0 comment=<none> z_index=0<block_start>lp_asset_currency=f"LP_{amm_symbol}_{receive_asset_1.ticker}_{receive_asset_2.ticker}"<line_sep>lp_tickers[lp_asset.id]=lp_asset_currency<if_stmt>_should_exclude_tx([receive_asset_1 receive_asset_2 lp_asset])<block_start><return>export_exclude_tx(exporter txinfo)<block_end>row=make_lp_withdraw_tx(txinfo lp_asset.amount/2 lp_asset_currency receive_asset_1.amount receive_asset_1.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount/2 comment)<line_sep>row=make_lp_withdraw_tx(txinfo lp_asset.amount/2 lp_asset_currency receive_asset_2.amount receive_asset_2.ticker z_index=z_index+1)<line_sep>_ingest_row(exporter row fee_amount/2 comment)<block_end>@exclude_tx@exclude_lp_tx<def_stmt>export_lp_stake_tx exporter txinfo send_asset fee_amount=0 comment=<none> z_index=0<block_start>send_asset_currency=lp_tickers.get(send_asset.id send_asset.ticker)<line_sep>row=make_lp_stake_tx(txinfo send_asset.amount send_asset_currency z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx@exclude_lp_tx<def_stmt>export_lp_unstake_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start>receive_asset_currency=lp_tickers.get(receive_asset.id receive_asset.ticker)<line_sep>row=make_lp_unstake_tx(txinfo receive_asset.amount receive_asset_currency z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_borrow_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_borrow_tx(txinfo receive_asset.amount receive_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_repay_tx exporter txinfo send_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_repay_tx(txinfo send_asset.amount send_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_liquidate_tx exporter txinfo send_asset receive_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_liquidate_tx(txinfo send_asset.amount send_asset.ticker receive_asset.amount receive_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount 
comment)<block_end>@exclude_tx<def_stmt>export_deposit_collateral_tx exporter txinfo send_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_deposit_collateral_tx(txinfo send_asset.amount send_asset.ticker z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_withdraw_collateral_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start>row=make_withdraw_collateral_tx(txinfo receive_asset.amount receive_asset.ticker z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_stake_tx exporter txinfo send_asset fee_amount=0 comment=<none> z_index=0<block_start>send_asset_currency=lp_tickers.get(send_asset.id send_asset.ticker)<line_sep>row=make_stake_tx(txinfo send_asset.amount send_asset_currency z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>@exclude_tx<def_stmt>export_unstake_tx exporter txinfo receive_asset fee_amount=0 comment=<none> z_index=0<block_start>receive_asset_currency=lp_tickers.get(receive_asset.id receive_asset.ticker)<line_sep>row=make_unstake_tx(txinfo receive_asset.amount receive_asset_currency z_index=z_index)<line_sep>_ingest_row(exporter row fee_amount comment)<block_end>
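# Note on the 50/50 split used by export_lp_deposit_tx / export_lp_withdraw_tx
# above: each LP event is written as two rows, one per pool leg, with the LP
# token amount and the fee halved per row. Minimal standalone sketch of that
# arithmetic (the `Leg` tuple is hypothetical, for illustration only):
from collections import namedtuple

Leg = namedtuple("Leg", ["ticker", "lp_amount", "fee"])

def split_lp_deposit(ticker_1, ticker_2, lp_amount, fee_amount):
    # One row per deposited asset; LP tokens and the fee are split evenly.
    half_lp, half_fee = lp_amount / 2, fee_amount / 2
    return [Leg(ticker_1, half_lp, half_fee), Leg(ticker_2, half_lp, half_fee)]

assert split_lp_deposit("ALGO", "USDC", 10.0, 0.002) == [
    Leg("ALGO", 5.0, 0.001), Leg("USDC", 5.0, 0.001)]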
# -*- coding: utf-8 -*- <import_stmt>os<line_sep>PAD='<pad>'<line_sep>UNK='<unk>'<line_sep>BOS='<bos>'<line_sep>EOS='<eos>'<line_sep>MIN=-1e32<line_sep>CACHE=os.path.expanduser('~/.cache/supar')<line_sep>
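# Illustrative note (not part of the module above): special tokens such as
# PAD/UNK/BOS/EOS are conventionally placed at the head of a vocabulary so
# their indices stay fixed. A minimal sketch of that convention:
specials = ['<pad>', '<unk>', '<bos>', '<eos>']
stoi = {tok: i for i, tok in enumerate(specials)}
assert stoi['<pad>'] == 0 and stoi['<eos>'] == 3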
# 
# Tencent is pleased to support the open source community by making IoTHunter available. 
# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved. 
# Licensed under the MIT License (the "License"); you may not use this file except in 
# compliance with the License. You may obtain a copy of the License at 
# 
# http://opensource.org/licenses/MIT 
# 
# Unless required by applicable law or agreed to in writing, software distributed under the 
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, 
# either express or implied. See the License for the specific language governing permissions 
# and limitations under the License. 
<import_stmt>sys<import_stmt>os<import_stmt>hashlib<import_stmt>json<import_stmt>struct<import_stmt>re<import_stmt>idc<import_stmt>idautils<import_stmt>idaapi<class_stmt>NetUtil()<block_start>@staticmethod<def_stmt>ip_to_long ip<block_start>result=0<while_stmt><true><block_start><if_stmt>type(ip)<ne>str<block_start><break><block_end>ip_list=ip.split('.')<if_stmt>len(ip_list)<ne>4<block_start><break><block_end><for_stmt>i range(4)<block_start>result=result+int(ip_list[i])<times>256<power>(3-i)<block_end><break><block_end><return>result<block_end>@staticmethod<def_stmt>long_to_ip value<block_start><if_stmt>type(value)<ne>long<block_start><return>""<block_end>floor_list=[]<line_sep>yushu=value<for_stmt>i reversed(range(4))<block_start>res=divmod(yushu 256<power>i)<line_sep>floor_list.append(str(res[0]))<line_sep>yushu=res[1]<block_end><return>'.'.join(floor_list)<block_end>@staticmethod<def_stmt>check_domain domain<block_start><pass><block_end>@staticmethod<def_stmt>check_ip string<block_start>ret=<false><while_stmt><true><block_start><if_stmt>type(string)<ne>str<block_start><break><block_end>compile_ip=re.compile('^(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|[1-9])\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)\.(1\d{2}|2[0-4]\d|25[0-5]|[1-9]\d|\d)$')<if_stmt>compile_ip.match(string)<block_start>ret=<true><line_sep><break><block_end><break><block_end><return>ret<block_end><block_end><class_stmt>IdaUtil()<block_start>@staticmethod<def_stmt>is_packed_upx <block_start>strings=idautils.Strings()<line_sep>count=0<for_stmt>s strings<block_start><if_stmt>"upx.sf.net"<in>str(s)<block_start><return><true><block_end><if_stmt>count<ge>2<block_start><break><block_end>count<augadd>1<block_end><return><false><block_end>@staticmethod<def_stmt>match_binary addr search_flag pattern_list<block_start>ret_addr=idc.BADADDR<for_stmt>pattern pattern_list<block_start>ret_addr=idc.FindBinary(addr search_flag pattern)<if_stmt>ret_addr<ne>idc.BADADDR<block_start><break><block_end><block_end><return>ret_addr<block_end>@staticmethod<def_stmt>get_to_xrefs ea<block_start>xref_set=set([])<for_stmt>xref idautils.XrefsTo(ea 1)<block_start>xref_set.add(xref.frm)<block_end><return>xref_set<block_end>@staticmethod<def_stmt>get_frm_xrefs ea<block_start>xref_set=set([])<for_stmt>xref idautils.XrefsFrom(ea 1)<block_start>xref_set.add(xref.to)<block_end><return>xref_set<block_end>@staticmethod<def_stmt>get_string addr<block_start>""" idc.GetString may return a wrong length. For example: 00096d10f7872706af8155d40ddc4dab address 0x0001A7D4 string length 8, but idc.GetString returns 3. 
"""<line_sep>string=""<while_stmt><true><block_start><if_stmt>idc.Byte(addr)<ne>0<block_start>string<augadd>chr(idc.Byte(addr))<block_end><else_stmt><block_start><break><block_end>addr<augadd>1<block_end><return>string<block_end><block_end><class_stmt>StringUtil()<block_start>@staticmethod<def_stmt>format_data_to_string data len<block_start>""" Replace invisible characters with 16 hexadecimal. """<line_sep>string=""<for_stmt>i data<block_start><if_stmt>isinstance(i int)<block_start><if_stmt>i<in>range(0 0x20)+range(0x7F 0xFF)<block_start>string<augadd>r"\x%02x"%i<block_end><else_stmt><block_start>string<augadd>chr(i)<block_end><block_end><elif_stmt>isinstance(i str)<block_start><if_stmt>ord(i)<in>range(0 0x20)+range(0x7F 0xFF)<block_start>string<augadd>r"\x%02x"%ord(i)<block_end><else_stmt><block_start>string<augadd>i<block_end><block_end><block_end><return>string<block_end><block_end>
# coding=utf-8 
# Copyright 2018 The Google AI Language Team Authors. 
# 
# Licensed under the Apache License, Version 2.0 (the "License"); 
# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at 
# 
# http://www.apache.org/licenses/LICENSE-2.0 
# 
# Unless required by applicable law or agreed to in writing, software 
# distributed under the License is distributed on an "AS IS" BASIS, 
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and 
# limitations under the License. 
"""Binary to evaluate model. This binary can also be configured to run alongside a training job and poll for new model checkpoints, writing eval metrics (e.g. for TensorBoard). This binary also supports evaluations for settings such as NQG-T5, where predictions from T5 are used when NQG does not produce an output. Such 'fallback' predictions can be supplied via the `--fallback_predictions` flag. """<import_stmt>os<import_stmt>time<import_from_stmt>absl app<import_from_stmt>absl flags<import_from_stmt>language.nqg.model.parser config_utils<import_from_stmt>language.nqg.model.parser.data tokenization_utils<import_from_stmt>language.nqg.model.parser.inference inference_wrapper<import_from_stmt>language.nqg.model.parser.inference.targets target_grammar<import_from_stmt>language.nqg.model.qcfg qcfg_file<import_from_stmt>language.nqg.tasks tsv_utils<import_stmt>tensorflow<as>tf<import_from_stmt>official.nlp.bert configs<line_sep>FLAGS=flags.FLAGS<line_sep>flags.DEFINE_string("input" "" "Input tsv file.")<line_sep>flags.DEFINE_integer("limit" 0 "Index of example to end processing (Ignored if 0).")<line_sep>flags.DEFINE_integer("offset" 0 "Index of example to begin processing (Ignored if 0).")<line_sep>flags.DEFINE_bool("verbose" <true> "Whether to print debug output.")<line_sep>flags.DEFINE_string("model_dir" "" "Model directory.")<line_sep>flags.DEFINE_bool("poll" <false> "Whether to poll.")<line_sep>flags.DEFINE_bool("write" <false> "Whether to write metrics to model_dir.")<line_sep>flags.DEFINE_string("subdir" "eval_test" "Sub-directory of model_dir for writing metrics.")<line_sep>flags.DEFINE_string("checkpoint" "" "Checkpoint prefix, or None for latest.")<line_sep>flags.DEFINE_string("config" "" "Config file.")<line_sep>flags.DEFINE_string("bert_dir" "" "Directory for BERT, including vocab and config.")<line_sep>flags.DEFINE_string("rules" "" "QCFG rules txt file.")<line_sep>flags.DEFINE_string("fallback_predictions" "" "Optional fallback predictions txt file.")<line_sep>flags.DEFINE_string("target_grammar" "" "Optional target CFG.")<def_stmt>compute_metrics wrapper examples<block_start>"""Compute accuracy on examples."""<line_sep># Initialize stats. 
num_examples=0<line_sep>num_nqg_correct=0<line_sep>num_nqg_predictions=0<line_sep>num_fallback_correct=0<line_sep>num_hybrid_correct=0<line_sep>fallback_predictions=<none><if_stmt>FLAGS.fallback_predictions<block_start>fallback_predictions=[]<with_stmt>tf.io.gfile.GFile(FLAGS.fallback_predictions "r")<as>predictions_file<block_start><for_stmt>line predictions_file<block_start>fallback_predictions.append(line.rstrip())<block_end><block_end><block_end><for_stmt>idx,example enumerate(examples)<block_start><if_stmt>FLAGS.offset<and>idx<l>FLAGS.offset<block_start><continue><block_end><if_stmt>FLAGS.limit<and>idx<ge>FLAGS.limit<block_start><break><block_end><if_stmt>FLAGS.verbose<block_start>print("Processing example %s: %s"%(idx example[0]))<block_end>num_examples<augadd>1<line_sep>source=example[0]<line_sep>gold_target=example[1]<line_sep>nqg_prediction,_=wrapper.get_output(source)<if_stmt>nqg_prediction<block_start>num_nqg_predictions<augadd>1<block_end><if_stmt>nqg_prediction<eq>gold_target<block_start>num_nqg_correct<augadd>1<block_end><else_stmt><block_start><if_stmt>FLAGS.verbose<block_start>print("nqg incorrect (gold vs. predicted):\n%s\n%s\n"%(gold_target nqg_prediction))<block_end><block_end>fallback_prediction=(fallback_predictions[idx]<if>fallback_predictions<else><none>)<if_stmt>fallback_prediction<eq>gold_target<block_start>num_fallback_correct<augadd>1<block_end><else_stmt><block_start><if_stmt>FLAGS.verbose<block_start>print("fallback incorrect (gold vs. predicted):\n%s\n%s\n"%(gold_target fallback_prediction))<block_end><block_end>hybrid_prediction=nqg_prediction<or>fallback_prediction<if_stmt>hybrid_prediction<eq>gold_target<block_start>num_hybrid_correct<augadd>1<if_stmt>FLAGS.verbose<block_start>print("hybrid correct.")<block_end><block_end><else_stmt><block_start><if_stmt>FLAGS.verbose<block_start>print("hybrid incorrect.")<block_end><block_end><block_end>metrics_dict={"nqg_accuracy":float(num_nqg_correct)/float(num_examples) "fallback_accuracy":float(num_fallback_correct)/float(num_examples) "hybrid_accuracy":float(num_hybrid_correct)/float(num_examples) "nqg_coverage":float(num_nqg_predictions)/float(num_examples) "nqg_precision":float(num_nqg_correct)/float(num_nqg_predictions) }<if_stmt>FLAGS.verbose<block_start>print("num_examples: %s"%num_examples)<line_sep>print("num_nqg_correct: %s"%num_nqg_correct)<line_sep>print("num_nqg_predictions: %s"%num_nqg_predictions)<line_sep>print("num_fallback_correct: %s"%num_fallback_correct)<line_sep>print("num_hybrid_correct: %s"%num_hybrid_correct)<line_sep>print("metrics_dict: %s"%metrics_dict)<block_end><return>metrics_dict<block_end><def_stmt>get_summary_writer <block_start><if_stmt><not>FLAGS.write<block_start><return><none><block_end><return>tf.summary.create_file_writer(os.path.join(FLAGS.model_dir FLAGS.subdir))<block_end><def_stmt>write_metric writer name metric step<block_start><with_stmt>writer.as_default()<block_start>tf.summary.scalar(name metric step=step)<block_end><block_end><def_stmt>get_checkpoint <block_start>"""Return checkpoint path and step, or (None, None)."""<if_stmt>FLAGS.checkpoint<block_start>checkpoint=os.path.join(FLAGS.model_dir FLAGS.checkpoint)<block_end><else_stmt><block_start>checkpoint=tf.train.latest_checkpoint(FLAGS.model_dir)<block_end># TODO(petershaw): Consider less hacky way to get current step. 
step=<none><if_stmt>checkpoint<is><not><none><block_start>step=int(checkpoint.split("-")[-2])<block_end>print("Using checkpoint %s at step %s"%(checkpoint step))<line_sep><return>checkpoint step<block_end><def_stmt>get_inference_wrapper config<block_start>"""Construct and return InferenceWrapper."""<line_sep>rules=qcfg_file.read_rules(FLAGS.rules)<line_sep>tokenizer=tokenization_utils.get_tokenizer(os.path.join(FLAGS.bert_dir "vocab.txt"))<line_sep>bert_config=configs.BertConfig.from_json_file(os.path.join(FLAGS.bert_dir "bert_config.json"))<line_sep>target_grammar_rules=<none><if_stmt>FLAGS.target_grammar<block_start>target_grammar_rules=target_grammar.load_rules_from_file(FLAGS.target_grammar)<block_end>wrapper=inference_wrapper.InferenceWrapper(tokenizer rules config bert_config target_grammar_rules)<line_sep><return>wrapper<block_end><def_stmt>run_inference writer wrapper examples checkpoint step=<none><block_start>"""Run inference."""<line_sep>wrapper.restore_checkpoint(checkpoint)<line_sep>metrics_dict=compute_metrics(wrapper examples)<for_stmt>metric_name,metric_value metrics_dict.items()<block_start>print("%s at %s: %s"%(metric_name step metric_value))<if_stmt>FLAGS.write<block_start>write_metric(writer metric_name metric_value step)<block_end><block_end><block_end><def_stmt>main unused_argv<block_start>config=config_utils.json_file_to_dict(FLAGS.config)<line_sep>wrapper=get_inference_wrapper(config)<line_sep>examples=tsv_utils.read_tsv(FLAGS.input)<line_sep>writer=get_summary_writer()<if_stmt>FLAGS.poll<block_start>last_checkpoint=<none><while_stmt><true><block_start>checkpoint,step=get_checkpoint()<if_stmt>checkpoint<eq>last_checkpoint<block_start>print("Waiting for new checkpoint...\nLast checkpoint: %s"%last_checkpoint)<block_end><else_stmt><block_start>run_inference(writer wrapper examples checkpoint step=step)<line_sep>last_checkpoint=checkpoint<block_end><if_stmt>step<and>step<ge>config["training_steps"]# Stop eval job after completing eval for last training step. <block_start><break><block_end>time.sleep(10)<block_end><block_end><else_stmt><block_start>checkpoint,_=get_checkpoint()<line_sep>run_inference(writer wrapper examples checkpoint)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>app.run(main)<block_end>
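# Minimal sketch of the hybrid scoring rule implemented in compute_metrics
# above: NQG's output is preferred, and the fallback prediction is used only
# when NQG produces nothing (toy data, illustration only):
def _hybrid_accuracy(nqg_preds, fallback_preds, golds):
    hits = sum(1 for nqg, fb, gold in zip(nqg_preds, fallback_preds, golds)
               if (nqg or fb) == gold)
    return hits / len(golds)

assert _hybrid_accuracy([None, "b"], ["a", "z"], ["a", "b"]) == 1.0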
# -*- coding: utf-8 -*- 
<import_stmt>random<import_from_stmt>tkinter *<line_sep># variables and dictionary 
# These are all the outcomes that can occur; if/else chains could also be used, but they are a pain to implement 
schema={"rock":{"rock":1 "paper":0 "scissors":2} "paper":{"rock":2 "paper":1 "scissors":0} "scissors":{"rock":0 "paper":2 "scissors":1}}<line_sep>comp_score=0<line_sep>player_score=0<line_sep># functions 
<def_stmt>outcome_handler user_choice<block_start><global>comp_score<line_sep><global>player_score<line_sep>outcomes=["rock" "paper" "scissors"]<line_sep>num=random.randint(0 2)<line_sep>computer_choice=outcomes[num]<line_sep>result=schema[user_choice][computer_choice]<line_sep># now configure the labels according to the choices 
Player_Choice_Label.config(fg="green" text="Player choice : "+str(user_choice))<line_sep>Computer_Choice_Label.config(fg="red" text="Computer choice : "+str(computer_choice))<if_stmt>result<eq>2<block_start>player_score<augadd>2<line_sep>Player_Score_Label.config(text="Player : "+str(player_score))<line_sep>Outcome_Label.config(fg="blue" bg="skyblue" text="Player-Won")<block_end><elif_stmt>result<eq>1<block_start>player_score<augadd>1<line_sep>comp_score<augadd>1<line_sep>Player_Score_Label.config(text="Player : "+str(player_score))<line_sep>Outcome_Label.config(fg="blue" bg="skyblue" text="Draw")<line_sep>Computer_Score_Label.config(text="Computer : "+str(comp_score))<block_end><elif_stmt>result<eq>0<block_start>comp_score<augadd>2<line_sep>Outcome_Label.config(fg="blue" bg="skyblue" text="Computer-Won")<line_sep>Computer_Score_Label.config(text="Computer : "+str(comp_score))<block_end><block_end># main screen 
master=Tk()<line_sep>master.title("RPS")<line_sep># labels 
Label(master text="Rock , Paper , Scissors" font=("Calibri" 15)).grid(row=0 sticky=N pady=10 padx=200)<line_sep>Label(master text="Please Select an option" font=("Calibri" 12)).grid(row=2 sticky=N)<line_sep>Player_Score_Label=Label(master text="Player : 0" font=("Calibri" 12))<line_sep># label for player score 
Player_Score_Label.grid(row=3 sticky=W)<line_sep>Computer_Score_Label=Label(master text="Computer : 0" font=("Calibri" 12))<line_sep># label for computer score 
Computer_Score_Label.grid(row=3 sticky=E)<line_sep># player and computer choice labels 
Player_Choice_Label=Label(master font=("Calibri" 12))<line_sep>Player_Choice_Label.grid(row=5 sticky=W)<line_sep>Computer_Choice_Label=Label(master font=("Calibri" 12))<line_sep>Computer_Choice_Label.grid(row=5 sticky=E)<line_sep># outcome labels 
Outcome_Label=Label(master font=("Calibri" 12))<line_sep>Outcome_Label.grid(row=5 sticky=N pady=10)<line_sep># buttons 
Button(master text="Rock" width=17 command=<lambda>:outcome_handler("rock")).grid(row=6 sticky=W padx=10 pady=10)<line_sep>Button(master text="Paper" width=17 command=<lambda>:outcome_handler("paper")).grid(row=6 sticky=N pady=10)<line_sep>Button(master text="Scissors" width=17 command=<lambda>:outcome_handler("scissors")).grid(row=6 sticky=E padx=10 pady=10)<line_sep># dummy label to create space at the end of the master screen 
Label(master).grid(row=5)<line_sep>master.mainloop()<line_sep>
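# Quick standalone check of the outcome encoding in `schema` above
# (0 = player loses, 1 = draw, 2 = player wins); copied here for illustration:
_schema = {"rock": {"rock": 1, "paper": 0, "scissors": 2},
           "paper": {"rock": 2, "paper": 1, "scissors": 0},
           "scissors": {"rock": 0, "paper": 2, "scissors": 1}}
assert _schema["rock"]["scissors"] == 2  # rock beats scissors
assert _schema["rock"]["paper"] == 0     # paper beats rock
assert _schema["rock"]["rock"] == 1      # draw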
<import_from_stmt>typing Iterable<import_from_stmt>typing Optional<import_from_stmt>typing Union<import_from_stmt>.inputs.input Input<import_from_stmt>.outputs.output Output<import_from_stmt>.outputs.output Type<as>OutputType<import_from_stmt>.outputs.output Verbosity<import_from_stmt>.outputs.section_output SectionOutput<class_stmt>IO<block_start><def_stmt>__init__ self input:Input output:Output error_output:Output<arrow><none><block_start>self._input=input<line_sep>self._output=output<line_sep>self._error_output=error_output<block_end>@property<def_stmt>input self<arrow>Input<block_start><return>self._input<block_end>@property<def_stmt>output self<arrow>Output<block_start><return>self._output<block_end>@property<def_stmt>error_output self<arrow>Output<block_start><return>self._error_output<block_end><def_stmt>read self length:int default:Optional[str]=<none><arrow>str<block_start>""" Reads the given amount of characters from the input stream. """<line_sep><return>self._input.read(length default=default)<block_end><def_stmt>read_line self length:Optional[int]=<none> default:Optional[str]=<none><arrow>str<block_start>""" Reads a line from the input stream. """<line_sep><return>self._input.read_line(length=length default=default)<block_end><def_stmt>write_line self messages:Union[str Iterable[str]] verbosity:Verbosity=Verbosity.NORMAL type:OutputType=OutputType.NORMAL <arrow><none><block_start>self._output.write_line(messages verbosity=verbosity type=type)<block_end><def_stmt>write self messages:Union[str Iterable[str]] new_line:bool=<false> verbosity:Verbosity=Verbosity.NORMAL type:OutputType=OutputType.NORMAL <arrow><none><block_start>self._output.write(messages new_line=new_line verbosity=verbosity type=type)<block_end><def_stmt>write_error_line self messages:Union[str Iterable[str]] verbosity:Verbosity=Verbosity.NORMAL type:OutputType=OutputType.NORMAL <arrow><none><block_start>self._error_output.write_line(messages verbosity=verbosity type=type)<block_end><def_stmt>write_error self messages:Union[str Iterable[str]] new_line:bool=<false> verbosity:Verbosity=Verbosity.NORMAL type:OutputType=OutputType.NORMAL <arrow><none><block_start>self._error_output.write(messages new_line=new_line verbosity=verbosity type=type)<block_end><def_stmt>overwrite self messages:Union[str Iterable[str]]<arrow><none><block_start><import_from_stmt>cleo.cursor Cursor<line_sep>cursor=Cursor(self._output)<line_sep>cursor.move_to_column(1)<line_sep>cursor.clear_line()<line_sep>self.write(messages)<block_end><def_stmt>overwrite_error self messages:Union[str Iterable[str]]<arrow><none><block_start><import_from_stmt>cleo.cursor Cursor<line_sep>cursor=Cursor(self._error_output)<line_sep>cursor.move_to_column(1)<line_sep>cursor.clear_line()<line_sep>self.write_error(messages)<block_end><def_stmt>flush self<arrow><none><block_start>self._output.flush()<block_end><def_stmt>is_interactive self<arrow>bool<block_start><return>self._input.is_interactive()<block_end><def_stmt>interactive self interactive:bool=<true><arrow><none><block_start>self._input.interactive(interactive)<block_end><def_stmt>decorated self decorated:bool=<true><arrow><none><block_start>self._output.decorated(decorated)<line_sep>self._error_output.decorated(decorated)<block_end><def_stmt>is_decorated self<arrow>bool<block_start><return>self._output.is_decorated()<block_end><def_stmt>supports_utf8 self<arrow>bool<block_start><return>self._output.supports_utf8()<block_end><def_stmt>set_verbosity self 
verbosity:Verbosity<arrow><none><block_start>self._output.set_verbosity(verbosity)<line_sep>self._error_output.set_verbosity(verbosity)<block_end><def_stmt>is_verbose self<arrow>bool<block_start><return>self.output.is_verbose()<block_end><def_stmt>is_very_verbose self<arrow>bool<block_start><return>self.output.is_very_verbose()<block_end><def_stmt>is_debug self<arrow>bool<block_start><return>self.output.is_debug()<block_end><def_stmt>set_input self input:Input<arrow><none><block_start>self._input=input<block_end><def_stmt>with_input self input:Input<arrow>"IO"<block_start><return>self.__class__(input self._output self._error_output)<block_end><def_stmt>remove_format self text:str<arrow>str<block_start><return>self._output.remove_format(text)<block_end><def_stmt>section self<arrow>SectionOutput<block_start><return>self._output.section()<block_end><block_end>
"""Test the Z-Wave JS humidifier platform."""<import_from_stmt>zwave_js_server.const CommandClass<import_from_stmt>zwave_js_server.const.command_class.humidity_control HumidityControlMode<import_from_stmt>zwave_js_server.event Event<import_from_stmt>homeassistant.components.humidifier HumidifierDeviceClass<import_from_stmt>homeassistant.components.humidifier.const ATTR_HUMIDITY ATTR_MAX_HUMIDITY ATTR_MIN_HUMIDITY DEFAULT_MAX_HUMIDITY DEFAULT_MIN_HUMIDITY DOMAIN<as>HUMIDIFIER_DOMAIN SERVICE_SET_HUMIDITY <import_from_stmt>homeassistant.const ATTR_DEVICE_CLASS ATTR_ENTITY_ID SERVICE_TURN_OFF SERVICE_TURN_ON STATE_OFF STATE_ON <import_from_stmt>.common DEHUMIDIFIER_ADC_T3000_ENTITY HUMIDIFIER_ADC_T3000_ENTITY<async_keyword><def_stmt>test_humidifier hass client climate_adc_t3000 integration<block_start>"""Test a humidity control command class entity."""<line_sep>node=climate_adc_t3000<line_sep>state=hass.states.get(HUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state<assert_stmt>state.state<eq>STATE_ON<assert_stmt>state.attributes[ATTR_DEVICE_CLASS]<eq>HumidifierDeviceClass.HUMIDIFIER<assert_stmt>state.attributes[ATTR_HUMIDITY]<eq>35<assert_stmt>state.attributes[ATTR_MIN_HUMIDITY]<eq>10<assert_stmt>state.attributes[ATTR_MAX_HUMIDITY]<eq>70<line_sep>client.async_send_command.reset_mock()<line_sep># Test setting humidity <await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_SET_HUMIDITY {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY ATTR_HUMIDITY:41 } blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":1 "commandClassName":"Humidity Control Setpoint" "commandClass":CommandClass.HUMIDITY_CONTROL_SETPOINT "endpoint":0 "property":"setpoint" "propertyKey":1 "propertyName":"setpoint" "propertyKeyName":"Humidifier" "metadata":{"type":"number" "readable":<true> "writeable":<true> "unit":"%" "min":10 "max":70 "ccSpecific":{"setpointType":1} } "value":35 }<assert_stmt>args["value"]<eq>41<line_sep>client.async_send_command.reset_mock()<line_sep># Test de-humidify mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.DEHUMIDIFY) "prevValue":int(HumidityControlMode.HUMIDIFY) } } )<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(HUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_OFF<line_sep>client.async_send_command.reset_mock()<line_sep># Test auto mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.HUMIDIFY) } } )<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(HUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_ON<line_sep>client.async_send_command.reset_mock()<line_sep># Test off mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" 
"commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.HUMIDIFY) } } )<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(HUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_OFF<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.HUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.HUMIDIFY) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.OFF)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously auto event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.AUTO) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.DEHUMIDIFY)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously de-humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.DEHUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} 
blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously off event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.AUTO) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.HUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously auto event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously de-humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.DEHUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.DEHUMIDIFY) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.AUTO)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously off 
event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.AUTO) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:HUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.OFF) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.HUMIDIFY)<block_end><async_keyword><def_stmt>test_dehumidifier_missing_setpoint hass client climate_adc_t3000_missing_setpoint integration<block_start>"""Test a humidity control command class entity."""<line_sep>entity_id="humidifier.adc_t3000_missing_setpoint_dehumidifier"<line_sep>state=hass.states.get(entity_id)<assert_stmt>state<assert_stmt>ATTR_HUMIDITY<not><in>state.attributes<assert_stmt>state.attributes[ATTR_MIN_HUMIDITY]<eq>DEFAULT_MIN_HUMIDITY<assert_stmt>state.attributes[ATTR_MAX_HUMIDITY]<eq>DEFAULT_MAX_HUMIDITY<line_sep>client.async_send_command.reset_mock()<line_sep># Test setting humidity 
<await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_SET_HUMIDITY {ATTR_ENTITY_ID:entity_id ATTR_HUMIDITY:41 } blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<block_end><async_keyword><def_stmt>test_humidifier_missing_mode hass client climate_adc_t3000_missing_mode integration<block_start>"""Test a humidity control command class entity."""<line_sep>node=climate_adc_t3000_missing_mode<line_sep># Test that de-humidifier entity does not exist but humidifier entity does 
entity_id="humidifier.adc_t3000_missing_mode_dehumidifier"<line_sep>state=hass.states.get(entity_id)<assert_stmt><not>state<line_sep>entity_id="humidifier.adc_t3000_missing_mode_humidifier"<line_sep>state=hass.states.get(entity_id)<assert_stmt>state<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously auto for a device which does not have de-humidify mode 
event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:entity_id} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" 
"commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "3":"Auto"} } "value":int(HumidityControlMode.AUTO) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.OFF)<line_sep>client.async_send_command.reset_mock()<block_end><async_keyword><def_stmt>test_dehumidifier hass client climate_adc_t3000 integration<block_start>"""Test a humidity control command class entity."""<line_sep>node=climate_adc_t3000<line_sep>state=hass.states.get(DEHUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state<assert_stmt>state.state<eq>STATE_ON<assert_stmt>state.attributes[ATTR_DEVICE_CLASS]<eq>HumidifierDeviceClass.DEHUMIDIFIER<assert_stmt>state.attributes[ATTR_HUMIDITY]<eq>60<assert_stmt>state.attributes[ATTR_MIN_HUMIDITY]<eq>30<assert_stmt>state.attributes[ATTR_MAX_HUMIDITY]<eq>90<line_sep>client.async_send_command.reset_mock()<line_sep># Test setting humidity <await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_SET_HUMIDITY {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY ATTR_HUMIDITY:41 } blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":1 "commandClassName":"Humidity Control Setpoint" "commandClass":CommandClass.HUMIDITY_CONTROL_SETPOINT "endpoint":0 "property":"setpoint" "propertyKey":2 "propertyName":"setpoint" "propertyKeyName":"De-humidifier" "metadata":{"type":"number" "readable":<true> "writeable":<true> "unit":"%" "min":30 "max":90 "ccSpecific":{"setpointType":2} } "value":60 }<assert_stmt>args["value"]<eq>41<line_sep>client.async_send_command.reset_mock()<line_sep># Test humidify mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.HUMIDIFY) "prevValue":int(HumidityControlMode.DEHUMIDIFY) } } )<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(DEHUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_OFF<line_sep>client.async_send_command.reset_mock()<line_sep># Test auto mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.DEHUMIDIFY) } } )<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(DEHUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_ON<line_sep>client.async_send_command.reset_mock()<line_sep># Test off mode update from value updated event event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.DEHUMIDIFY) } } 
)<line_sep>node.receive_event(event)<line_sep>state=hass.states.get(DEHUMIDIFIER_ADC_T3000_ENTITY)<assert_stmt>state.state<eq>STATE_OFF<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously de-humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.DEHUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.DEHUMIDIFY) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.OFF)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously auto event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.AUTO) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.HUMIDIFY)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.HUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning off when device is previously off 
event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.AUTO) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_OFF {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously de-humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.DEHUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously auto event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.AUTO) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>0<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously humidifying event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.HUMIDIFY) "prevValue":int(HumidityControlMode.OFF) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.HUMIDIFY) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.AUTO)<line_sep>client.async_send_command.reset_mock()<line_sep># Test turning on when device is previously off event=Event(type="value updated" data={"source":"node" "event":"value updated" "nodeId":68 "args":{"commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE 
"endpoint":0 "property":"mode" "propertyName":"mode" "newValue":int(HumidityControlMode.OFF) "prevValue":int(HumidityControlMode.AUTO) } } )<line_sep>node.receive_event(event)<line_sep><await>hass.services.async_call(HUMIDIFIER_DOMAIN SERVICE_TURN_ON {ATTR_ENTITY_ID:DEHUMIDIFIER_ADC_T3000_ENTITY} blocking=<true> )<assert_stmt>len(client.async_send_command.call_args_list)<eq>1<line_sep>args=client.async_send_command.call_args_list[0][0][0]<assert_stmt>args["command"]<eq>"node.set_value"<assert_stmt>args["nodeId"]<eq>68<assert_stmt>args["valueId"]<eq>{"ccVersion":2 "commandClassName":"Humidity Control Mode" "commandClass":CommandClass.HUMIDITY_CONTROL_MODE "endpoint":0 "property":"mode" "propertyName":"mode" "metadata":{"type":"number" "readable":<true> "writeable":<true> "min":0 "max":255 "label":"Humidity control mode" "states":{"0":"Off" "1":"Humidify" "2":"De-humidify" "3":"Auto"} } "value":int(HumidityControlMode.OFF) }<assert_stmt>args["value"]<eq>int(HumidityControlMode.DEHUMIDIFY)<block_end>
<import_stmt>sqlite3<import_stmt>textwrap<import_from_stmt>scripts.artifact_report ArtifactHtmlReport<import_from_stmt>scripts.ilapfuncs logfunc tsv timeline is_platform_windows open_sqlite_db_readonly<def_stmt>get_cashApp files_found report_folder seeker wrap_text<block_start><for_stmt>file_found files_found<block_start>file_found=str(file_found)<if_stmt>file_found.endswith('.db')<block_start>db=open_sqlite_db_readonly(file_found)<line_sep>cursor=db.cursor()<line_sep>cursor.execute('''Select payment.role, payment.sender_id, CASE WHEN customer.cashtag IS NULL THEN '***NO CASH TAG PRESENT***' ELSE customer.cashtag END, customer.customer_display_name, payment.recipient_id, CASE WHEN customer1.cashtag IS NULL THEN '***NO CASH TAG PRESENT***' ELSE customer1.cashtag END, customer1.customer_display_name, payment.state, datetime(payment.display_date / 1000.0, 'unixepoch'), CASE WHEN json_extract (payment.render_data, '$."note"') IS NULL THEN '***NO NOTE SUBMITTED***' ELSE json_extract (payment.render_data, '$."note"') END, printf("$%.2f", json_extract(payment.render_data, '$."amount"."amount"') / 100.0) From payment Inner Join customer On customer.customer_id = payment.sender_id Inner Join customer customer1 On payment.recipient_id = customer1.customer_id ORDER BY payment.display_date DESC ''')<block_end><block_end>all_rows=cursor.fetchall()<line_sep>usageentries=len(all_rows)<if_stmt>usageentries<g>0<block_start>report=ArtifactHtmlReport('Transactions')<line_sep>report.start_artifact_report(report_folder 'Transactions')<line_sep>report.add_script()<line_sep>data_headers=('Transaction Date' 'User Account Role' 'Sender Display Name' 'Sender Unique ID' 'Sender Cashtag' 'Recipient Display Name' 'Recipient Unique ID' 'Recipient Cashtag' 'Transaction Amount' 'Transaction Status' 'Note')# Column headers for the report table, in display order. 
data_list=[]<for_stmt>row all_rows<block_start>data_list.append((row[8] row[0] row[3] row[1] row[2] row[6] row[4] row[5] row[10] row[7] row[9]))<block_end>report.write_artifact_data_table(data_headers data_list file_found)<line_sep>report.end_artifact_report()<line_sep>tsvname=f'Cash App Transactions'<line_sep>tsv(report_folder data_headers data_list tsvname)<line_sep>tlactivity=f'Cash App Transactions'<line_sep>timeline(report_folder tlactivity data_list data_headers)<block_end><else_stmt><block_start>logfunc('No Cash App Transactions data available')<block_end>db.close()<line_sep><return><block_end>
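# The query above joins `customer` twice (as `customer` and `customer1`) so a
# single payment row can resolve both sender_id and recipient_id to display
# names. Minimal standalone illustration of that self-join aliasing
# (hypothetical toy schema, not the real Cash App database):
import sqlite3
con = sqlite3.connect(":memory:")
con.executescript("""
    CREATE TABLE customer (customer_id TEXT, name TEXT);
    CREATE TABLE payment (sender_id TEXT, recipient_id TEXT);
    INSERT INTO customer VALUES ('c1', 'Alice'), ('c2', 'Bob');
    INSERT INTO payment VALUES ('c1', 'c2');
""")
row = con.execute("""
    SELECT s.name, r.name FROM payment
    JOIN customer s ON s.customer_id = payment.sender_id
    JOIN customer r ON r.customer_id = payment.recipient_id
""").fetchone()
assert row == ('Alice', 'Bob')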
<import_stmt>vaex<def_stmt>test_propagate_uncertainty <block_start>ds=vaex.from_scalars(x=1 y=2 e_x=2 e_y=4)<line_sep>ds['r']=ds.x+ds.y<line_sep>ds.propagate_uncertainties([ds.r])<line_sep>print(ds.r_uncertainty.expression)<assert_stmt>ds.r_uncertainty.expand().expression<eq>'sqrt(((e_x ** 2) + (e_y ** 2)))'<block_end><def_stmt>test_matrix <block_start>ds=vaex.from_scalars(x=1 y=0 z=0 x_e=0.1 y_e=0.2 z_e=0.3)<line_sep>matrix=[[1 0 0] [0 1 0] [0 0 1]]<line_sep>ds.add_virtual_columns_matrix3d(ds.x ds.y ds.z 'xn' 'yn' 'zy' matrix)<line_sep>ds.propagate_uncertainties([ds.xn])<assert_stmt>ds.xn.values[0]<eq>ds.x.values[0]<assert_stmt>ds.xn_uncertainty.values[0]<eq>ds.x_e.values[0]<line_sep>ds=vaex.from_scalars(x=1 y=0 z=0 x_e=0.1 y_e=0.2 z_e=0.3)<line_sep>matrix=[[0 1 0] [1 0 0] [0 0 1]]<line_sep>ds.add_virtual_columns_matrix3d(ds.x ds.y ds.z 'xn' 'yn' 'zy' matrix)<line_sep>ds.propagate_uncertainties([ds.xn ds.yn])<assert_stmt>ds.xn.values[0]<eq>ds.y.values[0]<assert_stmt>ds.xn_uncertainty.values[0]<eq>ds.y_e.values[0]<assert_stmt>ds.yn.values[0]<eq>ds.x.values[0]<assert_stmt>ds.yn_uncertainty.values[0]<eq>ds.x_e.values[0]<block_end>
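# Background for the expected expression above (sketch of standard first-order
# error propagation): for r = f(x, y),
#   sigma_r**2 = (df/dx)**2 * sigma_x**2 + (df/dy)**2 * sigma_y**2,
# and for r = x + y both partials are 1, which reduces to
# sqrt(e_x**2 + e_y**2) -- exactly the expression vaex generates.
import math
e_x, e_y = 2.0, 4.0
assert math.isclose(math.sqrt(e_x ** 2 + e_y ** 2), math.sqrt(20.0))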
<import_stmt>os<import_stmt>shutil<import_from_stmt>pathlib Path<import_stmt>SimpleITK<as>sitk<import_from_stmt>nndet.io save_json<import_from_stmt>nndet.utils.check env_guard<import_from_stmt>nndet.utils.info maybe_verbose_iterable<def_stmt>run_prep source_data:Path source_label:Path target_data_dir target_label_dir:Path<block_start>case_id=f"{(source_data.stem).rsplit('_' 1)[0]}"<line_sep>shutil.copy(source_data target_data_dir/f"{case_id}_0000.nii.gz")<line_sep>shutil.copy(source_label target_label_dir/f"{case_id}.nii.gz")# rename label file to match data label_itk=sitk.ReadImage(str(source_label))<line_sep>label_np=sitk.GetArrayFromImage(label_itk)<line_sep>instances={int(_id+1):0<for>_id range(label_np.max())}<line_sep>save_json({"instances":instances} target_label_dir/f"{case_id}")<block_end>@env_guard<def_stmt>main <block_start>det_data_dir=Path(os.getenv('det_data'))<line_sep>task_data_dir=det_data_dir/"Task017_CADA"<line_sep># setup raw paths source_data_dir=task_data_dir/"raw"/"train_dataset"<if_stmt><not>source_data_dir.is_dir()<block_start><raise>RuntimeError(f"{source_data_dir} should contain the raw data but does not exist.")<block_end>source_label_dir=task_data_dir/"raw"/"train_mask_images"<if_stmt><not>source_label_dir.is_dir()<block_start><raise>RuntimeError(f"{source_label_dir} should contain the raw labels but does not exist.")<block_end># setup raw splitted dirs target_data_dir=task_data_dir/"raw_splitted"/"imagesTr"<line_sep>target_data_dir.mkdir(exist_ok=<true> parents=<true>)<line_sep>target_label_dir=task_data_dir/"raw_splitted"/"labelsTr"<line_sep>target_label_dir.mkdir(exist_ok=<true> parents=<true>)<line_sep># prepare dataset info meta={"name":"CADA" "task":"Task017_CADA" "target_class":<none> "test_labels":<false> "labels":{"0":"aneurysm"} "modalities":{"0":"CT"} "dim":3 }<line_sep>save_json(meta task_data_dir/"dataset.json")<line_sep># prepare data & label case_ids=[(p.stem).rsplit('_' 1)[0]<for>p source_data_dir.glob("*.nii.gz")]<line_sep>print(f"Found {len(case_ids)} case ids")<for_stmt>cid maybe_verbose_iterable(case_ids)<block_start>run_prep(source_data=source_data_dir/f"{cid}_orig.nii.gz" source_label=source_label_dir/f"{cid}_labeledMasks.nii.gz" target_data_dir=target_data_dir target_label_dir=target_label_dir )<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
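# The instance map built in run_prep above assigns every instance id in the
# mask (1..max) to class 0, matching the single "aneurysm" label in
# dataset.json. Toy illustration with numpy only:
import numpy as np
label_np = np.array([[0, 1], [2, 2]])  # background 0 plus instance ids 1 and 2
instances = {int(_id + 1): 0 for _id in range(label_np.max())}
assert instances == {1: 0, 2: 0}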
default_app_config='select2_nestedadmin.apps.TestApp'<line_sep>
<import_stmt>bisect<import_stmt>random<class_stmt>Solution(object)<block_start><def_stmt>__init__ self w<block_start>""" :type w: List[int] """<line_sep>self.prefixSum=w<for_stmt>i range(1 len(self.prefixSum))<block_start>self.prefixSum[i]=self.prefixSum[i]+self.prefixSum[i-1]<block_end><block_end><def_stmt>pickIndex self<block_start>""" :rtype: int """<line_sep>target=random.randint(1 self.prefixSum[-1])<line_sep><return>bisect.bisect_left(self.prefixSum target)<block_end><block_end># Your Solution object will be instantiated and called as such: # obj = Solution(w) # param_1 = obj.pickIndex()
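# Worked example of the prefix-sum sampling technique above (illustration
# only): weights [1, 3, 2] become prefix sums [1, 4, 6]; a uniform draw t in
# 1..6 maps to the first index whose prefix sum is >= t, so index i is picked
# with probability w[i] / sum(w).
import bisect
prefix = [1, 4, 6]  # built from weights [1, 3, 2]
assert [bisect.bisect_left(prefix, t) for t in range(1, 7)] == [0, 1, 1, 1, 2, 2]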
master_doc='index'<line_sep>project=u'Infrastructure-Components'<line_sep>copyright='2019, <NAME>'<line_sep>htmlhelp_basename='Infrastructure-Components-Doc'<line_sep>language='en'<line_sep>gettext_compact=<false><line_sep>html_theme='sphinx_rtd_theme'<line_sep>#html_logo = 'img/logo.svg' html_theme_options={'logo_only':<true> 'display_version':<false> }<line_sep># sphinx-notfound-page # https://github.com/rtfd/sphinx-notfound-page notfound_context={'title':'Page Not Found' 'body':''' <h1>Page Not Found</h1> <p>Sorry, we couldn't find that page.</p> <p>Try using the search box or go to the homepage.</p> ''' }<line_sep>
r""" Solve Poisson equation on (-2\pi, 2\pi) with periodic bcs .. math:: \nabla^2 u = f, u(2\pi) = u(-2\pi) Use Fourier basis and find u in V such that:: (v, div(grad(u))) = (v, f) for all v in V V is the Fourier basis span{exp(1jkx)}_{k=-N/2}^{N/2-1} Use the method of manufactured solutions, and choose a solution that is either real or complex. """<import_stmt>os<import_from_stmt>sympy Symbol cos sin lambdify<import_stmt>numpy<as>np<import_from_stmt>shenfun inner grad TestFunction TrialFunction FunctionSpace Function Array<line_sep># Use sympy to compute a rhs, given an analytical solution x=Symbol("x" real=<true>)<line_sep>ue=cos(4<times>x)+1j<times>sin(6<times>x)<line_sep>#ue = cos(4*x) fe=ue.diff(x 2)<line_sep># Size of discretization N=40<line_sep>dtype={<true>:complex <false>:float}[ue.has(1j)]<line_sep>ST=FunctionSpace(N dtype=dtype domain=(-2<times>np.pi 2<times>np.pi))<line_sep>u=TrialFunction(ST)<line_sep>v=TestFunction(ST)<line_sep># Get f on quad points and exact solution fj=Array(ST buffer=fe)<line_sep>uj=Array(ST buffer=ue)<line_sep># Compute right hand side f_hat=Function(ST)<line_sep>f_hat=inner(v fj output_array=f_hat)<line_sep># Solve Poisson equation A=inner(grad(v) grad(u))<line_sep>u_hat=Function(ST)<line_sep>u_hat=A.solve(-f_hat u_hat)<line_sep>uq=ST.backward(u_hat)<line_sep>u_hat=ST.forward(uq u_hat fast_transform=<false>)<line_sep>uq=ST.backward(u_hat uq fast_transform=<false>)<assert_stmt>np.allclose(uj uq)<line_sep>point=np.array([0.1 0.2])<line_sep>p=ST.eval(point u_hat)<assert_stmt>np.allclose(p lambdify(x ue)(point))<if_stmt>'pytest'<not><in>os.environ<block_start><import_stmt>matplotlib.pyplot<as>plt<line_sep>plt.figure()<line_sep>X=ST.mesh()<line_sep>plt.plot(X uj.real)<line_sep>plt.title("U")<line_sep>plt.figure()<line_sep>plt.plot(X (uq-uj).real)<line_sep>plt.title("Error")<line_sep>plt.show()<block_end>
""" test_providers_package ~~~~~~~~~~~~~~~~~~~~~~ Tests for the :mod:`~ulid.providers` package. """<import_from_stmt>ulid providers<import_from_stmt>ulid.providers default monotonic<def_stmt>test_package_has_dunder_all <block_start>""" Assert that :pkg:`~ulid.providers` exposes the :attr:`~ulid.providers.__all__` attribute as a list. """<assert_stmt>hasattr(providers '__all__')<assert_stmt>isinstance(providers.__all__ list)<block_end><def_stmt>test_package_exposes_expected_interface <block_start>""" Assert that :attr:`~ulid.providers.__all__` exposes expected interface. """<assert_stmt>providers.__all__<eq>['Provider' 'DEFAULT' 'MICROSECOND' 'MONOTONIC']<block_end><def_stmt>test_package_has_default_provider <block_start>""" Assert :attr:`~ulid.providers.DEFAULT` is a :class:`~ulid.providers.default.Provider` instance. """<assert_stmt>isinstance(providers.DEFAULT default.Provider)<block_end><def_stmt>test_package_has_monotonic_provider <block_start>""" Assert :attr:`~ulid.providers.MONOTONIC` is a :class:`~ulid.providers.monotonic.Provider` instance. """<assert_stmt>isinstance(providers.MONOTONIC monotonic.Provider)<block_end>
<import_stmt>unittest<import_stmt>six<import_from_stmt>langdetect.detector_factory DetectorFactory<import_from_stmt>langdetect.utils.lang_profile LangProfile<class_stmt>DetectorTest(unittest.TestCase)<block_start>TRAINING_EN='a a a b b c c d e'<line_sep>TRAINING_FR='a b b c c c d d d'<line_sep>TRAINING_JA=six.u('\u3042 \u3042 \u3042 \u3044 \u3046 \u3048 \u3048')<line_sep>JSON_LANG1='{"freq":{"A":3,"B":6,"C":3,"AB":2,"BC":1,"ABC":2,"BBC":1,"CBA":1},"n_words":[12,3,4],"name":"lang1"}'<line_sep>JSON_LANG2='{"freq":{"A":6,"B":3,"C":3,"AA":3,"AB":2,"ABC":1,"ABA":1,"CAA":1},"n_words":[12,5,3],"name":"lang2"}'<def_stmt>setUp self<block_start>self.factory=DetectorFactory()<line_sep>profile_en=LangProfile('en')<for_stmt>w self.TRAINING_EN.split()<block_start>profile_en.add(w)<block_end>self.factory.add_profile(profile_en 0 3)<line_sep>profile_fr=LangProfile('fr')<for_stmt>w self.TRAINING_FR.split()<block_start>profile_fr.add(w)<block_end>self.factory.add_profile(profile_fr 1 3)<line_sep>profile_ja=LangProfile('ja')<for_stmt>w self.TRAINING_JA.split()<block_start>profile_ja.add(w)<block_end>self.factory.add_profile(profile_ja 2 3)<block_end><def_stmt>test_detector1 self<block_start>detect=self.factory.create()<line_sep>detect.append('a')<line_sep>self.assertEqual(detect.detect() 'en')<block_end><def_stmt>test_detector2 self<block_start>detect=self.factory.create()<line_sep>detect.append('b d')<line_sep>self.assertEqual(detect.detect() 'fr')<block_end><def_stmt>test_detector3 self<block_start>detect=self.factory.create()<line_sep>detect.append('d e')<line_sep>self.assertEqual(detect.detect() 'en')<block_end><def_stmt>test_detector4 self<block_start>detect=self.factory.create()<line_sep>detect.append(six.u('\u3042\u3042\u3042\u3042a'))<line_sep>self.assertEqual(detect.detect() 'ja')<block_end><def_stmt>test_lang_list self<block_start>langlist=self.factory.get_lang_list()<line_sep>self.assertEqual(len(langlist) 3)<line_sep>self.assertEqual(langlist[0] 'en')<line_sep>self.assertEqual(langlist[1] 'fr')<line_sep>self.assertEqual(langlist[2] 'ja')<block_end><def_stmt>test_factory_from_json_string self<block_start>self.factory.clear()<line_sep>profiles=[self.JSON_LANG1 self.JSON_LANG2]<line_sep>self.factory.load_json_profile(profiles)<line_sep>langlist=self.factory.get_lang_list()<line_sep>self.assertEqual(len(langlist) 2)<line_sep>self.assertEqual(langlist[0] 'lang1')<line_sep>self.assertEqual(langlist[1] 'lang2')<block_end><block_end>
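# Hedged usage sketch beyond the hand-built factory above: langdetect also ships a
# high-level API backed by pretrained profiles (the sample text is illustrative).
#from langdetect import detect
#print(detect('This is clearly English text.'))  # expected: 'en'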
# coding=utf-8 # # MLDB-2161-utf8-in-script-apply.py # <NAME>, 2017-03-08 # This file is part of MLDB. Copyright 2017 mldb.ai inc. All rights reserved. # <import_from_stmt>mldb mldb MldbUnitTest ResponseException<class_stmt>MLDB2161Utf8InScriptApply(MldbUnitTest)# noqa <block_start><def_stmt>test_python_script_apply_with_utf8 self<block_start>mldb.put("/v1/functions/filter_top_themes" {"type":"script.apply" "params":{"language":'python' "scriptConfig":{"source":""" from mldb import mldb # retrieve all themes mldb.log(mldb.script.args) request.set_return([[str(mldb.script.args[0][1]), 0, '1970-01-01T00:00:00.0000000Z']]) """}}})<line_sep>self.assertTableResultEquals(mldb.query(""" SELECT filter_top_themes( {{"Politique Provinciale":2, "Élections":1, "Thèmes et sous-thàmes":0} AS args} ) AS * """) [["_rowName" "return.['Thèmes et sous-thàmes', [0, '-Inf']]"] ["result" 0]])<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>mldb.run_tests()<block_end>
# coding=utf-8 """ train bert model """<import_stmt>modeling<import_stmt>tensorflow<as>tf<import_stmt>numpy<as>np<import_stmt>argparse<line_sep>parser=argparse.ArgumentParser(description='Describe your program')<line_sep>parser.add_argument('-batch_size' '--batch_size' type=int default=128)<line_sep>args=parser.parse_args()<line_sep>batch_size=args.batch_size<line_sep>print("batch_size:" batch_size)<def_stmt>bert_train_fn <block_start>is_training=<true><line_sep>hidden_size=768<line_sep>num_labels=10<line_sep>#batch_size=128 max_seq_length=512<line_sep>use_one_hot_embeddings=<false><line_sep>bert_config=modeling.BertConfig(vocab_size=21128 hidden_size=hidden_size num_hidden_layers=12 num_attention_heads=12 intermediate_size=3072)<line_sep>input_ids=tf.placeholder(tf.int32 [batch_size max_seq_length] name="input_ids")<line_sep>input_mask=tf.placeholder(tf.int32 [batch_size max_seq_length] name="input_mask")<line_sep>segment_ids=tf.placeholder(tf.int32 [batch_size max_seq_length] name="segment_ids")<line_sep>label_ids=tf.placeholder(tf.float32 [batch_size num_labels] name="label_ids")<line_sep>loss,per_example_loss,logits,probabilities,model=create_model(bert_config is_training input_ids input_mask segment_ids label_ids num_labels use_one_hot_embeddings)<line_sep># 1. generate or load training/validation/test data. e.g. train:(X,y). X is input_ids,y is labels. # 2. train the model by calling create model, get loss gpu_config=tf.ConfigProto()<line_sep>gpu_config.gpu_options.allow_growth=<true><line_sep>sess=tf.Session(config=gpu_config)<line_sep>sess.run(tf.global_variables_initializer())<for_stmt>i range(1000)<block_start>input_ids_=np.ones((batch_size max_seq_length) dtype=np.int32)<line_sep>input_mask_=np.ones((batch_size max_seq_length) dtype=np.int32)<line_sep>segment_ids_=np.ones((batch_size max_seq_length) dtype=np.int32)<line_sep>label_ids_=np.ones((batch_size num_labels) dtype=np.float32)<line_sep>feed_dict={input_ids:input_ids_ input_mask:input_mask_ segment_ids:segment_ids_ label_ids:label_ids_}<line_sep>loss_=sess.run([loss] feed_dict)<line_sep>print("loss:" loss_)<block_end># 3. eval the model from time to time <block_end><def_stmt>bert_predict_fn # 1. 
predict based on <block_start><pass><block_end><def_stmt>create_model bert_config is_training input_ids input_mask segment_ids labels num_labels use_one_hot_embeddings<block_start>"""Creates a classification model."""<line_sep>model=modeling.BertModel(config=bert_config is_training=is_training input_ids=input_ids input_mask=input_mask token_type_ids=segment_ids use_one_hot_embeddings=use_one_hot_embeddings)<line_sep>output_layer=model.get_pooled_output()<line_sep>hidden_size=output_layer.shape[-1].value<line_sep>output_weights=tf.get_variable("output_weights" [num_labels hidden_size] initializer=tf.truncated_normal_initializer(stddev=0.02))<line_sep>output_bias=tf.get_variable("output_bias" [num_labels] initializer=tf.zeros_initializer())<with_stmt>tf.variable_scope("loss")<block_start><if_stmt>is_training# if training, add dropout <block_start>output_layer=tf.nn.dropout(output_layer keep_prob=0.9)<block_end>logits=tf.matmul(output_layer output_weights transpose_b=<true>)<line_sep>print("output_layer:" output_layer.shape ";output_weights:" output_weights.shape ";logits:" logits.shape)<line_sep>logits=tf.nn.bias_add(logits output_bias)<line_sep>probabilities=tf.nn.softmax(logits axis=-1)<line_sep>per_example_loss=tf.nn.sigmoid_cross_entropy_with_logits(labels=labels logits=logits)<line_sep>loss=tf.reduce_mean(per_example_loss)<line_sep><return>loss per_example_loss logits probabilities model<block_end><block_end>bert_train_fn()<line_sep>
<def_stmt>fb_python_library name **kwargs<block_start>native.python_library(name=name **kwargs)<block_end>
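# Hedged usage sketch of the macro above, as it might appear in a build file
# (target and source names are illustrative assumptions):
#fb_python_library(
#    name = 'example_lib',
#    srcs = ['example.py'],
#)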
<import_from_stmt>cmd Cmd<class_stmt>GUICallbackBaseClass()<block_start><def_stmt>update_ip self ip<block_start><pass><block_end><block_end><class_stmt>BinjaCallback(GUICallbackBaseClass)<block_start><def_stmt>__init__ self bv<block_start>self.bv=bv<block_end><def_stmt>update_ip self ip<block_start>self.bv.file.navigate(self.bv.file.view ip)<block_end><block_end><def_stmt>red text<block_start><return>"\x1b[0;31m"+text+"\x1b[0m"<block_end><class_stmt>ExploreInteractive(Cmd object)<block_start>intro=red("[!] Dropping into angr shell\n")<line_sep>intro<augadd>red("Available Commands: print, pyshell, (p)ick, (r)un, (s)tep, stepi, (q)uit")<line_sep>prompt=red(">>> ")<def_stmt>__init__ self proj state gui_callback_object=GUICallbackBaseClass()<block_start>super(ExploreInteractive self).__init__()<line_sep>self.proj=proj<line_sep>self.simgr=proj.factory.simulation_manager(state)<if_stmt>"deferred"<not><in>self.simgr.stashes<block_start>self.simgr.stashes["deferred"]=[]<block_end>self.gui_cb=gui_callback_object<block_end>@property<def_stmt>state self<block_start>""" Alias to `self.simgr.one_active` :return: """<line_sep><return>self.simgr.one_active<block_end><def_stmt>_clearScreen self<block_start>print("\033[H\033[J")<block_end><def_stmt>do_quit self args<block_start>"""Quits the cli."""<line_sep>print(red("Exiting cmd-loop"))<line_sep><return><true><block_end><def_stmt>do_q self args<block_start>self.do_quit(args)<line_sep><return><true><block_end><def_stmt>do_print self arg<block_start>""" print [state_number] Prints a state state_number optionally specifies the state to print if multiple are available """<if_stmt><not>arg<block_start>arg="0"<block_end>pick=int(arg)<line_sep>active=len(self.simgr.active)<if_stmt>pick<ge>active<block_start>print(red("Only {} active state(s), indexed from 0".format(active)))<block_end><else_stmt><block_start>self.simgr.active[pick].context_view.pprint()<line_sep>self.gui_cb.update_ip(self.simgr.active[pick].addr)<block_end><block_end><def_stmt>do_stepi self args<block_start>""" stepi Steps one instruction """<if_stmt>len(self.simgr.active)<eq>1<block_start>self.simgr.step(num_inst=1)<line_sep>self._clearScreen()<line_sep>self.simgr.one_active.context_view.pprint()<line_sep>self.gui_cb.update_ip(self.simgr.one_active.addr)<block_end><elif_stmt>len(self.simgr.active)<g>1<block_start><for_stmt>idx,state enumerate(self.simgr.active)<block_start>print(state.context_view.pstr_branch_info(idx))<block_end><block_end><block_end><def_stmt>do_step self args<block_start>""" step Steps the current state one basic block """<if_stmt>len(self.simgr.active)<eq>1<block_start>self.simgr.step()<line_sep>self._clearScreen()<line_sep>self.simgr.one_active.context_view.pprint()<line_sep>self.gui_cb.update_ip(self.simgr.one_active.addr)<block_end><elif_stmt>len(self.simgr.active)<g>1<block_start><for_stmt>idx,state enumerate(self.simgr.active)<block_start>print(state.context_view.pstr_branch_info(idx))<block_end><block_end><block_end><def_stmt>do_s self args<block_start>self.do_step(args)<block_end><def_stmt>do_run self args<block_start>""" run [state_number] Runs until a branch is encountered state_number optionally picks a state if multiple are available 
"""<if_stmt>len(self.simgr.active)<g>1<and>args<block_start>self.do_pick(args)<block_end><if_stmt>len(self.simgr.active)<eq>1<block_start>self.simgr.run(until=<lambda>s:len(s.active)<ne>1)<if_stmt>self.simgr.active<block_start>self.gui_cb.update_ip(self.simgr.one_active.addr)<block_end><block_end><if_stmt>len(self.simgr.active)<g>0<block_start><for_stmt>i,state enumerate(self.simgr.active)<block_start>print(state.context_view.pstr_branch_info(i))<block_end><block_end><else_stmt><block_start>print(red("STATE FINISHED EXECUTION"))<if_stmt>len(self.simgr.stashes["deferred"])<eq>0<block_start>print(red("No states left to explore"))<block_end><else_stmt># DFS-style like <block_start>print(red("Other side of last branch has been added to {}".format(self.simgr)))<line_sep>self.simgr.stashes["active"].append(self.simgr.stashes["deferred"].pop())<block_end><block_end><block_end><def_stmt>do_r self args<block_start>self.do_run(args)<block_end><def_stmt>do_pick self arg<block_start>""" pick <state_number> Selects a state to continue if multiple are available, the other state is saved """<try_stmt><block_start>pick=int(arg)<line_sep>ip=self.simgr.active[pick].regs.ip<block_end><except_stmt><block_start>print("Invalid Choice: "+red("{}".format(arg))+", for {}".format(self.simgr))<line_sep><return><false><block_end>print(red("Picking state with ip: "+(str(ip))))<line_sep>self.simgr.move(from_stash='active' to_stash="deferred" filter_func=<lambda>x:x.solver.eval(ip<ne>x.regs.ip))<line_sep>self.simgr.step()<line_sep>self._clearScreen()<line_sep>self.simgr.one_active.context_view.pprint()<block_end><def_stmt>do_p self args<block_start>self.do_pick(args)<block_end><def_stmt>do_pyshell self args<block_start><import_stmt>gdb<line_sep>gdb.execute('pi')<block_end><def_stmt>do_EOF self args<block_start>self.do_quit(args)<line_sep><return><true><block_end><block_end>
<import_stmt>unittest2<import_from_stmt>pykafka.utils struct_helpers<class_stmt>StructHelpersTests(unittest2.TestCase)<block_start><def_stmt>test_basic_unpack self<block_start>output=struct_helpers.unpack_from('iiqhi' b'\x00\x00\x00\x01\x00\x00\x00\n\x00\x00\x00\x00\x00\x00\x00\n\x00<\x00\x00\x00\x04')<line_sep>self.assertEqual(output (1 10 10 60 4))<block_end><def_stmt>test_string_encoding self<block_start>output=struct_helpers.unpack_from('S' b'\x00\x04test')<line_sep>self.assertEqual(output (b'test' ))<block_end><def_stmt>test_bytearray_unpacking self<block_start>output=struct_helpers.unpack_from('Y' b'\x00\x00\x00\x04test')<line_sep>self.assertEqual(output (b'test' ))<block_end><def_stmt>test_array_unpacking self<block_start>output=struct_helpers.unpack_from('[i]' b'\x00\x00\x00\x04\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x04')<line_sep># The length-prefixed array of four int32 values is unpacked into a flat list self.assertEqual(output [1 2 3 4])<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest2.main()<block_end>
"""LTI Consumer plugin URLs configuration."""<import_from_stmt>django.urls include path<import_from_stmt>rest_framework.routers DefaultRouter<import_from_stmt>. models<import_from_stmt>.api LTIConsumerViewsSet<line_sep>router=DefaultRouter()<line_sep>router.register(models.LTIConsumer.RESOURCE_NAME LTIConsumerViewsSet basename="lti-consumer" )<line_sep>url_patterns=[path("" include(router.urls))]<line_sep>
<import_from_stmt>PyObjCTools.TestSupport *<import_stmt>objc<import_from_stmt>Foundation NSObject<try_stmt><block_start><import_from_stmt>Quartz *<block_end><except_stmt>ImportError<block_start><pass><block_end><class_stmt>TestQLPreviewPanelHelper(NSObject)<block_start><def_stmt>acceptsPreviewPanelControl_ self panel<block_start><return>1<block_end><def_stmt>previewPanel_handleEvent_ self panel event<block_start><return>1<block_end><def_stmt>previewPanel_sourceFrameOnScreenForPreviewItem_ self panel item<block_start><return>1<block_end><def_stmt>previewPanel_transitionImageForPreviewItem_contentRect_ self panel item rect<block_start><return>1<block_end><block_end><class_stmt>TestQLPreviewPanel(TestCase)<block_start>@min_os_level('10.6')<def_stmt>testClasses self<block_start>self.assertIsInstance(QLPreviewPanel objc.objc_class)<block_end>@min_os_level('10.6')<def_stmt>testMethods self<block_start>self.assertResultIsBOOL(QLPreviewPanel.sharedPreviewPanelExists)<line_sep>self.assertResultIsBOOL(QLPreviewPanel.enterFullScreenMode_withOptions_)<line_sep>self.assertResultIsBOOL(QLPreviewPanel.isInFullScreenMode)<line_sep>self.assertResultIsBOOL(TestQLPreviewPanelHelper.acceptsPreviewPanelControl_)<line_sep>self.assertResultIsBOOL(TestQLPreviewPanelHelper.previewPanel_handleEvent_)<line_sep>self.assertResultHasType(TestQLPreviewPanelHelper.previewPanel_sourceFrameOnScreenForPreviewItem_ NSRect.__typestr__)<line_sep>self.assertArgHasType(TestQLPreviewPanelHelper.previewPanel_transitionImageForPreviewItem_contentRect_ 2 objc._C_PTR+NSRect.__typestr__)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
<import_stmt>glob<import_stmt>h5py<import_stmt>numpy<as>np<import_stmt>os<import_stmt>tqdm<import_stmt>json<def_stmt>convert_tall_c3d_features sampling_rate<block_start>stride=sampling_rate<floordiv>5<line_sep>data_root="./data/TACoS/"<line_sep>hdf5_file=h5py.File(os.path.join(data_root 'tall_c3d_{}_features.hdf5'.format(sampling_rate)) 'w')<with_stmt>open(os.path.join(data_root 'train.json'))<as>json_file<block_start>annotation=json.load(json_file)<block_end><with_stmt>open(os.path.join(data_root 'val.json'))<as>json_file<block_start>annotation.update(json.load(json_file))<block_end><with_stmt>open(os.path.join(data_root 'test.json'))<as>json_file<block_start>annotation.update(json.load(json_file))<block_end>pbar=tqdm.tqdm(total=len(annotation))<for_stmt>vid,anno annotation.items()<block_start>video_feature=[]<for_stmt>i range(0 (anno['num_frames']-sampling_rate)<floordiv>stride+1)<block_start>s_idx=i<times>stride+1<line_sep>e_idx=s_idx+sampling_rate<line_sep>clip_path=os.path.join(data_root 'Interval64_128_256_512_overlap0.8_c3d_fc6' '{}_{}_{}.npy'.format(vid s_idx e_idx))<line_sep>frame_feat=np.load(clip_path)<line_sep>video_feature.append(frame_feat)<block_end>video_feature=np.stack(video_feature)<line_sep>hdf5_file.create_dataset(vid data=video_feature compression="gzip")<line_sep>pbar.update(1)<block_end>pbar.close()<line_sep>hdf5_file.close()<block_end><if_stmt>__name__<eq>'__main__'<block_start>convert_tall_c3d_features(64)<block_end>
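# Hedged read-back sketch for the file written above ('vid' stands for any video id
# stored as a dataset key):
#with h5py.File('./data/TACoS/tall_c3d_64_features.hdf5', 'r') as f:
#    feats = f[vid][:]  # stacked clip features for that video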
<import_stmt>torch<import_from_stmt>pathlib Path<import_from_stmt>scipy sparse<import_stmt>math<def_stmt>to_coo_scipy x<block_start>indices_1=x._indices().numpy()<line_sep>values_1=x._values().numpy()<line_sep><return>sparse.coo_matrix((values_1 (indices_1[0] indices_1[1])) shape=x.shape)<block_end><def_stmt>sparse_grad_output a b<block_start>c=torch.sparse.mm(a b)<if_stmt>c.is_sparse<block_start>c2=torch.rand_like(c.to_dense())<line_sep><return>c2.sparse_mask(c.coalesce())<block_end><else_stmt><block_start><return>torch.rand_like(c)<block_end><block_end><def_stmt>read_matrix_params path<block_start><with_stmt>open(path 'r')<as>file<block_start>line=file.readline()<line_sep>nrows,ncols,nnz=map(<lambda>el:int(el) line.split(', '))<line_sep><return>(nrows ncols) nnz<block_end><block_end><def_stmt>csr_to_coo indices indptr shape<block_start>n_rows,n_cols=shape<line_sep>cols=indices<line_sep>rows=[0]<times>len(cols)<for_stmt>i range(n_rows)<block_start><for_stmt>j range(indptr[i] indptr[i+1])<block_start>rows[j]=i<block_end><block_end><return>torch.tensor([rows cols] dtype=torch.long)<block_end><def_stmt>load_sparse_matrix path device<block_start><with_stmt>open(path 'r')<as>file<block_start>nrows,ncols,nnz=map(<lambda>el:int(el) file.readline().split(', '))<line_sep>index_pointers=map(<lambda>el:int(el) file.readline().split())<line_sep>indices=map(<lambda>el:int(el) file.readline().split())<block_end>index_pointers=list(index_pointers)<line_sep>indices=list(indices)<line_sep>data=torch.randn(nnz dtype=torch.double)<line_sep>shape=(nrows ncols)<line_sep><return>torch.sparse_coo_tensor(csr_to_coo(indices index_pointers shape) data shape device=device)<block_end><def_stmt>gen_vector path device<block_start><with_stmt>open(path 'r')<as>file<block_start>nrows,ncols,nnz=map(<lambda>el:int(el) file.readline().split(', '))<line_sep>index_pointers=map(<lambda>el:int(el) file.readline().split())<line_sep>indices=map(<lambda>el:int(el) file.readline().split())<line_sep><return>torch.randn(nrows dtype=torch.double device=device)<block_end><block_end><def_stmt>gen_matrix path device<block_start><with_stmt>open(path 'r')<as>file<block_start>nrows,ncols,nnz=map(<lambda>el:int(el) file.readline().split(', '))<line_sep>index_pointers=map(<lambda>el:int(el) file.readline().split())<line_sep>indices=map(<lambda>el:int(el) file.readline().split())<line_sep><return>torch.randn(nrows ncols dtype=torch.double device=device)<block_end><block_end><def_stmt>load_spmv_dataset dataset_path hidden_size sparsity device n_limit=math.inf<block_start>"""load_spmv_dataset loads a DLMC dataset for a sparse matrix-vector multiplication (SPMV) performance test. Args: dataset_path: path of the dataset from DLMC collection. hidden_size This value allows tensors of varying sizes. sparsity: This value allows tensors of varying sparsities. device: Whether to place the Tensor on a GPU or CPU. n_limit: This value allows a dataset with some limit size. """<line_sep>current_folder_path=f"{dataset_path}/{sparsity}"<line_sep>path=Path(current_folder_path)<line_sep>files=path.glob('**/*.smtx')<line_sep>print(dataset_path hidden_size sparsity)<line_sep>index=0<line_sep>x_files,y_files=[] []<for_stmt>f files<block_start><if_stmt>index<ge>n_limit<block_start><break><block_end>print('.' 
end='')<line_sep>size,nnz=read_matrix_params(f.as_posix())<if_stmt>size[1]<eq>hidden_size<block_start>x_files.append(f.as_posix())<block_end><if_stmt>size[0]<eq>hidden_size<block_start>y_files.append(f.as_posix())<block_end>index<augadd>1<block_end>print()<for_stmt>fx,fy zip(x_files y_files)<block_start>x=load_sparse_matrix(fx device)<line_sep>y=gen_vector(fy device)<line_sep><yield>(x y)<block_end><block_end><def_stmt>load_spmm_dataset dataset_path hidden_size sparsity spmm_type device n_limit=math.inf<block_start>"""load_spmm_dataset loads a DLMC dataset for a sparse matrix-matrix multiplication (SPMM) performance test. Args: dataset_path: path of the dataset from DLMC collection. hidden_size This value allows tensors of varying sizes. sparsity: This value allows tensors of varying sparsities. spmm_type: This value allows tensors for `sparse@sparse` or `sparse@dense` operations. device: Whether to place the Tensor on a GPU or CPU. n_limit: This value allows a dataset with some limit size. """<line_sep>current_folder_path=f"{dataset_path}/{sparsity}"<line_sep>path=Path(current_folder_path)<line_sep>files=path.glob('**/*.smtx')<line_sep>print(dataset_path hidden_size sparsity)<line_sep>index=0<line_sep>x_files,y_files=[] []<for_stmt>f files<block_start><if_stmt>index<ge>n_limit<block_start><break><block_end>print('.' end='')<line_sep>size,nnz=read_matrix_params(f.as_posix())<if_stmt>size[1]<eq>hidden_size<block_start>x_files.append(f.as_posix())<block_end><if_stmt>size[0]<eq>hidden_size<block_start>y_files.append(f.as_posix())<block_end>index<augadd>1<block_end>print()<for_stmt>fx,fy zip(x_files y_files)<block_start>x=load_sparse_matrix(fx device)<line_sep>y=gen_matrix(fy device)<if>spmm_type<eq>'sparse@dense'<else>load_sparse_matrix(fy device)<line_sep><yield>(x y)<block_end><block_end><def_stmt>load_dlmc_dataset dataset_path operation hidden_size sparsity device requires_grad n_limit=math.inf<block_start>"""load_dlmc_dataset loads a DLMC dataset for a matmul performance test. Args: dataset_path: path of the dataset from DLMC collection. operation: This value allows tensors for `sparse@sparse`|`sparse@dense`|`sparse@vector` operations. hidden_size This value allows tensors of varying sizes. sparsity: This value allows tensors of varying sparsities. device: Whether to place the Tensor on a GPU or CPU. requires_grad: Loads the dataset for backward test. n_limit: This value allows a dataset with some limit size. 
"""<if_stmt>operation<eq>'sparse@sparse'<or>operation<eq>"sparse@dense"<block_start>collection=load_spmm_dataset(dataset_path hidden_size sparsity operation device n_limit)<block_end><elif_stmt>operation<eq>'sparse@vector'<block_start>collection=load_spmv_dataset(dataset_path hidden_size sparsity device n_limit)<block_end>scipy_vars={}<line_sep>backward_vars={}<for_stmt>x,y collection<block_start><if_stmt>device<eq>'cpu'<block_start>scipy_vars={"sx":to_coo_scipy(x)<if>x.is_sparse<else>x.numpy() "sy":to_coo_scipy(y)<if>y.is_sparse<else>y.numpy() }<block_end><if_stmt><not>requires_grad<block_start>dx=x.to_dense()<if>x.is_sparse<else>x<line_sep>dy=y.to_dense()<if>y.is_sparse<else>y<block_end><else_stmt><block_start>c=sparse_grad_output(x y)<line_sep>backward_vars={"sparse_grad_output":c "grad_output":c.to_dense()<if>c.is_sparse<else>c }<line_sep>x.requires_grad_(<true>)<line_sep>y.requires_grad_(<true>)<line_sep>dx=x.to_dense().detach()<if>x.is_sparse<else>x.clone().detach()<line_sep>dy=y.to_dense().detach()<if>y.is_sparse<else>y.clone().detach()<line_sep>dx.requires_grad_(<true>)<line_sep>dy.requires_grad_(<true>)<block_end><yield>{"x":x "y":y "dx":dx "dy":dy **scipy_vars **backward_vars}<block_end><block_end>
<import_stmt>json<import_stmt>unittest<import_from_stmt>aws_allowlister.shared.utils clean_service_name get_service_name_matching_iam_service_prefix clean_service_name_after_brackets_and_parentheses chomp_keep_single_spaces chomp<class_stmt>UtilsTestCase(unittest.TestCase)<block_start><def_stmt>test_get_service_name_matching_iam_service_prefix self<block_start>iam_service_prefix="s3"<line_sep>result=get_service_name_matching_iam_service_prefix(iam_service_prefix)<line_sep>print(json.dumps(result indent=4))<line_sep>self.assertEqual(result "Amazon S3")<line_sep>service_name_pairs={"a4b":"<NAME>" "access-analyzer":"IAM Access Analyzer" "account":"AWS Accounts" "acm":"AWS Certificate Manager"# .. etc. # Try opening the SQLite database in DB Browser for SQLite to examine it more. # And view the table called compliancetable }<for_stmt>iam_service_prefix list(service_name_pairs.keys())# service prefix is like a4b, access-analyzer, etc. <block_start>result=get_service_name_matching_iam_service_prefix(iam_service_prefix)<line_sep>self.assertEqual(result service_name_pairs.get(iam_service_prefix))<line_sep>print(f"{iam_service_prefix}: {result}")<block_end><block_end><def_stmt>test_chomp self<block_start>result=chomp_keep_single_spaces("DoD CC SRG")<line_sep>print(result)<block_end># # def test_normalize_tags_or_strings(self): # print() <def_stmt>test_clean_service_name_non_breaking_spaces self<block_start>result=clean_service_name('AWS Amplify\u00a0')<line_sep>self.assertEqual(result "AWS Amplify")<block_end># def test_clean_service_name_remove_text_after_bracket(self): # # Example: Amazon Aurora on https://aws.amazon.com/compliance/hipaa-eligible-services-reference/ # result = clean_service_name('Amazon Aurora [MySQL, PostgreSQL]') # self.assertEqual(result, "Amazon Aurora") # # def test_clean_service_name_remove_text_after_parentheses(self): # # Example: Alexa for Business on https://aws.amazon.com/compliance/hipaa-eligible-services-reference/ # result = clean_service_name('Alexa for Business (for healthcare skills only – requires Alexa Skills BAA. See ' # 'HIPAA whitepaper for details)') # self.assertEqual(result, "Alexa for Business") <def_stmt>test_clean_service_name_tabs_and_newlines self# Make sure tabs and newlines are removed properly <block_start>result=clean_service_name('\n\n\t\tAmazon API Gateway\t\n')<line_sep>self.assertEqual(result "Amazon API Gateway")<line_sep>result=clean_service_name('Amazon API Gateway\n')<line_sep>self.assertTrue(result<eq>"Amazon API Gateway")<block_end><def_stmt>test_clean_service_name_text_after_brackets_and_parentheses self# Example: Amazon Aurora on https://aws.amazon.com/compliance/hipaa-eligible-services-reference/ <block_start>result=clean_service_name_after_brackets_and_parentheses('Amazon Aurora [MySQL, PostgreSQL]')<line_sep>self.assertEqual(result "Amazon Aurora")<line_sep># Example: Alexa for Business on https://aws.amazon.com/compliance/hipaa-eligible-services-reference/ result=clean_service_name_after_brackets_and_parentheses('Alexa for Business (for healthcare skills '<concat>'only – requires Alexa Skills BAA. 
See HIPAA '<concat>'whitepaper for details)')<line_sep>self.assertEqual(result "Alexa for Business")<line_sep># Make sure tabs and newlines are removed properly result=clean_service_name_after_brackets_and_parentheses('\n\n\t\tAmazon API Gateway\t\n')<line_sep>self.assertEqual(result "Amazon API Gateway")<line_sep>result=clean_service_name_after_brackets_and_parentheses('Amazon API Gateway\n')<line_sep>self.assertTrue(result<eq>"Amazon API Gateway")<block_end><block_end>
""" Copyright (c) 2020 Huawei Technologies Co.,Ltd. openGauss is licensed under Mulan PSL v2. You can use this software according to the terms and conditions of the Mulan PSL v2. You may obtain a copy of Mulan PSL v2 at: http://license.coscl.org.cn/MulanPSL2 THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE. See the Mulan PSL v2 for more details. """<import_stmt>argparse<import_stmt>logging<import_stmt>sys<import_from_stmt>configparser ConfigParser<import_from_stmt>algorithm.diag SQLDiag<import_from_stmt>utils ResultSaver is_valid_conf<import_from_stmt>preprocessing LoadData split_sql<line_sep>__version__='2.0.0'<line_sep>__description__='SQLdiag integrated by openGauss.'<def_stmt>parse_args <block_start>parser=argparse.ArgumentParser(description=__description__)<line_sep>parser.add_argument('mode' choices=['train' 'predict' 'finetune'] help='The training mode is to perform feature extraction and '<concat>'model training based on historical SQL statements. '<concat>'The prediction mode is to predict the execution time of '<concat>'a new SQL statement through the trained model.')<line_sep>parser.add_argument('-f' '--csv-file' type=argparse.FileType('r') help='The data set for training or prediction. '<concat>'The file format is CSV. '<concat>'If it is two columns, the format is (SQL statement, duration time). '<concat>'If it is three columns, '<concat>'the format is (timestamp of SQL statement execution time, SQL statement, duration time).')<line_sep>parser.add_argument('--predicted-file' help='The file path to save the predicted result.')<line_sep>parser.add_argument('--model' default='template' choices=['template' 'dnn'] help='Choose the model model to use.')<line_sep>parser.add_argument('--query' help='Input the querys to predict.')<line_sep>parser.add_argument('--threshold' help='Slow SQL threshold.')<line_sep>parser.add_argument('--model-path' required=<true> help='The storage path of the model file, used to read or save the model file.')<line_sep>parser.add_argument('--config-file' default='sqldiag.conf')<line_sep>parser.version=__version__<line_sep><return>parser.parse_args()<block_end><def_stmt>get_config filepath<block_start>cp=ConfigParser()<line_sep>cp.read(filepath encoding='UTF-8')<line_sep><return>cp<block_end><def_stmt>main args<block_start>logging.basicConfig(level=logging.WARNING)<if_stmt><not>is_valid_conf(args.config_file)<block_start>logging.fatal('The [--config-file] parameter is incorrect')<line_sep>sys.exit(1)<block_end>model=SQLDiag(args.model get_config(args.config_file))<if_stmt>args.mode<in>('train' 'finetune')<block_start><if_stmt><not>args.csv_file<block_start>logging.fatal('The [--csv-file] parameter is required for train mode')<line_sep>sys.exit(1)<block_end>train_data=LoadData(args.csv_file).train_data<if_stmt>args.mode<eq>'train'<block_start>model.fit(train_data)<block_end><else_stmt><block_start>model.fine_tune(args.model_path train_data)<block_end>model.save(args.model_path)<block_end><else_stmt><block_start>model.load(args.model_path)<if_stmt>args.csv_file<and><not>args.query<block_start>predict_data=LoadData(args.csv_file).predict_data<block_end><elif_stmt>args.query<and><not>args.csv_file<block_start>predict_data=split_sql(args.query)<block_end><else_stmt><block_start>logging.error('The predict model only supports [--csv-file] or [--query] at the same 
time.')<line_sep>sys.exit(1)<block_end>args.threshold=-100<if><not>args.threshold<else>float(args.threshold)<line_sep>pred_result=model.transform(predict_data)<if_stmt>args.predicted_file<block_start><if_stmt>args.model<eq>'template'<block_start>info_sum=[]<for_stmt>stats,_info pred_result.items()<block_start><if_stmt>_info<block_start>_info=list(filter(<lambda>item:item[1]<ge>args.threshold _info))<for_stmt>item _info<block_start>item.insert(1 stats)<block_end>info_sum.extend(_info)<block_end><block_end>ResultSaver().save(info_sum args.predicted_file)<block_end><else_stmt><block_start>pred_result=list(filter(<lambda>item:float(item[1])<ge>args.threshold pred_result))<line_sep>ResultSaver().save(pred_result args.predicted_file)<block_end><block_end><else_stmt><block_start><import_from_stmt>prettytable PrettyTable<line_sep>display_table=PrettyTable()<if_stmt>args.model<eq>'template'<block_start>display_table.field_names=['sql' 'status' 'predicted time' 'most similar template']<line_sep>display_table.align='l'<line_sep>status=('Suspect illegal SQL' 'No SQL information' 'No SQL template found' 'Fine match')<for_stmt>stats status<block_start><if_stmt>pred_result[stats]<block_start><for_stmt>sql,predicted_time,similariest_sql pred_result[stats]<block_start><if_stmt>predicted_time<ge>args.threshold<or>stats<eq>'Suspect illegal SQL'<block_start>display_table.add_row([sql stats predicted_time similariest_sql])<block_end><block_end><block_end><block_end><block_end><else_stmt><block_start>display_table.field_names=['sql' 'predicted time']<line_sep>display_table.align='l'<for_stmt>sql,predicted_time pred_result<block_start><if_stmt>float(predicted_time)<ge>args.threshold<block_start>display_table.add_row([sql predicted_time])<block_end><block_end><block_end>print(display_table.get_string())<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main(parse_args())<block_end>
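# Hedged CLI sketch (the script name and file paths are illustrative assumptions):
# python main.py train -f history.csv --model template --model-path ./template_model
# python main.py predict --query "select 1;" --model template --model-path ./template_model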
HEALTH_CHECKS_ERROR_CODE=503<line_sep>HEALTH_CHECKS={'db':'django_healthchecks.contrib.check_database' }<line_sep>
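# Hedged wiring sketch: the checks above are served once the package's URLs are mounted;
# the prefix is an illustrative choice and 'django_healthchecks.urls' is assumed from the
# package layout.
#from django.urls import include, path
#urlpatterns = [path('healthchecks/', include('django_healthchecks.urls'))]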
<def_stmt>check kwds name<block_start><if_stmt>kwds<block_start>msg=', '.join('"%s"'%s<for>s sorted(kwds))<line_sep>s=''<if>len(kwds)<eq>1<else>'s'<line_sep><raise>ValueError('Unknown attribute%s for %s: %s'%(s name msg))<block_end><block_end><def_stmt>set_reserved value section name=<none> data=<none> **kwds<block_start>check(kwds '%s %s'%(section value.__class__.__name__))<line_sep>value.name=name<line_sep>value.data=data<block_end>
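# Hedged usage sketch: an unknown keyword attribute should raise, e.g.
#check({'bogus': 1}, 'Widget')
# -> ValueError: Unknown attribute for Widget: "bogus"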
<import_stmt>sys<import_stmt>os<line_sep>theme_keys=["cursor" "foreground" "background" "background_opacity" "dynamic_background_opacity" "dim_opacity" "selection_foreground" "selection_background" "color0" "color8" "color1" "color9" "color2" "color10" "color3" "color11" "color4" "color12" "color5" "color13" "color6" "color14" "color7" "color15"]<def_stmt>is_valid line<block_start>""" Returns true if a line inside a configuration file is a valid theme configuration pair: is not a comment, is not empty and the key is correct. :param line: a line inside the configuration file :type line: str :return: true if is valid, false otherwise :rtype: bool """<line_sep><return>(<not>line.lstrip().startswith("#")# is not a comment <and>len(line.strip())<ne>0# is not empty <and>line.split(maxsplit=1)[0]<in>theme_keys)<block_end># key is a valid one <def_stmt>extract_configuration_pair line<block_start>""" Extract a configuration pair by splitting on spaces and taking the first couple of values. :param line: a line inside the configuration file :type line: str :return: a key-value pair :rtype: bool """<line_sep>split=line.split(maxsplit=2)<line_sep><return>split[0] split[1]<block_end><def_stmt>read_configuration filename<block_start>""" Read a kitty configuration file and extract only theme related keys and values. :param filename: path to the configuration file :type filename: str :return: a map with theme related configuration values :rtype: dict[str, str] """<line_sep><with_stmt>open(filename "r")<as>fp<block_start>lines=fp.readlines()<line_sep>theme_config=dict([extract_configuration_pair(line)<for>line lines<if>is_valid(line)])<block_end><return>theme_config<block_end><def_stmt>fg color text<block_start>rgb=tuple(int(color[i+1:i+3] 16)<for>i (0 2 4))<line_sep><return>('\x1b[38;2;%s;%s;%sm'%rgb+text+'\x1b[0m')<block_end><def_stmt>bg color text<block_start>rgb=tuple(int(color[i+1:i+3] 16)<for>i (0 2 4))<line_sep><return>('\x1b[48;2;%s;%s;%sm'%rgb+text+'\x1b[0m')<block_end><def_stmt>print_preview filename configuration<block_start>cursor=configuration["cursor"]<line_sep>background=configuration["background"]<line_sep>foreground=configuration["foreground"]<line_sep>theme=os.path.basename(filename)<line_sep>size=len(theme)+(2+2+16+2+16+1+2)<line_sep>print(bg(background " "<times>size))<line_sep>print(bg(background " ") end="")<line_sep>print(bg(background fg(foreground theme)) end="")<line_sep>print(bg(background " ") end="")<line_sep>c='a'<for_stmt>i range(0 16)<block_start>color=configuration["color%d"%i]<line_sep>print(bg(background fg(color c)) end="")<line_sep>c=chr(ord(c)+1)<block_end>print(bg(background " ") end="")<line_sep>selection_background=configuration["selection_background"]<line_sep>selection_foreground=configuration["selection_foreground"]<line_sep>c='A'<for_stmt>i range(0 16)<block_start>print(bg(selection_background fg(selection_foreground c)) end="")<line_sep>c=chr(ord(c)+1)<block_end>print(bg(cursor " ") end="")<line_sep>print(bg(background " "))<line_sep>print(bg(background " "<times>size))<line_sep>print(bg(background " ") end="")<line_sep>print(bg(configuration["color0"] " ") end="")<line_sep>print(bg(configuration["color1"] " ") end="")<line_sep>print(bg(configuration["color2"] " ") end="")<line_sep>print(bg(configuration["color3"] " ") end="")<line_sep>print(bg(configuration["color4"] " ") end="")<line_sep>print(bg(configuration["color5"] " ") end="")<line_sep>print(bg(configuration["color6"] " ") end="")<line_sep>print(bg(configuration["color7"] " ") 
end="")<line_sep>print(bg(background " ") end="")<line_sep>print(bg(configuration["color8"] " ") end="")<line_sep>print(bg(configuration["color9"] " ") end="")<line_sep>print(bg(configuration["color10"] " ") end="")<line_sep>print(bg(configuration["color11"] " ") end="")<line_sep>print(bg(configuration["color12"] " ") end="")<line_sep>print(bg(configuration["color13"] " ") end="")<line_sep>print(bg(configuration["color14"] " ") end="")<line_sep>print(bg(configuration["color15"] " ") end="")<line_sep>print(bg(background " "<times>(size-16-4)) end="")<line_sep>print()<line_sep>print(bg(background " "<times>size))<line_sep>print()<block_end><def_stmt>main directory<block_start><for_stmt>filename os.listdir(directory)<block_start><try_stmt><block_start>path=os.path.join(directory filename)<line_sep>configuration=read_configuration(path)<line_sep>print_preview(path configuration)<block_end><except_stmt>Exception<as>e<block_start>print(e file=sys.stderr)<line_sep>print("Error while processing %s"%filename file=sys.stderr)<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main(sys.argv[1])<block_end>
<import_stmt>matplotlib<line_sep>matplotlib.use("Agg")<import_stmt>matplotlib.pyplot<as>plt<line_sep>
"""Module that computes the parameters of the normal data distribution of the training set."""<import_from_stmt>typing Optional Tuple<import_stmt>torch<import_from_stmt>torch Tensor<import_from_stmt>torchmetrics Metric<class_stmt>AnomalyScoreDistribution(Metric)<block_start>"""Mean and standard deviation of the anomaly scores of normal training data."""<def_stmt>__init__ self **kwargs<block_start>super().__init__(**kwargs)<line_sep>self.anomaly_maps=[]<line_sep>self.anomaly_scores=[]<line_sep>self.add_state("image_mean" torch.empty(0) persistent=<true>)<line_sep>self.add_state("image_std" torch.empty(0) persistent=<true>)<line_sep>self.add_state("pixel_mean" torch.empty(0) persistent=<true>)<line_sep>self.add_state("pixel_std" torch.empty(0) persistent=<true>)<line_sep>self.image_mean=torch.empty(0)<line_sep>self.image_std=torch.empty(0)<line_sep>self.pixel_mean=torch.empty(0)<line_sep>self.pixel_std=torch.empty(0)<block_end># pylint: disable=arguments-differ <def_stmt>update # type: ignore self anomaly_scores:Optional[Tensor]=<none> anomaly_maps:Optional[Tensor]=<none><arrow><none><block_start>"""Update the precision-recall curve metric."""<if_stmt>anomaly_maps<is><not><none><block_start>self.anomaly_maps.append(anomaly_maps)<block_end><if_stmt>anomaly_scores<is><not><none><block_start>self.anomaly_scores.append(anomaly_scores)<block_end><block_end><def_stmt>compute self<arrow>Tuple[Tensor Tensor Tensor Tensor]<block_start>"""Compute stats."""<line_sep>anomaly_scores=torch.hstack(self.anomaly_scores)<line_sep>anomaly_scores=torch.log(anomaly_scores)<line_sep>self.image_mean=anomaly_scores.mean()<line_sep>self.image_std=anomaly_scores.std()<if_stmt>self.anomaly_maps<block_start>anomaly_maps=torch.vstack(self.anomaly_maps)<line_sep>anomaly_maps=torch.log(anomaly_maps).cpu()<line_sep>self.pixel_mean=anomaly_maps.mean(dim=0).squeeze()<line_sep>self.pixel_std=anomaly_maps.std(dim=0).squeeze()<block_end><return>self.image_mean self.image_std self.pixel_mean self.pixel_std<block_end><block_end>
""" external_settings_configs """<import_stmt>logging<import_stmt>traceback<import_stmt>requests<import_stmt>simplejson<as>json<import_from_stmt>ast literal_eval<import_from_stmt>skyline_functions get_redis_conn_decoded<import_stmt>settings<line_sep># @added 20210601 - Feature #4000: EXTERNAL_SETTINGS <def_stmt>manage_external_settings current_skyline_app<block_start>""" Return a concatenated external settings from :mod:`settings.EXTERNAL_SETTINGS` of any fetched external settings. :param current_skyline_app: the app calling the function so the function knows which log to write too. :type current_skyline_app: str :return: (external_settings, external_from_cache) :rtype: (dict, boolean) """<line_sep># Get the logger current_skyline_app_logger=str(current_skyline_app)+'Log'<line_sep>current_logger=logging.getLogger(current_skyline_app_logger)<line_sep>function_str='metrics_manager :: functions.settings.manage_external_settings'<line_sep>debug_get_external_settings=<none><line_sep># Set the default dicts to return external_settings={}<line_sep>external_from_cache=<none><line_sep>last_known_redis_key='skyline.last_known.external_settings'<line_sep># Define the items that are expected in the external settings json EXTERNAL_SETTINGS_JSON_ITEMS=('id' 'namespace' 'full_duration' 'second_order_resolution_seconds' 'learn_full_duration_seconds' )<line_sep>OPTIONAL_EXTERNAL_ALERTS_JSON_ITEMS=('retention_1_resolution_seconds' 'retention_1_period_seconds' 'retention_2_resolution_seconds' 'retention_2_period_seconds' 'flux_token' 'thunder_alert_endpoint' 'thunder_alert_token' 'alert_on_no_data' 'alert_on_stale_metrics' 'do_not_alert_on_stale_metrics' )<try_stmt><block_start>EXTERNAL_SETTINGS=settings.EXTERNAL_SETTINGS.copy()<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: %s :: failed to determine EXTERNAL_SETTINGS - %s'%(function_str e))<line_sep><return>(external_settings external_from_cache)<block_end><for_stmt>external_settings_item list(EXTERNAL_SETTINGS.keys())<block_start>endpoint=<none><try_stmt><block_start>endpoint=EXTERNAL_SETTINGS[external_settings_item]['endpoint']<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: %s :: failed to determine endpoint for EXTERNAL_SETTINGS[\'%s\'] - %s'%(function_str str(external_settings_item) e))<block_end><if_stmt><not>endpoint<block_start><continue><block_end>post_data=<none><try_stmt><block_start>post_data=EXTERNAL_SETTINGS[external_settings_item]['post_data']<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: %s :: failed to determine post_data for EXTERNAL_SETTINGS[\'%s\'] - %s'%(function_str str(external_settings_item) e))<block_end><if_stmt><not>post_data<block_start><continue><block_end>external_settings_dict={}<line_sep>current_logger.info('%s :: fetching external settings from %s'%(function_str str(endpoint)))<try_stmt><block_start>header={"content-type":"application/json"}<line_sep>r=requests.post(endpoint data=json.dumps(post_data) headers=header timeout=10)<line_sep>external_settings_dict=r.json()<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: %s :: could not retrieve json from the url - %s - %s'%(function_str str(endpoint) 
e))<line_sep><continue><block_end><if_stmt><not>external_settings_dict<block_start>current_logger.error('error :: %s :: failed to retrieve json from the url - %s'%(function_str str(endpoint)))<block_end><if_stmt>external_settings_dict<block_start>namespaces_list=[]<try_stmt><block_start>namespaces_list=external_settings_dict['data']['namespaces']<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: %s :: could not parse [\'data\'][\'namespaces\'] from json from url - %s - %s'%(function_str str(endpoint) e))<line_sep><continue><block_end><for_stmt>item namespaces_list<block_start><try_stmt><block_start>required_elements=<true><for_stmt>element EXTERNAL_SETTINGS_JSON_ITEMS<block_start>valid_element=<false><try_stmt><block_start>valid_element=item[element]<block_end><except_stmt>Exception<as>e<block_start>current_logger.error('error :: %s :: could not validate %s from json from url - %s - %s'%(function_str element str(endpoint) e))<line_sep>required_elements=<false><block_end><if_stmt><not>valid_element<block_start>required_elements=<false><line_sep><continue><block_end><block_end><if_stmt>required_elements<block_start>config_id='external-%s'%str(item['id'])<line_sep>external_settings[config_id]=item<block_end><block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: get_external_settings :: could not parse namespace element from json from url - %s - %s - %s'%(str(endpoint) str(item) e))<block_end><block_end><block_end><block_end>redis_conn_decoded=<none><try_stmt><block_start>redis_conn_decoded=get_redis_conn_decoded(current_skyline_app)<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: get_external_settings :: failed to get decoded Redis connection - %s'%e)<line_sep><return>(external_settings external_from_cache)<block_end><if_stmt><not>external_settings<block_start><try_stmt><block_start>external_settings_raw=redis_conn_decoded.get(last_known_redis_key)<if_stmt>external_settings_raw<block_start>external_settings=literal_eval(external_settings_raw)<line_sep>external_from_cache=<true><block_end><block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: get_external_settings :: failed to query Redis for %s - %s'%(last_known_redis_key e))<block_end><return>(external_settings external_from_cache)<block_end>redis_key='skyline.external_settings'<try_stmt><block_start>redis_conn_decoded.set(redis_key str(external_settings))<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: get_external_settings :: failed to set Redis key %s - %s'%(redis_key e))<block_end><try_stmt><block_start>redis_conn_decoded.set(last_known_redis_key str(external_settings))<block_end><except_stmt>Exception<as>e<block_start>current_logger.error(traceback.format_exc())<line_sep>current_logger.error('error :: get_external_settings :: failed to set Redis key %s - %s'%(last_known_redis_key e))<block_end><return>(external_settings external_from_cache)<block_end>
# Copyright Contributors to the Amundsen project. # SPDX-License-Identifier: Apache-2.0 <import_stmt>unittest<import_from_stmt>amundsen_application.models.data_issue DataIssue Priority<class_stmt>DataIssueTest(unittest.TestCase)<block_start><def_stmt>setUp self<arrow><none><block_start>self.issue_key='key'<line_sep>self.title='title'<line_sep>self.url='https://place'<line_sep>self.status='open'<line_sep>self.priority=Priority.P2<line_sep>self.maxDiff=<none><block_end><def_stmt>test_mapping_priority self<arrow><none><block_start>expected_priority_name='major'<line_sep>expected_priority_display_name='P2'<line_sep>data_issue=DataIssue(issue_key=self.issue_key title=self.title url=self.url status=self.status priority=self.priority).serialize()<line_sep>self.assertEqual(data_issue['priority_display_name'] expected_priority_display_name)<line_sep>self.assertEqual(data_issue['priority_name'] expected_priority_name)<line_sep>self.assertEqual(data_issue['issue_key'] self.issue_key)<line_sep>self.assertEqual(data_issue['title'] self.title)<line_sep>self.assertEqual(data_issue['url'] self.url)<line_sep>self.assertEqual(data_issue['status'] self.status)<block_end><def_stmt>test_mapping_priorty_missing self<arrow><none><block_start>expected_priority_name=<none># type: ignore expected_priority_display_name=<none># type: ignore data_issue=DataIssue(issue_key=self.issue_key title=self.title url=self.url status=self.status priority=<none>).serialize()<line_sep>self.assertEqual(data_issue['priority_display_name'] expected_priority_display_name)<line_sep>self.assertEqual(data_issue['priority_name'] expected_priority_name)<line_sep>self.assertEqual(data_issue['issue_key'] self.issue_key)<line_sep>self.assertEqual(data_issue['title'] self.title)<line_sep>self.assertEqual(data_issue['url'] self.url)<line_sep>self.assertEqual(data_issue['status'] self.status)<block_end><block_end>
# -*- coding: utf-8 -*- <import_from_future_stmt> absolute_import division print_function unicode_literals<import_from_stmt>sumy._compat to_unicode<import_from_stmt>sumy.summarizers.random RandomSummarizer<import_from_stmt>..utils build_document build_document_from_string<def_stmt>test_empty_document <block_start>document=build_document()<line_sep>summarizer=RandomSummarizer()<line_sep>sentences=summarizer(document 10)<assert_stmt>len(sentences)<eq>0<block_end><def_stmt>test_less_sentences_than_requested <block_start>document=build_document_from_string(""" This is only one sentence. """)<line_sep>summarizer=RandomSummarizer()<line_sep>sentences=summarizer(document 10)<assert_stmt>len(sentences)<eq>1<assert_stmt>to_unicode(sentences[0])<eq>"This is only one sentence."<block_end><def_stmt>test_sentences_in_right_order <block_start>document=build_document_from_string(""" # Heading one First sentence. Second sentence. Third sentence. """)<line_sep>summarizer=RandomSummarizer()<line_sep>sentences=summarizer(document 4)<assert_stmt>len(sentences)<eq>3<assert_stmt>to_unicode(sentences[0])<eq>"First sentence."<assert_stmt>to_unicode(sentences[1])<eq>"Second sentence."<assert_stmt>to_unicode(sentences[2])<eq>"Third sentence."<block_end><def_stmt>test_more_sentences_than_requested <block_start>document=build_document_from_string(""" # Heading one First sentence. Second sentence. Third sentence. # Heading two I like sentences They are so wordy And have many many letters And are green in my editor But someone doesn't like them :( """)<line_sep>summarizer=RandomSummarizer()<line_sep>sentences=summarizer(document 4)<assert_stmt>len(sentences)<eq>4<block_end>
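# Hedged usage sketch with sumy's parser stack instead of the test helpers above
# (the sample text is an illustrative assumption):
#from sumy.parsers.plaintext import PlaintextParser
#from sumy.nlp.tokenizers import Tokenizer
#parser = PlaintextParser.from_string('First sentence. Second sentence. Third sentence.', Tokenizer('english'))
#print(RandomSummarizer()(parser.document, 2))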
#
# This file is part of the LibreOffice project.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This file incorporates work covered by the following license notice:
#
#   Licensed to the Apache Software Foundation (ASF) under one or more
#   contributor license agreements. See the NOTICE file distributed
#   with this work for additional information regarding copyright
#   ownership. The ASF licenses this file to you under the Apache
#   License, Version 2.0 (the "License"); you may not use this file
#   except in compliance with the License. You may obtain a copy of
#   the License at http://www.apache.org/licenses/LICENSE-2.0 .
#
from .FaxWizardDialogResources import FaxWizardDialogResources
from .FaxWizardDialogConst import FaxWizardDialogConst, HIDMAIN, HID
from ..ui.WizardDialog import WizardDialog, uno, UIConsts, PropertyNames
from com.sun.star.awt.FontUnderline import SINGLE


class FaxWizardDialog(WizardDialog):

    def __init__(self, xmsf):
        super(FaxWizardDialog, self).__init__(xmsf, HIDMAIN)
        # Load resources
        self.resources = FaxWizardDialogResources()
        # Set dialog properties...
        self.setDialogProperties(True, 210, True, 104, 52, 1, 1,
            self.resources.resFaxWizardDialog_title, 310)
        self.fontDescriptor4 = uno.createUnoStruct(
            'com.sun.star.awt.FontDescriptor')
        self.fontDescriptor5 = uno.createUnoStruct(
            'com.sun.star.awt.FontDescriptor')
        self.fontDescriptor4.Weight = 100
        self.fontDescriptor5.Weight = 150

    def buildStep1(self):
        # Step 1: choose a business or private fax and its page style.
        self.optBusinessFax = self.insertRadioButton(
            "optBusinessFax", FaxWizardDialogConst.OPTBUSINESSFAX_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTBUSINESSFAX_HID,
             self.resources.resoptBusinessFax_value, 97, 28, 1, 1, 184), self)
        self.lstBusinessStyle = self.insertListBox(
            "lstBusinessStyle",
            FaxWizardDialogConst.LSTBUSINESSSTYLE_ACTION_PERFORMED,
            FaxWizardDialogConst.LSTBUSINESSSTYLE_ITEM_CHANGED,
            ("Dropdown", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (True, 12, FaxWizardDialogConst.LSTBUSINESSSTYLE_HID,
             180, 40, 1, 3, 74), self)
        self.optPrivateFax = self.insertRadioButton(
            "optPrivateFax", FaxWizardDialogConst.OPTPRIVATEFAX_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTPRIVATEFAX_HID,
             self.resources.resoptPrivateFax_value, 97, 81, 1, 2, 184), self)
        self.lstPrivateStyle = self.insertListBox(
            "lstPrivateStyle",
            FaxWizardDialogConst.LSTPRIVATESTYLE_ACTION_PERFORMED,
            FaxWizardDialogConst.LSTPRIVATESTYLE_ITEM_CHANGED,
            ("Dropdown", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (True, 12, FaxWizardDialogConst.LSTPRIVATESTYLE_HID,
             180, 95, 1, 4, 74), self)
        self.insertLabel(
            "lblBusinessStyle",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblBusinessStyle_value, 110, 42, 1, 32, 60))
        self.insertLabel(
            "lblTitle1",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_MULTILINE,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor5, 16, self.resources.reslblTitle1_value,
             True, 91, 8, 1, 37, 212))
        self.insertLabel(
            "lblPrivateStyle",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblPrivateStyle_value, 110, 95, 1, 50, 60))
        self.insertLabel(
            "lblIntroduction",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_MULTILINE, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (39, self.resources.reslblIntroduction_value, True,
             104, 145, 1, 55, 199))
        self.ImageControl3 = self.insertInfoImage(92, 145, 1)

    def buildStep2(self):
        # Step 2: choose which items (logo, date, subject, ...) to include.
        self.chkUseLogo = self.insertCheckBox(
            "chkUseLogo", FaxWizardDialogConst.CHKUSELOGO_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSELOGO_HID,
             self.resources.reschkUseLogo_value, 97, 28, 0, 2, 5, 212), self)
        self.chkUseDate = self.insertCheckBox(
            "chkUseDate", FaxWizardDialogConst.CHKUSEDATE_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSEDATE_HID,
             self.resources.reschkUseDate_value, 97, 43, 0, 2, 6, 212), self)
        self.chkUseCommunicationType = self.insertCheckBox(
            "chkUseCommunicationType",
            FaxWizardDialogConst.CHKUSECOMMUNICATIONTYPE_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSECOMMUNICATIONTYPE_HID,
             self.resources.reschkUseCommunicationType_value,
             97, 57, 0, 2, 7, 100), self)
        self.lstCommunicationType = self.insertComboBox(
            "lstCommunicationType",
            FaxWizardDialogConst.LSTCOMMUNICATIONTYPE_ACTION_PERFORMED,
            FaxWizardDialogConst.LSTCOMMUNICATIONTYPE_ITEM_CHANGED,
            FaxWizardDialogConst.LSTCOMMUNICATIONTYPE_TEXT_CHANGED,
            ("Dropdown", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (True, 12, FaxWizardDialogConst.LSTCOMMUNICATIONTYPE_HID,
             105, 68, 2, 8, 174), self)
        self.chkUseSubject = self.insertCheckBox(
            "chkUseSubject", FaxWizardDialogConst.CHKUSESUBJECT_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSESUBJECT_HID,
             self.resources.reschkUseSubject_value, 97, 87, 0, 2, 9, 212),
            self)
        self.chkUseSalutation = self.insertCheckBox(
            "chkUseSalutation",
            FaxWizardDialogConst.CHKUSESALUTATION_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSESALUTATION_HID,
             self.resources.reschkUseSalutation_value,
             97, 102, 0, 2, 10, 100), self)
        self.lstSalutation = self.insertComboBox(
            "lstSalutation",
            FaxWizardDialogConst.LSTSALUTATION_ACTION_PERFORMED,
            FaxWizardDialogConst.LSTSALUTATION_ITEM_CHANGED,
            FaxWizardDialogConst.LSTSALUTATION_TEXT_CHANGED,
            ("Dropdown", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (True, 12, FaxWizardDialogConst.LSTSALUTATION_HID,
             105, 113, 2, 11, 174), self)
        self.chkUseGreeting = self.insertCheckBox(
            "chkUseGreeting", FaxWizardDialogConst.CHKUSEGREETING_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSEGREETING_HID,
             self.resources.reschkUseGreeting_value,
             97, 132, 0, 2, 12, 100), self)
        self.lstGreeting = self.insertComboBox(
            "lstGreeting",
            FaxWizardDialogConst.LSTGREETING_ACTION_PERFORMED,
            FaxWizardDialogConst.LSTGREETING_ITEM_CHANGED,
            FaxWizardDialogConst.LSTGREETING_TEXT_CHANGED,
            ("Dropdown", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (True, 12, FaxWizardDialogConst.LSTGREETING_HID,
             105, 143, 2, 13, 174), self)
        self.chkUseFooter = self.insertCheckBox(
            "chkUseFooter", FaxWizardDialogConst.CHKUSEFOOTER_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKUSEFOOTER_HID,
             self.resources.reschkUseFooter_value, 97, 163, 0, 2, 14, 212),
            self)
        self.insertLabel(
            "lblTitle3",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_MULTILINE,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor5, 16, self.resources.reslblTitle3_value,
             True, 91, 8, 2, 59, 212))

    def buildStep3(self):
        # Step 3: sender and receiver addresses.
        self.optSenderPlaceholder = self.insertRadioButton(
            "optSenderPlaceholder",
            FaxWizardDialogConst.OPTSENDERPLACEHOLDER_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTSENDERPLACEHOLDER_HID,
             self.resources.resoptSenderPlaceholder_value,
             104, 42, 3, 15, 149), self)
        self.optSenderDefine = self.insertRadioButton(
            "optSenderDefine", FaxWizardDialogConst.OPTSENDERDEFINE_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTSENDERDEFINE_HID,
             self.resources.resoptSenderDefine_value, 104, 54, 3, 16, 149),
            self)
        self.txtSenderName = self.insertTextField(
            "txtSenderName", FaxWizardDialogConst.TXTSENDERNAME_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERNAME_HID,
             182, 67, 3, 17, 119), self)
        self.txtSenderStreet = self.insertTextField(
            "txtSenderStreet", FaxWizardDialogConst.TXTSENDERSTREET_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERSTREET_HID,
             182, 81, 3, 18, 119), self)
        self.txtSenderPostCode = self.insertTextField(
            "txtSenderPostCode",
            FaxWizardDialogConst.TXTSENDERPOSTCODE_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERPOSTCODE_HID,
             182, 95, 3, 19, 25), self)
        self.txtSenderState = self.insertTextField(
            "txtSenderState", FaxWizardDialogConst.TXTSENDERSTATE_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERSTATE_HID,
             211, 95, 3, 20, 21), self)
        self.txtSenderCity = self.insertTextField(
            "txtSenderCity", FaxWizardDialogConst.TXTSENDERCITY_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERCITY_HID,
             236, 95, 3, 21, 65), self)
        self.txtSenderFax = self.insertTextField(
            "txtSenderFax", FaxWizardDialogConst.TXTSENDERFAX_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTSENDERFAX_HID,
             182, 109, 3, 22, 119), self)
        self.optReceiverPlaceholder = self.insertRadioButton(
            "optReceiverPlaceholder",
            FaxWizardDialogConst.OPTRECEIVERPLACEHOLDER_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTRECEIVERPLACEHOLDER_HID,
             self.resources.resoptReceiverPlaceholder_value,
             104, 148, 3, 23, 200), self)
        self.optReceiverDatabase = self.insertRadioButton(
            "optReceiverDatabase",
            FaxWizardDialogConst.OPTRECEIVERDATABASE_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTRECEIVERDATABASE_HID,
             self.resources.resoptReceiverDatabase_value,
             104, 160, 3, 24, 200), self)
        self.insertLabel(
            "lblSenderAddress",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblSenderAddress_value, 97, 28, 3, 46, 136))
        self.insertFixedLine(
            "FixedLine2",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (5, 90, 126, 3, 51, 212))
        self.insertLabel(
            "lblSenderName",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblSenderName_value, 113, 69, 3, 52, 68))
        self.insertLabel(
            "lblSenderStreet",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblSenderStreet_value, 113, 82, 3, 53, 68))
        self.insertLabel(
            "lblPostCodeCity",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblPostCodeCity_value, 113, 97, 3, 54, 68))
        self.insertLabel(
            "lblTitle4",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_MULTILINE,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor5, 16, self.resources.reslblTitle4_value,
             True, 91, 8, 3, 60, 212))
        self.insertLabel(
            "lblSenderFax",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.resLabel1_value, 113, 111, 3, 68, 68))
        self.insertLabel(
            "Label2",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.resLabel2_value, 97, 137, 3, 69, 136))

    def buildStep4(self):
        # Step 4: footer text and footer options.
        self.txtFooter = self.insertTextField(
            "txtFooter", FaxWizardDialogConst.TXTFOOTER_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_MULTILINE, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (47, FaxWizardDialogConst.TXTFOOTER_HID, True,
             97, 40, 4, 25, 203), self)
        self.chkFooterNextPages = self.insertCheckBox(
            "chkFooterNextPages",
            FaxWizardDialogConst.CHKFOOTERNEXTPAGES_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKFOOTERNEXTPAGES_HID,
             self.resources.reschkFooterNextPages_value,
             97, 92, 0, 4, 26, 202), self)
        self.chkFooterPageNumbers = self.insertCheckBox(
            "chkFooterPageNumbers",
            FaxWizardDialogConst.CHKFOOTERPAGENUMBERS_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.CHKFOOTERPAGENUMBERS_HID,
             self.resources.reschkFooterPageNumbers_value,
             97, 106, 0, 4, 27, 201), self)
        self.insertLabel(
            "lblFooter",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor4, 8, self.resources.reslblFooter_value,
             97, 28, 4, 33, 116))
        self.insertLabel(
            "lblTitle5",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_MULTILINE,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor5, 16, self.resources.reslblTitle5_value,
             True, 91, 8, 4, 61, 212))

    def buildStep5(self):
        # Step 5: template name and how to proceed.
        self.txtTemplateName = self.insertTextField(
            "txtTemplateName",
            FaxWizardDialogConst.TXTTEMPLATENAME_TEXT_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             "Text", PropertyNames.PROPERTY_WIDTH),
            (12, FaxWizardDialogConst.TXTTEMPLATENAME_HID, 202, 56, 5, 28,
             self.resources.restxtTemplateName_value, 100), self)
        self.optCreateFax = self.insertRadioButton(
            "optCreateFax", FaxWizardDialogConst.OPTCREATEFAX_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTCREATEFAX_HID,
             self.resources.resoptCreateFax_value, 104, 111, 5, 30, 198),
            self)
        self.optMakeChanges = self.insertRadioButton(
            "optMakeChanges", FaxWizardDialogConst.OPTMAKECHANGES_ITEM_CHANGED,
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (8, FaxWizardDialogConst.OPTMAKECHANGES_HID,
             self.resources.resoptMakeChanges_value, 104, 123, 5, 31, 198),
            self)
        self.insertLabel(
            "lblFinalExplanation1",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_MULTILINE, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (28, self.resources.reslblFinalExplanation1_value, True,
             97, 28, 5, 34, 205))
        self.insertLabel(
            "lblProceed",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblProceed_value, 97, 100, 5, 35, 204))
        self.insertLabel(
            "lblFinalExplanation2",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_MULTILINE, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP,
             PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH),
            (33, self.resources.reslblFinalExplanation2_value, True,
             104, 145, 5, 36, 199))
        self.insertImage(
            "ImageControl2",
            ("Border", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_IMAGEURL, PropertyNames.PROPERTY_POSITION_X,
             PropertyNames.PROPERTY_POSITION_Y, "ScaleImage",
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (0, 10, UIConsts.INFOIMAGEURL, 92, 145, False, 5, 47, 10))
        self.insertLabel(
            "lblTemplateName",
            (PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (8, self.resources.reslblTemplateName_value, 97, 58, 5, 57, 101))
        self.insertLabel(
            "lblTitle6",
            ("FontDescriptor", PropertyNames.PROPERTY_HEIGHT,
             PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_MULTILINE,
             PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y,
             PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX,
             PropertyNames.PROPERTY_WIDTH),
            (self.fontDescriptor5, 16, self.resources.reslblTitle6_value,
             True, 91, 8, 5, 62, 212))
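
# A note on the convention used throughout this dialog: each insert* helper
# takes a tuple of property names paired positionally with a tuple of values.
# The helper below is a hypothetical illustration of that pairing (it is not
# part of the wizard API), assuming only the standard library:
def _as_property_dict(names, values):
    """Zip parallel (names, values) tuples into a dict for readability."""
    if len(names) != len(values):
        raise ValueError("parallel tuples must have the same length")
    return dict(zip(names, values))

# Pairing the lblTemplateName tuples above, for instance, would map each
# PropertyNames constant (presumably UNO names such as "Height" and
# "PositionX") to 8, 97, 58, and so on.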
import logging
from typing import Dict
from typing import List

import boto3
import neo4j
from botocore.exceptions import ClientError

from .util import get_botocore_config
from cartography.util import aws_handle_regions
from cartography.util import run_cleanup_job
from cartography.util import timeit

logger = logging.getLogger(__name__)


@timeit
@aws_handle_regions
def get_reserved_instances(boto3_session: boto3.session.Session, region: str) -> List[Dict]:
    client = boto3_session.client('ec2', region_name=region, config=get_botocore_config())
    try:
        reserved_instances = client.describe_reserved_instances()['ReservedInstances']
    except ClientError as e:
        logger.warning(f"Failed to retrieve reserved instances for region - {region}. Error - {e}")
        raise
    return reserved_instances


@timeit
def load_reserved_instances(
    neo4j_session: neo4j.Session, data: List[Dict], region: str,
    current_aws_account_id: str, update_tag: int,
) -> None:
    ingest_reserved_instances = """
    UNWIND {reserved_instances_list} as res
    MERGE (ri:EC2ReservedInstance{id: res.ReservedInstancesId})
    ON CREATE SET ri.firstseen = timestamp()
    SET ri.lastupdated = {update_tag}, ri.availabilityzone = res.AvailabilityZone,
    ri.duration = res.Duration, ri.end = res.End, ri.start = res.Start,
    ri.count = res.InstanceCount, ri.type = res.InstanceType,
    ri.productdescription = res.ProductDescription, ri.state = res.State,
    ri.currencycode = res.CurrencyCode, ri.instancetenancy = res.InstanceTenancy,
    ri.offeringclass = res.OfferingClass, ri.offeringtype = res.OfferingType,
    ri.scope = res.Scope, ri.fixedprice = res.FixedPrice, ri.region = {Region}
    WITH ri
    MATCH (aa:AWSAccount{id: {AWS_ACCOUNT_ID}})
    MERGE (aa)-[r:RESOURCE]->(ri)
    ON CREATE SET r.firstseen = timestamp()
    SET r.lastupdated = {update_tag}
    """
    # boto3 returns Start/End as datetime objects; stringify them so the
    # driver can serialize them as node properties.
    for r_instance in data:
        r_instance['Start'] = str(r_instance['Start'])
        r_instance['End'] = str(r_instance['End'])
    neo4j_session.run(
        ingest_reserved_instances,
        reserved_instances_list=data,
        AWS_ACCOUNT_ID=current_aws_account_id,
        Region=region,
        update_tag=update_tag,
    )


@timeit
def cleanup_reserved_instances(neo4j_session: neo4j.Session, common_job_parameters: Dict) -> None:
    run_cleanup_job(
        'aws_import_reserved_instances_cleanup.json',
        neo4j_session,
        common_job_parameters,
    )


@timeit
def sync_ec2_reserved_instances(
    neo4j_session: neo4j.Session, boto3_session: boto3.session.Session,
    regions: List[str], current_aws_account_id: str, update_tag: int,
    common_job_parameters: Dict,
) -> None:
    for region in regions:
        logger.debug(
            "Syncing reserved instances for region '%s' in account '%s'.",
            region, current_aws_account_id,
        )
        data = get_reserved_instances(boto3_session, region)
        load_reserved_instances(neo4j_session, data, region, current_aws_account_id, update_tag)
    cleanup_reserved_instances(neo4j_session, common_job_parameters)
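
# A minimal usage sketch for the sync entry point above, assuming a local
# Neo4j instance and default AWS credentials. The URI, password, account id,
# region list, and the key names in common_job_parameters are illustrative
# assumptions, not values required by this module.
if __name__ == "__main__":
    import time

    driver = neo4j.GraphDatabase.driver(
        "bolt://localhost:7687", auth=("neo4j", "password"),  # assumed endpoint
    )
    update_tag = int(time.time())
    with driver.session() as session:
        sync_ec2_reserved_instances(
            session,
            boto3.session.Session(),  # picks up default AWS credentials
            ["us-east-1"],            # example region list
            "123456789012",           # example AWS account id
            update_tag,
            {"UPDATE_TAG": update_tag, "AWS_ID": "123456789012"},  # assumed keys
        )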
import unittest

import clpy
from clpy import testing


@testing.gpu
class TestPermutations(unittest.TestCase):

    _multiprocess_can_split_ = True


@testing.gpu
class TestShuffle(unittest.TestCase):

    _multiprocess_can_split_ = True

    # Test ranks
    @testing.numpy_clpy_raises()
    def test_shuffle_zero_dim(self, xp):
        a = testing.shaped_random((), xp)
        xp.random.shuffle(a)

    # Test same values
    @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True)
    def test_shuffle_sort_1dim(self, dtype):
        a = clpy.arange(10, dtype=dtype)
        b = clpy.copy(a)
        clpy.random.shuffle(a)
        testing.assert_allclose(clpy.sort(a), b)

    @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True)
    def test_shuffle_sort_ndim(self, dtype):
        a = clpy.arange(15, dtype=dtype).reshape(5, 3)
        b = clpy.copy(a)
        clpy.random.shuffle(a)
        testing.assert_allclose(clpy.sort(a, axis=0), b)

    # Test seed
    @testing.for_all_dtypes()
    def test_shuffle_seed1(self, dtype):
        a = testing.shaped_random((10,), clpy, dtype)
        b = clpy.copy(a)
        clpy.random.seed(0)
        clpy.random.shuffle(a)
        clpy.random.seed(0)
        clpy.random.shuffle(b)
        testing.assert_allclose(a, b)
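
# The seed test above depends on shuffle being deterministic under a fixed
# seed. A NumPy sketch of the same contract (clpy mirrors the numpy.random
# API, so this illustrates the property without requiring an OpenCL device):
if __name__ == "__main__":
    import numpy as np

    a = np.arange(10)
    b = a.copy()
    np.random.seed(0)
    np.random.shuffle(a)
    np.random.seed(0)
    np.random.shuffle(b)
    assert (a == b).all()  # identical seeds yield identical permutations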
import ctypes
import msvcrt
import sys
import threading
from asyncio import AbstractEventLoop, run_coroutine_threadsafe
from ctypes import Structure, Union, byref, wintypes
from ctypes.wintypes import BOOL, CHAR, DWORD, HANDLE, SHORT, UINT, WCHAR, WORD
from typing import IO, Callable, List, Optional

from .._types import EventTarget
from .._xterm_parser import XTermParser
from ..events import Event, Resize
from ..geometry import Size

KERNEL32 = ctypes.WinDLL("kernel32", use_last_error=True)

# Console input modes
ENABLE_ECHO_INPUT = 0x0004
ENABLE_EXTENDED_FLAGS = 0x0080
ENABLE_INSERT_MODE = 0x0020
ENABLE_LINE_INPUT = 0x0002
ENABLE_MOUSE_INPUT = 0x0010
ENABLE_PROCESSED_INPUT = 0x0001
ENABLE_QUICK_EDIT_MODE = 0x0040
ENABLE_WINDOW_INPUT = 0x0008
ENABLE_VIRTUAL_TERMINAL_INPUT = 0x0200

# Console output modes
ENABLE_PROCESSED_OUTPUT = 0x0001
ENABLE_WRAP_AT_EOL_OUTPUT = 0x0002
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
DISABLE_NEWLINE_AUTO_RETURN = 0x0008
ENABLE_LVB_GRID_WORLDWIDE = 0x0010

STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE = -11

WAIT_TIMEOUT = 0x00000102

GetStdHandle = KERNEL32.GetStdHandle
GetStdHandle.argtypes = [wintypes.DWORD]
GetStdHandle.restype = wintypes.HANDLE


class COORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/coord-str"""

    _fields_ = [
        ("X", SHORT),
        ("Y", SHORT),
    ]


class uChar(Union):
    """https://docs.microsoft.com/en-us/windows/console/key-event-record-str"""

    _fields_ = [
        ("AsciiChar", CHAR),
        ("UnicodeChar", WCHAR),
    ]


class KEY_EVENT_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/key-event-record-str"""

    _fields_ = [
        ("bKeyDown", BOOL),
        ("wRepeatCount", WORD),
        ("wVirtualKeyCode", WORD),
        ("wVirtualScanCode", WORD),
        ("uChar", uChar),
        ("dwControlKeyState", DWORD),
    ]


class MOUSE_EVENT_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/mouse-event-record-str"""

    _fields_ = [
        ("dwMousePosition", COORD),
        ("dwButtonState", DWORD),
        ("dwControlKeyState", DWORD),
        ("dwEventFlags", DWORD),
    ]


class WINDOW_BUFFER_SIZE_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/window-buffer-size-record-str"""

    _fields_ = [("dwSize", COORD)]


class MENU_EVENT_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/menu-event-record-str"""

    _fields_ = [("dwCommandId", UINT)]


class FOCUS_EVENT_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/focus-event-record-str"""

    _fields_ = [("bSetFocus", BOOL)]


class InputEvent(Union):
    """https://docs.microsoft.com/en-us/windows/console/input-record-str"""

    _fields_ = [
        ("KeyEvent", KEY_EVENT_RECORD),
        ("MouseEvent", MOUSE_EVENT_RECORD),
        ("WindowBufferSizeEvent", WINDOW_BUFFER_SIZE_RECORD),
        ("MenuEvent", MENU_EVENT_RECORD),
        ("FocusEvent", FOCUS_EVENT_RECORD),
    ]


class INPUT_RECORD(Structure):
    """https://docs.microsoft.com/en-us/windows/console/input-record-str"""

    _fields_ = [("EventType", wintypes.WORD), ("Event", InputEvent)]


def _set_console_mode(file: IO, mode: int) -> bool:
    """Set the console mode for a given file (stdout or stdin).

    Args:
        file (IO): A file-like object.
        mode (int): New mode.

    Returns:
        bool: True on success, otherwise False.
    """
    windows_filehandle = msvcrt.get_osfhandle(file.fileno())
    success = KERNEL32.SetConsoleMode(windows_filehandle, mode)
    return success


def _get_console_mode(file: IO) -> int:
    """Get the console mode for a given file (stdout or stdin).

    Args:
        file (IO): A file-like object.

    Returns:
        int: The current console mode.
    """
    windows_filehandle = msvcrt.get_osfhandle(file.fileno())
    mode = wintypes.DWORD()
    KERNEL32.GetConsoleMode(windows_filehandle, ctypes.byref(mode))
    return mode.value


def enable_application_mode() -> Callable[[], None]:
    """Enable application mode.

    Returns:
        Callable[[], None]: A callable that will restore the terminal to its
            previous state.
    """
    terminal_in = sys.stdin
    terminal_out = sys.stdout

    current_console_mode_in = _get_console_mode(terminal_in)
    current_console_mode_out = _get_console_mode(terminal_out)

    def restore() -> None:
        """Restore console mode to previous settings."""
        _set_console_mode(terminal_in, current_console_mode_in)
        _set_console_mode(terminal_out, current_console_mode_out)

    _set_console_mode(
        terminal_out, current_console_mode_out | ENABLE_VIRTUAL_TERMINAL_PROCESSING
    )
    _set_console_mode(terminal_in, ENABLE_VIRTUAL_TERMINAL_INPUT)
    return restore


def _wait_for_handles(handles: List[HANDLE], timeout: int = -1) -> Optional[HANDLE]:
    """
    Waits for multiple handles. (Similar to 'select'.) Returns the handle
    which is ready. Returns `None` on timeout.

    http://msdn.microsoft.com/en-us/library/windows/desktop/ms687025(v=vs.85).aspx

    Note that handles should be a list of `HANDLE` objects, not integers. See
    this comment in the patch by @quark-zju for the reason why:

        ''' Make sure HANDLE on Windows has a correct size

        Previously, the type of various HANDLEs are native Python integer
        types. The ctypes library will treat them as 4-byte integer when used
        in function arguments. On 64-bit Windows, HANDLE is 8-byte and usually
        a small integer. Depending on whether the extra 4 bytes are zero-ed
        out or not, things can happen to work, or break. '''

    This function returns either `None` or one of the given `HANDLE` objects.
    (The return value can be tested with the `is` operator.)
    """
    arrtype = HANDLE * len(handles)
    handle_array = arrtype(*handles)

    ret: int = KERNEL32.WaitForMultipleObjects(
        len(handle_array), handle_array, BOOL(False), DWORD(timeout)
    )

    if ret == WAIT_TIMEOUT:
        return None
    else:
        return handles[ret]


class EventMonitor(threading.Thread):
    """A thread to send key / window events to the Textual loop."""

    def __init__(
        self,
        loop: AbstractEventLoop,
        app,
        target: EventTarget,
        exit_event: threading.Event,
        process_event: Callable[[Event], None],
    ) -> None:
        self.loop = loop
        self.app = app
        self.target = target
        self.exit_event = exit_event
        self.process_event = process_event
        self.app.log("event monitor constructed")
        super().__init__()

    def run(self) -> None:
        self.app.log("event monitor thread started")
        exit_requested = self.exit_event.is_set
        parser = XTermParser(self.target, lambda: False)
        try:
            read_count = wintypes.DWORD(0)
            hIn = GetStdHandle(STD_INPUT_HANDLE)

            MAX_EVENTS = 1024
            KEY_EVENT = 0x0001
            WINDOW_BUFFER_SIZE_EVENT = 0x0004

            arrtype = INPUT_RECORD * MAX_EVENTS
            input_records = arrtype()
            ReadConsoleInputW = KERNEL32.ReadConsoleInputW
            keys: List[str] = []
            append_key = keys.append

            while not exit_requested():
                # Wait for new events
                if _wait_for_handles([hIn], 200) is None:
                    # No new events
                    continue

                # Get new events
                ReadConsoleInputW(
                    hIn, byref(input_records), MAX_EVENTS, byref(read_count)
                )
                read_input_records = input_records[: read_count.value]

                del keys[:]
                new_size: Optional[tuple[int, int]] = None

                for input_record in read_input_records:
                    event_type = input_record.EventType

                    if event_type == KEY_EVENT:
                        # Key event, store unicode char in keys list
                        key_event = input_record.Event.KeyEvent
                        key = key_event.uChar.UnicodeChar
                        if key_event.bKeyDown or key == "\x1b":
                            append_key(key)
                    elif event_type == WINDOW_BUFFER_SIZE_EVENT:
                        # Window size changed, store size
                        size = input_record.Event.WindowBufferSizeEvent.dwSize
                        new_size = (size.X, size.Y)

                if keys:
                    # Process keys
                    for event in parser.feed("".join(keys)):
                        self.process_event(event)
                if new_size is not None:
                    # Process changed size
                    self.on_size_change(*new_size)
        except Exception as error:
            self.app.log("EVENT MONITOR ERROR", error)
        self.app.log("event monitor thread finished")

    def on_size_change(self, width: int, height: int) -> None:
        """Called when the terminal size changes."""
        event = Resize(self.target, Size(width, height))
        run_coroutine_threadsafe(self.target.post_message(event), loop=self.loop)