repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
100star/h2o
py/testdir_release/sh2junit.py
31
16028
import sys, psutil, os, stat, tempfile, argparse, time, datetime sys.path.extend(['.','..','../..','py']) import h2o_sandbox # Stripped down, similar to h2o.py has for these functions # Possible to do this in bash, but the code becomes cryptic. # You can execute this as sh2junit.py <bash command string> # sh2junit runs the cmd_string as a subprocess, with stdout/stderr going to files in sandbox # and stdout to python stdout too. # When it completes, check the sandbox for errors (using h2o_sandbox.py # prints interesting things to stdout. Creates the result xml in the current dire # with name "sh2junit_<name>.xml" def sandbox_tmp_file(prefix='', suffix=''): # this gives absolute path, good! dirname = './sandbox' if not os.path.exists(dirname): print "no ./sandbox. Creating" os.makedirs(dirname) fd, path = tempfile.mkstemp(prefix=prefix, suffix=suffix, dir=dirname) # make sure the file now exists # os.open(path, 'a').close() # give everyone permission to read it (jenkins running as # 0xcustomer needs to archive as jenkins #permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH os.chmod(path, 0644) #'644') #permissions) return (fd, path) #************************************************************************** # Example junit xml #<?xml version="1.0" encoding="UTF-8"?> #<testsuites disabled="" errors="" failures="" name="" tests="" time=""> # <testsuite disabled="" errors="" failures="" hostname="" id="" name="" package="" skipped="" tests="" time="" timestamp=""> # <properties> # <property name="" value=""/> # </properties> # <testcase assertions="" classname="" name="" status="" time=""> # <skipped/> # <error message="" type=""/> # <failure message="" type=""/> # <system-out/> # <system-err/> # </testcase> # <system-out/> # <system-err/> # </testsuite> #</testsuites> def create_junit_xml(name, out, err, sandboxErrorMessage, errors=0, elapsed=0): # http://junitpdfreport.sourceforge.net/managedcontent/PdfTranslation # not really nosetests..just 
trying to mimic the python xml content = '<?xml version="1.0" encoding="UTF-8" ?>\n' content += ' <testsuite name="nosetests" tests="1" errors="%s" failures="0" skip="0">\n' % (errors) content += ' <testcase classname="%s" name="%s" time="%0.4f">\n' % (name, name, elapsed) if errors != 0 and not sandboxErrorMessage: content += ' <error type="Non-zero R exit code" message="Non-zero R exit code"></error>\n' # may or may not be 2 errors (R exit code plus log error if errors != 0 and sandboxErrorMessage: content += ' <error type="Error in h2o logs" message="Error in h2o logs"></error>\n' content += ' <system-out>\n' content += '<![CDATA[\n' content += 'spawn stdout' + str(datetime.datetime.now()) + '**********************************************************\n' content += out content += ']]>\n' content += ' </system-out>\n' content += ' <system-err>\n' content += '<![CDATA[\n' content += 'spawn stderr' + str(datetime.datetime.now()) + '**********************************************************\n' content += err if sandboxErrorMessage: content += 'spawn errors from sandbox log parsing*********************************\n' # maybe could split this into a 2nd stdout or stder ..see above content += sandboxErrorMessage content += ']]>\n' content += ' </system-err>\n' content += ' </testcase>\n' content += ' </testsuite>\n' # see if adding nosetests makes michal's stuff pick it up?? # and "test_" prefix" x = './test_' + os.path.basename(name) + '.nosetests.xml' with open(x, 'wb') as f: f.write(content) #f = open(x, 'w') #f.write(content) #f.close() #************************************************************************** # belt and suspenders. Do we really need to worry about this? def terminate_process_tree(pid, including_parent=True): parent = psutil.Process(pid) for child in parent.get_children(recursive=True): try: child.terminate() except psutil.NoSuchProcess: print "terminate_process_tree:", "NoSuchProcess. 
couldn't terminate child process with pid %s" % child.pid() except psutil.AccessDenied: print "terminate_process_tree:", "couldn't terminate child process with pid %s" % child.pid() else: child.wait(timeout=3) if including_parent: try: parent.terminate() except psutil.NoSuchProcess: print "terminate_process_tree:", "NoSuchProcess. couldn't terminate parent process with pid %s" % parent.pid() pass except psutil.AccessDenied: print "terminate_process_tree:", "AccessDenied. couldn't terminate parent process with pid %s" % parent.pid() else: parent.wait(timeout=3) def terminate_child_processes(): me = os.getpid() terminate_process_tree(me, including_parent=False) #************************************************************************** def rc_if_exists_and_done(ps): try: rc = ps.wait(0) except psutil.TimeoutExpired: # not sure why I'm getting this print "Got TimeoutExpired on the R subprocess, may be legal" rc = None except psutil.NoSuchProcess: raise Exception("The R subprocess disappeared when we thought it should still be there") except psutil.AccessDenied: raise Exception("The R subprocess gave us AccessDenied") # rc = None means it already completed? # FIX! Is it none if we get a timeout exception on this python ..how is that captured? if rc: # increment the global errors count if we get a non-zero rc. non-zero rc should only happen once? 
error = 1 print "rc_if_exists_and_done: got non-zero rc: %s" % rc else: error = 0 return (rc, error) #************************************************************************** def sh2junit(name='NoName', cmd_string='/bin/ls', timeout=300, shdir=None, **kwargs): # split by arbitrary strings of whitespace characters (space, tab, newline, return, formfeed) print "cmd_string:", cmd_string cmdList = cmd_string.split() # these are absolute paths outfd, outpath = sandbox_tmp_file(prefix=name + '.stdout.', suffix='.log') errfd, errpath = sandbox_tmp_file(prefix=name + '.stderr.', suffix='.log') # make outpath and errpath full paths, so we can redirect print "outpath:", outpath print "errpath:", errpath start = time.time() print "psutil.Popen:", cmdList, outpath, errpath import subprocess # start the process in the target dir, if desired if shdir: currentDir = os.getcwd() os.chdir(shdir) ps = psutil.Popen(cmdList, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) if shdir: os.chdir(currentDir) comment = 'PID %d, stdout %s, stderr %s' % ( ps.pid, os.path.basename(outpath), os.path.basename(errpath)) print "spawn_cmd", cmd_string, comment # Reads the subprocess stdout until it is closed and # ...echo it our python stdout and also the R stdout file in sandbox # Then wait for the program to exit. # Read before wait so that you don't risk the pipe filling up and hanging the program. # You wait after read for the final program exit and return code. # If you don't wait, you'll get a zombie process (at least on linux) # this might not do what we want..see: # http://stackoverflow.com/questions/2804543/read-subprocess-stdout-line-by-line # I suppose we'll stop early? # shouldn't need a delay before checking this? if not ps.is_running(): raise Exception("sh2junit: not immediate ps.is_running after start") # Until we get the rc, it can be a zombie process. # A zombie process is not a real process. 
# it's just a remaining entry in the process table until the parent process requests the child's return code. # The actual process has ended and requires no other resources but said process table entry. linesMayExist = True errors = 0 timeoutError = False while linesMayExist: # get whatever accumulated, up to nothing returned # only do up to 20 lines before we check timeout again # why was R processes not completing on centos? # linesMayExist = ps.is_running() and not ps.status() == psutil.STATUS_ZOMBIE linesMayExist = ps.is_running() lineBurstCnt = 0 # stdout from subprocess line = ps.stdout.readline() # R apparently uses stderr a lot, so want to mix that in. We don't grab it until we hit a stall in R stdout though. while line: lineBurstCnt += 1 # maybe I should use p.communicate() instead. have to keep it to stdout? or do stdout+stderr here sys.stdout.write("R->" + line) # to our python stdout, with a prefix so it's obviously from R sys.stdout.flush() os.write(outfd, line) # to sandbox R stdout elapsed = time.time() - start if elapsed > timeout: timeoutError = True errors += 1 print "ERROR: sh2junit: elapsed: %0.2f timeout: %s (secs) while echoing subprocess stdout" % (elapsed, timeout) #kill R subprocess but don't kill me terminate_process_tree(ps.pid, including_parent=False) break line = ps.stdout.readline() if timeoutError: print "\n\n\nERROR: timeout" break # stderr from subprocess line = ps.stderr.readline() while line: lineBurstCnt += 1 sys.stdout.write("Re->" + line) # to our python stdout, with a prefix so it's obviously from R stderr sys.stdout.flush() os.write(errfd, line) # to sandbox R stderr line = ps.stderr.readline() print "lineBurstCnt:", lineBurstCnt # Check. may have flipped to not running, and we just got the last bit. 
# shouldn't be a race on a transition here, if ps.wait(0) completion syncs the transition if linesMayExist: print "ps.is_running():", ps.is_running(), ps.pid, ps.name, ps.status, ps.create_time # unload the return code without waiting..so we don't have a zombie! (lastrc, error) = rc_if_exists_and_done(ps) errors += error elapsed = time.time() - start # forever if timeout is None #if timeout and elapsed > timeout: if elapsed > timeout: timeoutError = True errors += 1 # we don't want to exception here, because we're going to print the xml that says there's an error # I guess we'll end up terminating the R process down below # could we have lines in stdout we didn't catch up on? maybe, but do we care? print "ERROR: sh2junit: elapsed: %0.2f timeout: %s (secs) while echoing subprocess stdout" % (elapsed, timeout) #kill R subprocess but don't kill me #terminate_process_tree(ps.pid, including_parent=False) break # wait for some more output to accumulate time.sleep(0.25) # It shouldn't be running now? # timeout=None waits forever. timeout=0 returns immediately. # default above is 5 minutes # Wait for process termination. Since child: return the exit code. # If the process is already terminated does not raise NoSuchProcess exception # but just return None immediately. # If timeout is specified and process is still alive raises psutil.TimeoutExpired() exception. # old # rc = ps.wait(timeout) (lastrc, error) = rc_if_exists_and_done(ps) errors += error elapsed = time.time() - start # Prune h2o logs to interesting lines and detect errors. # Error lines are returned. warning/info are printed to our (python stdout) # so that's always printed/saved? 
# None if no error sandboxErrorMessage = h2o_sandbox.check_sandbox_for_errors( LOG_DIR='./sandbox', python_test_name=name, cloudShutdownIsError=True, sandboxIgnoreErrors=True) # don't take exception on error if sandboxErrorMessage: errors += 1 out = file(outpath).read() err = file(errpath).read() create_junit_xml(name, out, err, sandboxErrorMessage, errors=errors, elapsed=elapsed) if not errors: return (errors, outpath, errpath) else: # dump all the info as part of the exception? maybe too much # is this bad to do in all cases? do we need it? hline = "\n===========================================BEGIN DUMP=============================================================\n" hhline = "\n===========================================END DUMP=============================================================\n" out = '[stdout->err]: '.join(out.splitlines(True)) err = '[sterr->err]: '.join(err.splitlines(True)) if ps.is_running(): print "Before terminate:", ps.pid, ps.is_running() terminate_process_tree(ps.pid, including_parent=True) if sandboxErrorMessage: print "\n\n\nError in Sandbox. Ending test. Dumping sub-process output.\n" print hline raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\tErrors found in ./sandbox log files?.\nR stdout:\n%s\n\nR stderr:\n%s\n%s" % (name, cmd_string, lastrc, errors, out, err, hhline)) # could have already terminated? elif timeoutError: print "\n\n\nTimeout Error. Ending test. Dumping sub-process output.\n" print hline raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\ttimed out after %d secs. \nR stdout:\n%s\n\nR stderr:\n%s\n%s" % (name, cmd_string, lastrc, errors, timeout or 0, out, err, hhline)) else: print "\n\n\nCaught exception. Ending test. 
Dumping sub-process output.\n" print hline raise Exception("%s %s \n\tlastrc:%s \n\terrors:%s \n\tLikely non-zero exit code from R.\nR stdout:\n%s\n\nR stderr:\n%s\n%s" % (name, cmd_string, lastrc, errors, out, err, hhline)) #************************************************************************** if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('-shdir', type=str, default=None, help='executes the $cmd in the target dir, but the logs stay in sandbox here') parser.add_argument('-name', type=str, default='NoName', help='used to help name the xml/stdout/stderr logs created') parser.add_argument('-timeout', type=int, default=5, help='secs timeout for the shell subprocess. Fail if timeout') parser.add_argument('-cmd', '--cmd_string', type=str, default=None, help="cmd string to pass to shell subprocess. Better to just use'--' to start the cmd (everything after that is sucked in)") parser.add_argument('Rargs', nargs=argparse.REMAINDER) args = parser.parse_args() if args.cmd_string: cmd_string = args.cmd_string else: # easiest way to handle multiple tokens for command # end with -- and this grabs the rest # drop the leading '--' if we stopped parsing the rest that way if args.Rargs: print "args.Rargs:", args.Rargs if args.Rargs[0]=='--': args.Rargs[0] = '' cmd_string = ' '.join(args.Rargs) else: # placeholder for test cmd_string = '/bin/ls' sh2junit(name=args.name, cmd_string=cmd_string, timeout=args.timeout, shdir=args.shdir)
apache-2.0
nicolaoun/NS3-AM-Proto-Simulation
src/lr-wpan/bindings/callbacks_list.py
42
2037
callback_classes = [ ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'unsigned int', 'ns3::Ptr<ns3::Packet>', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanMacState', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPibAttributeIdentifier', 'ns3::LrWpanPhyPibAttributes*', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::McpsDataIndicationParams', 'ns3::Ptr<ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::McpsDataConfirmParams', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::LrWpanPhyEnumeration', 'ns3::LrWpanPibAttributeIdentifier', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ['unsigned char', 'ns3::Ptr<ns3::QueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], ]
gpl-2.0
soumyanishan/azure-linux-extensions
OSPatching/azure/storage/storageclient.py
51
5800
#------------------------------------------------------------------------- # Copyright (c) Microsoft. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #-------------------------------------------------------------------------- import os import sys from azure import ( WindowsAzureError, DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY, _ERROR_STORAGE_MISSING_INFO, ) from azure.http import HTTPError from azure.http.httpclient import _HTTPClient from azure.storage import _storage_error_handler #-------------------------------------------------------------------------- # constants for azure app setting environment variables AZURE_STORAGE_ACCOUNT = 'AZURE_STORAGE_ACCOUNT' AZURE_STORAGE_ACCESS_KEY = 'AZURE_STORAGE_ACCESS_KEY' EMULATED = 'EMULATED' #-------------------------------------------------------------------------- class _StorageClient(object): ''' This is the base class for BlobManager, TableManager and QueueManager. ''' def __init__(self, account_name=None, account_key=None, protocol='https', host_base='', dev_host=''): ''' account_name: your storage account name, required for all operations. account_key: your storage account key, required for all operations. protocol: Optional. Protocol. Defaults to http. host_base: Optional. Live host base url. Defaults to Azure url. Override this for on-premise. dev_host: Optional. Dev host url. Defaults to localhost. 
''' self.account_name = account_name self.account_key = account_key self.requestid = None self.protocol = protocol self.host_base = host_base self.dev_host = dev_host # the app is not run in azure emulator or use default development # storage account and key if app is run in emulator. self.use_local_storage = False # check whether it is run in emulator. if EMULATED in os.environ: self.is_emulated = os.environ[EMULATED].lower() != 'false' else: self.is_emulated = False # get account_name and account key. If they are not set when # constructing, get the account and key from environment variables if # the app is not run in azure emulator or use default development # storage account and key if app is run in emulator. if not self.account_name or not self.account_key: if self.is_emulated: self.account_name = DEV_ACCOUNT_NAME self.account_key = DEV_ACCOUNT_KEY self.protocol = 'http' self.use_local_storage = True else: self.account_name = os.environ.get(AZURE_STORAGE_ACCOUNT) self.account_key = os.environ.get(AZURE_STORAGE_ACCESS_KEY) if not self.account_name or not self.account_key: raise WindowsAzureError(_ERROR_STORAGE_MISSING_INFO) self._httpclient = _HTTPClient( service_instance=self, account_key=self.account_key, account_name=self.account_name, protocol=self.protocol) self._batchclient = None self._filter = self._perform_request_worker def with_filter(self, filter): ''' Returns a new service which will process requests with the specified filter. Filtering operations can include logging, automatic retrying, etc... The filter is a lambda which receives the HTTPRequest and another lambda. The filter can perform any pre-processing on the request, pass it off to the next lambda, and then perform any post-processing on the response. 
''' res = type(self)(self.account_name, self.account_key, self.protocol) old_filter = self._filter def new_filter(request): return filter(request, old_filter) res._filter = new_filter return res def set_proxy(self, host, port, user=None, password=None): ''' Sets the proxy server host and port for the HTTP CONNECT Tunnelling. host: Address of the proxy. Ex: '192.168.0.100' port: Port of the proxy. Ex: 6000 user: User for proxy authorization. password: Password for proxy authorization. ''' self._httpclient.set_proxy(host, port, user, password) def _get_host(self): if self.use_local_storage: return self.dev_host else: return self.account_name + self.host_base def _perform_request_worker(self, request): return self._httpclient.perform_request(request) def _perform_request(self, request, text_encoding='utf-8'): ''' Sends the request and return response. Catches HTTPError and hand it to error handler ''' try: if self._batchclient is not None: return self._batchclient.insert_request_to_batch(request) else: resp = self._filter(request) if sys.version_info >= (3,) and isinstance(resp, bytes) and \ text_encoding: resp = resp.decode(text_encoding) except HTTPError as ex: _storage_error_handler(ex) return resp
apache-2.0
yatinkumbhare/openstack-nova
nova/tests/functional/v3/api_paste_fixture.py
25
1543
# Copyright 2015 NEC Corporation. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import fixtures from oslo_config import cfg from nova import paths CONF = cfg.CONF class ApiPasteFixture(fixtures.Fixture): def setUp(self): super(ApiPasteFixture, self).setUp() CONF.set_default('api_paste_config', paths.state_path_def('etc/nova/api-paste.ini')) tmp_api_paste_dir = self.useFixture(fixtures.TempDir()) tmp_api_paste_file_name = os.path.join(tmp_api_paste_dir.path, 'fake_api_paste.ini') with open(CONF.api_paste_config, 'r') as orig_api_paste: with open(tmp_api_paste_file_name, 'w') as tmp_file: for line in orig_api_paste: tmp_file.write(line.replace( "/v2: openstack_compute_api_v2", "/v2: openstack_compute_api_v21")) CONF.set_override('api_paste_config', tmp_api_paste_file_name)
apache-2.0
rbprogrammer/advanced_python_topics
course-material/py2/solutions/07 XML Processing/Ex7.1.py
1
1036
#!/usr/local/bin/python2 # Ex7.1.py Python 2 version # Script to parse an XML file and enumerate tags import sys from xml.parsers import expat # Allow user to provide a filename, or default to books.xml filename = sys.argv[1] if sys.argv[1:] else 'books.xml' Tags = 0 tags = {} class ExpatError(Exception): pass def start_tag(name, attr): global Tags, tags Tags += 1 if name in tags: tags[name] += 1 else: tags[name] = 1 # The following line does the same as the if/else above # tags.get() does not raise an exception if the default value (zero here) # is supplied. # tags[name] = 1 + tags.get(name, 0) ExParser = expat.ParserCreate() ExParser.StartElementHandler = start_tag try: ExParser.ParseFile(open(filename, 'rb')) except ExpatError: print >> sys.stderr, "Ooops!" exit(1) else: for k, v in tags.items(): print k.ljust(15), ":", v print "XML is well-formed and has ", \ "%s tags of which %d are unique" % (Tags, len(tags.keys()))
apache-2.0
marcellfischbach/CobaltSKY
Scripts/Blender/valkyrie/geometry.py
1
1365
import valkyrie import struct class GeometryWriter: GT_GeometryMesh = 0 GT_GeometryCollection = 1 GT_GeometryLOD = 2 DM_Internal = 0 DM_Exteral = 1 def __init__(self): self.stream = [] self.material_map = {} def write(self, multi_mesh): self.stream += struct.pack('<I', GeometryWriter.GT_GeometryMesh) self.stream += struct.pack('<ffff', 1.0, 0.0, 0.0, 0.0); self.stream += struct.pack('<ffff', 0.0, 1.0, 0.0, 0.0); self.stream += struct.pack('<ffff', 0.0, 0.0, 1.0, 0.0); self.stream += struct.pack('<ffff', 0.0, 0.0, 0.0, 1.0); self.stream += struct.pack('<I', GeometryWriter.DM_Internal) self._write_string('Mesh') # export the materials that are stored within this static mesh self._prepare_material_map(multi_mesh) self._write_material_names() def _prepare_material_map(self, multi_mesh): i = 0 for mesh_data in multi_mesh.mesh_datas: mat_name = mesh_data.material_name if not mat_name in self.material_map: self.material_map[mat_name] = i i+=1 def _write_material_names(self): self.stream += struct.pack('<I', len(self.material_map)) for material_name in self.material_map: self._write_string(material_name) def _write_string(self, string): _string = bytes(string, 'latin1') self.stream += struct.pack("<I%dsb" % (len(_string)), len(_string)+1, _string, 0)
gpl-2.0
LIKAIMO/MissionPlanner
LogAnalyzer/tests/TestIMUMatch.py
61
3781
from LogAnalyzer import Test,TestResult import DataflashLog from math import sqrt class TestIMUMatch(Test): '''test for empty or near-empty logs''' def __init__(self): Test.__init__(self) self.name = "IMU Mismatch" def run(self, logdata, verbose): #tuning parameters: warn_threshold = .75 fail_threshold = 1.5 filter_tc = 5.0 self.result = TestResult() self.result.status = TestResult.StatusType.GOOD if ("IMU" in logdata.channels) and (not "IMU2" in logdata.channels): self.result.status = TestResult.StatusType.NA self.result.statusMessage = "No IMU2" return if (not "IMU" in logdata.channels) or (not "IMU2" in logdata.channels): self.result.status = TestResult.StatusType.UNKNOWN self.result.statusMessage = "No IMU log data" return imu1 = logdata.channels["IMU"] imu2 = logdata.channels["IMU2"] imu1_timems = imu1["TimeMS"].listData imu1_accx = imu1["AccX"].listData imu1_accy = imu1["AccY"].listData imu1_accz = imu1["AccZ"].listData imu2_timems = imu2["TimeMS"].listData imu2_accx = imu2["AccX"].listData imu2_accy = imu2["AccY"].listData imu2_accz = imu2["AccZ"].listData imu1 = [] imu2 = [] for i in range(len(imu1_timems)): imu1.append({ 't': imu1_timems[i][1]*1.0E-3, 'x': imu1_accx[i][1], 'y': imu1_accy[i][1], 'z': imu1_accz[i][1]}) for i in range(len(imu2_timems)): imu2.append({ 't': imu2_timems[i][1]*1.0E-3, 'x': imu2_accx[i][1], 'y': imu2_accy[i][1], 'z': imu2_accz[i][1]}) imu1.sort(key=lambda x: x['t']) imu2.sort(key=lambda x: x['t']) imu2_index = 0 last_t = None xdiff_filtered = 0 ydiff_filtered = 0 zdiff_filtered = 0 max_diff_filtered = 0 for i in range(len(imu1)): #find closest imu2 value t = imu1[i]['t'] dt = 0 if last_t is None else t-last_t dt=min(dt,.1) next_imu2 = None for i in range(imu2_index,len(imu2)): next_imu2 = imu2[i] imu2_index=i if next_imu2['t'] >= t: break prev_imu2 = imu2[imu2_index-1] closest_imu2 = next_imu2 if abs(next_imu2['t']-t)<abs(prev_imu2['t']-t) else prev_imu2 xdiff = imu1[i]['x']-closest_imu2['x'] ydiff = imu1[i]['y']-closest_imu2['y'] 
zdiff = imu1[i]['z']-closest_imu2['z'] xdiff_filtered += (xdiff-xdiff_filtered)*dt/filter_tc ydiff_filtered += (ydiff-ydiff_filtered)*dt/filter_tc zdiff_filtered += (zdiff-zdiff_filtered)*dt/filter_tc diff_filtered = sqrt(xdiff_filtered**2+ydiff_filtered**2+zdiff_filtered**2) max_diff_filtered = max(max_diff_filtered,diff_filtered) #print max_diff_filtered last_t = t if max_diff_filtered > fail_threshold: self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold) self.result.status = TestResult.StatusType.FAIL elif max_diff_filtered > warn_threshold: self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold) self.result.status = TestResult.StatusType.WARN else: self.result.statusMessage = "(Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold, fail_threshold)
gpl-3.0
lwahlmeier/python-threadly
threadly/Futures.py
2
4445
""" Futures tools for threadly """ import threading import time class ListenableFuture(object): """ This class i used to make a Future that can have listeners and callbacks added to it. Once setter(object) is called all listeners/callbacks are also called. Callbacks will be given the set object, and .get() will return said object. """ def __init__(self): self.lock = threading.Condition() self.settable = None self.listeners = list() self.callables = list() def add_listener(self, listener, args=None, kwargs=None): """ Add a listener function to this ListenableFuture. Once set is called on this future all listeners will be ran. Arguments for the listener can be given if needed. `listener` a callable that will be called when the future is completed `args` tuple arguments that will be passed to the listener when called. `kwargs` dict keyword arguments to be passed to the passed listener when called. """ args = args or () kwargs = kwargs or {} if self.settable is None: self.listeners.append((listener, args, kwargs)) else: listener(*args, **kwargs) def add_callable(self, cable, args=None, kwargs=None): """ Add a callable function to this ListenableFuture. Once set is called on this future all callables will be ran. This works the same as the listener except the set object is passed as the first argument when the callable is called. Arguments for the listener can be given if needed. `cable` a callable that will be called when the future is completed, it must have at least 1 argument. `args` tuple arguments that will be passed to the listener when called. `kwargs` dict keyword arguments to be passed to the passed listener when called. """ args = args or () kwargs = kwargs or {} if self.settable is None: self.callables.append((cable, args, kwargs)) else: cable(self.settable, *args, **kwargs) def get(self, timeout=2 ** 32): """ This is a blocking call that will return the set object once it is set. `timeout` The max amount of time to wait for get (in seconds). 
If this is reached a null is returned. `returns` the set object. This can technically be anything so know what your listening for. """ if self.settable is not None: return self.settable start = time.time() try: self.lock.acquire() while self.settable is None and time.time() - start < timeout: self.lock.wait(timeout - (time.time() - start)) return self.settable finally: self.lock.release() def setter(self, obj): """ This is used to complete this future. Whatever thread sets this will be used to call all listeners and callables for this future. `obj` The object you want to set on this future (usually use just True if you dont care) """ if self.settable is None: self.settable = obj self.lock.acquire() self.lock.notify_all() self.lock.release() while len(self.listeners) > 0: i = self.listeners.pop(0) try: i[0](*i[1], **i[2]) except Exception as exp: print("Exception calling listener", i[0], exp) while len(self.callables) > 0: i = self.callables.pop(0) try: i[0](self.settable, *i[1], **i[2]) except Exception as exp: print("Exception calling listener", i[0], exp) else: raise Exception("Already Set!") def future_job(future, job): """ This is a simple helper function used to wrap a task on the Scheduler in a future. Once the job runs the future will complete. `future` The future that will be completed once the job finishes. `job` The job to run before completing the future. """ try: job[0](*job[1], **job[2]) future.setter(True) except Exception as exp: print("Error running futureJob:", exp) future.setter(False)
unlicense
ftrader-bitcoinabc/bitcoin-abc
test/functional/abc-finalize-block.py
1
13578
#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the finalizeblock RPC calls.""" import time from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, set_node_times, wait_until, ) RPC_FINALIZE_INVALID_BLOCK_ERROR = 'finalize-invalid-block' RPC_FORK_PRIOR_FINALIZED_ERROR = 'bad-fork-prior-finalized' RPC_BLOCK_NOT_FOUND_ERROR = 'Block not found' class FinalizeBlockTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 3 self.extra_args = [["-finalizationdelay=0"], ["-finalizationdelay=0"], []] self.finalization_delay = 2 * 60 * 60 def run_test(self): node = self.nodes[0] self.mocktime = int(time.time()) self.log.info("Test block finalization...") node.generatetoaddress(10, node.get_deterministic_priv_key().address) tip = node.getbestblockhash() node.finalizeblock(tip) assert_equal(node.getbestblockhash(), tip) assert_equal(node.getfinalizedblockhash(), tip) def wait_for_tip(node, tip): def check_tip(): return node.getbestblockhash() == tip wait_until(check_tip) alt_node = self.nodes[1] wait_for_tip(alt_node, tip) alt_node.invalidateblock(tip) # We will use this later fork_block = alt_node.getbestblockhash() # Node 0 should not accept the whole alt_node's chain due to tip being finalized, # even though it is longer. # Headers would not be accepted if previousblock is invalid: # - First block from alt node has same height than node tip, but is on a minority chain. 
Its # status is "valid-headers" # - Second block from alt node has height > node tip height, will be marked as invalid because # node tip is finalized # - Later blocks from alt node will be rejected because their previous block are invalid # # Expected state: # # On alt_node: # >(210)->(211)-> // ->(218 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) # # On node: # >(210 valid-headers)->(211 invalid)->(212 to 218 dropped) # / # (200)->(201)-> // ->(209)->(210 finalized, tip) def wait_for_block(node, block, status="invalid"): def check_block(): for tip in node.getchaintips(): if tip["hash"] == block: assert tip["status"] != "active" return tip["status"] == status return False wait_until(check_block) # First block header is accepted as valid-header alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address) wait_for_block(node, alt_node.getbestblockhash(), "valid-headers") # Second block header is accepted but set invalid alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address) invalid_block = alt_node.getbestblockhash() wait_for_block(node, invalid_block) # Later block headers are rejected for i in range(2, 9): alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address) assert_raises_rpc_error(-5, RPC_BLOCK_NOT_FOUND_ERROR, node.getblockheader, alt_node.getbestblockhash()) assert_equal(node.getbestblockhash(), tip) assert_equal(node.getfinalizedblockhash(), tip) self.log.info("Test that an invalid block cannot be finalized...") assert_raises_rpc_error(-20, RPC_FINALIZE_INVALID_BLOCK_ERROR, node.finalizeblock, invalid_block) self.log.info( "Test that invalidating a finalized block moves the finalization backward...") # Node's finalized block will be invalidated, which causes the finalized block to # move to the previous block. 
# # Expected state: # # On alt_node: # >(210)->(211)-> // ->(218 tip) # / # (200)->(201)-> // ->(208 auto-finalized)->(209)->(210 invalid) # # On node: # >(210 valid-headers)->(211 invalid)->(212 to 218 dropped) # / # (200)->(201)-> // ->(209 finalized)->(210 tip) node.invalidateblock(tip) node.reconsiderblock(tip) assert_equal(node.getbestblockhash(), tip) assert_equal(node.getfinalizedblockhash(), fork_block) assert_equal(alt_node.getfinalizedblockhash(), node.getblockheader( node.getfinalizedblockhash())['previousblockhash']) # The node will now accept that chain as the finalized block moved back. # Generate a new block on alt_node to trigger getheader from node # Previous 212-218 height blocks have been droped because their previous was invalid # # Expected state: # # On alt_node: # >(210)->(211)-> // ->(218)->(219 tip) # / # (200)->(201)-> // ->(209 auto-finalized)->(210 invalid) # # On node: # >(210)->(211)->(212)-> // ->(218)->(219 tip) # / # (200)->(201)-> // ->(209 finalized)->(210) node.reconsiderblock(invalid_block) alt_node_tip = alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address)[-1] wait_for_tip(node, alt_node_tip) assert_equal(node.getbestblockhash(), alt_node.getbestblockhash()) assert_equal(node.getfinalizedblockhash(), fork_block) assert_equal(alt_node.getfinalizedblockhash(), fork_block) self.log.info("Trigger reorg via block finalization...") # Finalize node tip to reorg # # Expected state: # # On alt_node: # >(210)->(211)-> // ->(218)->(219 tip) # / # (200)->(201)-> // ->(209 auto-finalized)->(210 invalid) # # On node: # >(210 invalid)-> // ->(219 invalid) # / # (200)->(201)-> // ->(209)->(210 finalized, tip) node.finalizeblock(tip) assert_equal(node.getfinalizedblockhash(), tip) self.log.info("Try to finalize a block on a competiting fork...") assert_raises_rpc_error(-20, RPC_FINALIZE_INVALID_BLOCK_ERROR, node.finalizeblock, alt_node.getbestblockhash()) assert_equal(node.getfinalizedblockhash(), tip) self.log.info( 
"Check auto-finalization occurs as the tip move forward...") # Reconsider alt_node tip then generate some more blocks on alt_node. # Auto-finalization will occur on both chains. # # Expected state: # # On alt_node: # >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) # # On node: # >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) node.reconsiderblock(alt_node.getbestblockhash()) block_to_autofinalize = alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address)[-1] alt_node_new_tip = alt_node.generatetoaddress( 9, alt_node.get_deterministic_priv_key().address)[-1] wait_for_tip(node, alt_node_new_tip) assert_equal(node.getbestblockhash(), alt_node.getbestblockhash()) assert_equal(node.getfinalizedblockhash(), alt_node_tip) assert_equal(alt_node.getfinalizedblockhash(), alt_node_tip) self.log.info( "Try to finalize a block on an already finalized chain...") # Finalizing a block of an already finalized chain should have no # effect block_218 = node.getblockheader(alt_node_tip)['previousblockhash'] node.finalizeblock(block_218) assert_equal(node.getfinalizedblockhash(), alt_node_tip) self.log.info( "Make sure reconsidering block move the finalization point...") # Reconsidering the tip will move back the finalized block on node # # Expected state: # # On alt_node: # >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) # # On node: # >(210)->(211)-> // ->(219)-> // ->(229 tip) # / # (200)->(201)-> // ->(209 finalized)->(210) node.reconsiderblock(tip) assert_equal(node.getbestblockhash(), alt_node_new_tip) assert_equal(node.getfinalizedblockhash(), fork_block) # TEST FINALIZATION DELAY self.log.info("Check that finalization delay prevents eclipse attacks") # Because there has been no delay since the beginning of this test, # there should have been no auto-finalization on delay_node. 
# # Expected state: # # On alt_node: # >(210)->(211)-> // ->(219 auto-finalized)-> // ->(229 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) # # On delay_node: # >(210)->(211)-> // ->(219)-> // ->(229 tip) # / # (200)->(201)-> // ->(209)->(210) delay_node = self.nodes[2] wait_for_tip(delay_node, alt_node_new_tip) assert_equal(delay_node.getfinalizedblockhash(), str()) self.log.info( "Check that finalization delay does not prevent auto-finalization") # Expire the delay, then generate 1 new block with alt_node to # update the tip on all chains. # Because the finalization delay is expired, auto-finalization # should occur. # # Expected state: # # On alt_node: # >(220 auto-finalized)-> // ->(230 tip) # / # (200)->(201)-> // ->(209)->(210 invalid) # # On delay_node: # >(220 auto-finalized)-> // ->(230 tip) # / # (200)->(201)-> // ->(209)->(210) self.mocktime += self.finalization_delay set_node_times([delay_node], self.mocktime) new_tip = alt_node.generatetoaddress( 1, alt_node.get_deterministic_priv_key().address)[-1] wait_for_tip(delay_node, new_tip) assert_equal(alt_node.getbestblockhash(), new_tip) assert_equal(node.getfinalizedblockhash(), block_to_autofinalize) assert_equal(alt_node.getfinalizedblockhash(), block_to_autofinalize) self.log.info( "Check that finalization delay is effective on node boot") # Restart the new node, so the blocks have no header received time. self.restart_node(2) # There should be no finalized block (getfinalizedblockhash returns an # empty string) assert_equal(delay_node.getfinalizedblockhash(), str()) # Generate 20 blocks with no delay. This should not trigger auto-finalization. 
# # Expected state: # # On delay_node: # >(220)-> // ->(250 tip) # / # (200)->(201)-> // ->(209)->(210) blocks = delay_node.generatetoaddress( 20, alt_node.get_deterministic_priv_key().address) reboot_autofinalized_block = blocks[10] new_tip = blocks[-1] wait_for_tip(delay_node, new_tip) assert_equal(delay_node.getfinalizedblockhash(), str()) # Now let the finalization delay to expire, then generate one more block. # This should resume auto-finalization. # # Expected state: # # On delay_node: # >(220)-> // ->(241 auto-finalized)-> // ->(251 tip) # / # (200)->(201)-> // ->(209)->(210) self.mocktime += self.finalization_delay set_node_times([delay_node], self.mocktime) new_tip = delay_node.generatetoaddress( 1, delay_node.get_deterministic_priv_key().address)[-1] wait_for_tip(delay_node, new_tip) assert_equal(delay_node.getfinalizedblockhash(), reboot_autofinalized_block) if __name__ == '__main__': FinalizeBlockTest().main()
mit
jcoady9/python-for-android
python-modules/twisted/twisted/words/protocols/jabber/jid.py
54
7167
# -*- test-case-name: twisted.words.test.test_jabberjid -*- # # Copyright (c) 2001-2008 Twisted Matrix Laboratories. # See LICENSE for details. """ Jabber Identifier support. This module provides an object to represent Jabber Identifiers (JIDs) and parse string representations into them with proper checking for illegal characters, case folding and canonicalisation through L{stringprep<twisted.words.protocols.jabber.xmpp_stringprep>}. """ from twisted.words.protocols.jabber.xmpp_stringprep import nodeprep, resourceprep, nameprep class InvalidFormat(Exception): """ The given string could not be parsed into a valid Jabber Identifier (JID). """ def parse(jidstring): """ Parse given JID string into its respective parts and apply stringprep. @param jidstring: string representation of a JID. @type jidstring: C{unicode} @return: tuple of (user, host, resource), each of type C{unicode} as the parsed and stringprep'd parts of the given JID. If the given string did not have a user or resource part, the respective field in the tuple will hold C{None}. @rtype: C{tuple} """ user = None host = None resource = None # Search for delimiters user_sep = jidstring.find("@") res_sep = jidstring.find("/") if user_sep == -1: if res_sep == -1: # host host = jidstring else: # host/resource host = jidstring[0:res_sep] resource = jidstring[res_sep + 1:] or None else: if res_sep == -1: # user@host user = jidstring[0:user_sep] or None host = jidstring[user_sep + 1:] else: if user_sep < res_sep: # user@host/resource user = jidstring[0:user_sep] or None host = jidstring[user_sep + 1:user_sep + (res_sep - user_sep)] resource = jidstring[res_sep + 1:] or None else: # host/resource (with an @ in resource) host = jidstring[0:res_sep] resource = jidstring[res_sep + 1:] or None return prep(user, host, resource) def prep(user, host, resource): """ Perform stringprep on all JID fragments. @param user: The user part of the JID. @type user: C{unicode} @param host: The host part of the JID. 
@type host: C{unicode} @param resource: The resource part of the JID. @type resource: C{unicode} @return: The given parts with stringprep applied. @rtype: C{tuple} """ if user: try: user = nodeprep.prepare(unicode(user)) except UnicodeError: raise InvalidFormat, "Invalid character in username" else: user = None if not host: raise InvalidFormat, "Server address required." else: try: host = nameprep.prepare(unicode(host)) except UnicodeError: raise InvalidFormat, "Invalid character in hostname" if resource: try: resource = resourceprep.prepare(unicode(resource)) except UnicodeError: raise InvalidFormat, "Invalid character in resource" else: resource = None return (user, host, resource) __internJIDs = {} def internJID(jidstring): """ Return interned JID. @rtype: L{JID} """ if jidstring in __internJIDs: return __internJIDs[jidstring] else: j = JID(jidstring) __internJIDs[jidstring] = j return j class JID(object): """ Represents a stringprep'd Jabber ID. JID objects are hashable so they can be used in sets and as keys in dictionaries. """ def __init__(self, str=None, tuple=None): if not (str or tuple): raise RuntimeError("You must provide a value for either 'str' or " "'tuple' arguments.") if str: user, host, res = parse(str) else: user, host, res = prep(*tuple) self.user = user self.host = host self.resource = res def userhost(self): """ Extract the bare JID as a unicode string. A bare JID does not have a resource part, so this returns either C{user@host} or just C{host}. @rtype: C{unicode} """ if self.user: return u"%s@%s" % (self.user, self.host) else: return self.host def userhostJID(self): """ Extract the bare JID. A bare JID does not have a resource part, so this returns a L{JID} object representing either C{user@host} or just C{host}. If the object this method is called upon doesn't have a resource set, it will return itself. Otherwise, the bare JID object will be created, interned using L{internJID}. 
@rtype: L{JID} """ if self.resource: return internJID(self.userhost()) else: return self def full(self): """ Return the string representation of this JID. @rtype: C{unicode} """ if self.user: if self.resource: return u"%s@%s/%s" % (self.user, self.host, self.resource) else: return u"%s@%s" % (self.user, self.host) else: if self.resource: return u"%s/%s" % (self.host, self.resource) else: return self.host def __eq__(self, other): """ Equality comparison. L{JID}s compare equal if their user, host and resource parts all compare equal. When comparing against instances of other types, it uses the default comparison. """ if isinstance(other, JID): return (self.user == other.user and self.host == other.host and self.resource == other.resource) else: return NotImplemented def __ne__(self, other): """ Inequality comparison. This negates L{__eq__} for comparison with JIDs and uses the default comparison for other types. """ result = self.__eq__(other) if result is NotImplemented: return result else: return not result def __hash__(self): """ Calculate hash. L{JID}s with identical constituent user, host and resource parts have equal hash values. In combination with the comparison defined on JIDs, this allows for using L{JID}s in sets and as dictionary keys. """ return hash((self.user, self.host, self.resource)) def __unicode__(self): """ Get unicode representation. Return the string representation of this JID as a unicode string. @see: L{full} """ return self.full() def __repr__(self): """ Get object representation. Returns a string that would create a new JID object that compares equal to this one. """ return 'JID(%r)' % self.full()
apache-2.0
HyperBaton/ansible
lib/ansible/plugins/doc_fragments/nxos.py
44
5396
# -*- coding: utf-8 -*- # Copyright: (c) 2015, Peter Sprygada <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) class ModuleDocFragment(object): # Standard files documentation fragment DOCUMENTATION = r''' options: provider: description: - B(Deprecated) - "Starting with Ansible 2.5 we recommend using C(connection: network_cli)." - This option is only required if you are using NX-API. - For more information please see the L(NXOS Platform Options guide, ../network/user_guide/platform_nxos.html). - HORIZONTALLINE - A dict object containing connection details. type: dict suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. type: str required: true port: description: - Specifies the port to use when building the connection to the remote device. This value applies to either I(cli) or I(nxapi). The port value will default to the appropriate transport common port if none is provided in the task. (cli=22, http=80, https=443). type: int default: 0 (use common port) username: description: - Configures the username to use to authenticate the connection to the remote device. This value is used to authenticate either the CLI login or the nxapi authentication depending on which transport is used. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. type: str password: description: - Specifies the password to use to authenticate the connection to the remote device. This is a common argument used for either I(cli) or I(nxapi) transports. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. type: str authorize: description: - Instructs the module to enter privileged mode on the remote device before sending any commands. 
If not specified, the device will attempt to execute all commands in non-privileged mode. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTHORIZE) will be used instead. type: bool default: no version_added: '2.5.3' auth_pass: description: - Specifies the password to use if required to enter privileged mode on the remote device. If I(authorize) is false, then this argument does nothing. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead. type: str version_added: '2.5.3' timeout: description: - Specifies the timeout in seconds for communicating with the network device for either connecting or sending commands. If the timeout is exceeded before the operation is completed, the module will error. NX-API can be slow to return on long-running commands (sh mac, sh bgp, etc). type: int default: 10 version_added: '2.3' ssh_keyfile: description: - Specifies the SSH key to use to authenticate the connection to the remote device. This argument is only used for the I(cli) transport. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. type: str transport: description: - Configures the transport connection to use when connecting to the remote device. The transport argument supports connectivity to the device over cli (ssh) or nxapi. type: str required: true choices: [ cli, nxapi ] default: cli use_ssl: description: - Configures the I(transport) to use SSL if set to C(yes) only when the C(transport=nxapi), otherwise this value is ignored. type: bool default: no validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be used on personally controlled sites using self-signed certificates. If the transport argument is not nxapi, this value is ignored. 
type: bool default: yes use_proxy: description: - If C(no), the environment variables C(http_proxy) and C(https_proxy) will be ignored. type: bool default: yes version_added: "2.5" notes: - For information on using CLI and NX-API see the :ref:`NXOS Platform Options guide <nxos_platform_options>` - For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>` - For more information on using Ansible to manage Cisco devices see the `Cisco integration page <https://www.ansible.com/integrations/networks/cisco>`_. '''
gpl-3.0
intervigilium/android_kernel_htc_msm8660
scripts/tracing/draw_functrace.py
14676
3560
#!/usr/bin/python """ Copyright 2008 (c) Frederic Weisbecker <[email protected]> Licensed under the terms of the GNU GPL License version 2 This script parses a trace provided by the function tracer in kernel/trace/trace_functions.c The resulted trace is processed into a tree to produce a more human view of the call stack by drawing textual but hierarchical tree of calls. Only the functions's names and the the call time are provided. Usage: Be sure that you have CONFIG_FUNCTION_TRACER # mount -t debugfs nodev /sys/kernel/debug # echo function > /sys/kernel/debug/tracing/current_tracer $ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func Wait some times but not too much, the script is a bit slow. Break the pipe (Ctrl + Z) $ scripts/draw_functrace.py < raw_trace_func > draw_functrace Then you have your drawn trace in draw_functrace """ import sys, re class CallTree: """ This class provides a tree representation of the functions call stack. If a function has no parent in the kernel (interrupt, syscall, kernel thread...) then it is attached to a virtual parent called ROOT. """ ROOT = None def __init__(self, func, time = None, parent = None): self._func = func self._time = time if parent is None: self._parent = CallTree.ROOT else: self._parent = parent self._children = [] def calls(self, func, calltime): """ If a function calls another one, call this method to insert it into the tree at the appropriate place. @return: A reference to the newly created child node. """ child = CallTree(func, calltime, self) self._children.append(child) return child def getParent(self, func): """ Retrieve the last parent of the current node that has the name given by func. If this function is not on a parent, then create it as new child of root @return: A reference to the parent. 
""" tree = self while tree != CallTree.ROOT and tree._func != func: tree = tree._parent if tree == CallTree.ROOT: child = CallTree.ROOT.calls(func, None) return child return tree def __repr__(self): return self.__toString("", True) def __toString(self, branch, lastChild): if self._time is not None: s = "%s----%s (%s)\n" % (branch, self._func, self._time) else: s = "%s----%s\n" % (branch, self._func) i = 0 if lastChild: branch = branch[:-1] + " " while i < len(self._children): if i != len(self._children) - 1: s += "%s" % self._children[i].__toString(branch +\ " |", False) else: s += "%s" % self._children[i].__toString(branch +\ " |", True) i += 1 return s class BrokenLineException(Exception): """If the last line is not complete because of the pipe breakage, we want to stop the processing and ignore this line. """ pass class CommentLineException(Exception): """ If the line is a comment (as in the beginning of the trace file), just ignore it. """ pass def parseLine(line): line = line.strip() if line.startswith("#"): raise CommentLineException m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line) if m is None: raise BrokenLineException return (m.group(1), m.group(2), m.group(3)) def main(): CallTree.ROOT = CallTree("Root (Nowhere)", None, None) tree = CallTree.ROOT for line in sys.stdin: try: calltime, callee, caller = parseLine(line) except BrokenLineException: break except CommentLineException: continue tree = tree.getParent(caller) tree = tree.calls(callee, calltime) print CallTree.ROOT if __name__ == "__main__": main()
gpl-2.0
evensonbryan/yocto-autobuilder
lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/status/progress.py
4
11969
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from twisted.internet import reactor from twisted.spread import pb from twisted.python import log from buildbot import util from collections import defaultdict class StepProgress: """I keep track of how much progress a single BuildStep has made. Progress is measured along various axes. Time consumed is one that is available for all steps. Amount of command output is another, and may be better quantified by scanning the output for markers to derive number of files compiled, directories walked, tests run, etc. I am created when the build begins, and given to a BuildProgress object so it can track the overall progress of the whole build. """ startTime = None stopTime = None expectedTime = None buildProgress = None debug = False def __init__(self, name, metricNames): self.name = name self.progress = {} self.expectations = {} for m in metricNames: self.progress[m] = None self.expectations[m] = None def setBuildProgress(self, bp): self.buildProgress = bp def setExpectations(self, metrics): """The step can call this to explicitly set a target value for one of its metrics. 
E.g., ShellCommands knows how many commands it will execute, so it could set the 'commands' expectation.""" for metric, value in metrics.items(): self.expectations[metric] = value self.buildProgress.newExpectations() def setExpectedTime(self, seconds): self.expectedTime = seconds self.buildProgress.newExpectations() def start(self): if self.debug: print "StepProgress.start[%s]" % self.name self.startTime = util.now() def setProgress(self, metric, value): """The step calls this as progress is made along various axes.""" if self.debug: print "setProgress[%s][%s] = %s" % (self.name, metric, value) self.progress[metric] = value if self.debug: r = self.remaining() print " step remaining:", r self.buildProgress.newProgress() def finish(self): """This stops the 'time' metric and marks the step as finished overall. It should be called after the last .setProgress has been done for each axis.""" if self.debug: print "StepProgress.finish[%s]" % self.name self.stopTime = util.now() self.buildProgress.stepFinished(self.name) def totalTime(self): if self.startTime != None and self.stopTime != None: return self.stopTime - self.startTime def remaining(self): if self.startTime == None: return self.expectedTime if self.stopTime != None: return 0 # already finished # TODO: replace this with cleverness that graphs each metric vs. # time, then finds the inverse function. Will probably need to save # a timestamp with each setProgress update, when finished, go back # and find the 2% transition points, then save those 50 values in a # list. On the next build, do linear interpolation between the two # closest samples to come up with a percentage represented by that # metric. # TODO: If no other metrics are available, just go with elapsed # time. Given the non-time-uniformity of text output from most # steps, this would probably be better than the text-percentage # scheme currently implemented. 
percentages = [] for metric, value in self.progress.items(): expectation = self.expectations[metric] if value != None and expectation != None: p = 1.0 * value / expectation percentages.append(p) if percentages: avg = reduce(lambda x,y: x+y, percentages) / len(percentages) if avg > 1.0: # overdue avg = 1.0 if avg < 0.0: avg = 0.0 if percentages and self.expectedTime != None: return self.expectedTime - (avg * self.expectedTime) if self.expectedTime is not None: # fall back to pure time return self.expectedTime - (util.now() - self.startTime) return None # no idea class WatcherState: def __init__(self, interval): self.interval = interval self.timer = None self.needUpdate = 0 class BuildProgress(pb.Referenceable): """I keep track of overall build progress. I hold a list of StepProgress objects. """ def __init__(self, stepProgresses): self.steps = {} for s in stepProgresses: self.steps[s.name] = s s.setBuildProgress(self) self.finishedSteps = [] self.watchers = {} self.debug = 0 def setExpectationsFrom(self, exp): """Set our expectations from the builder's Expectations object.""" for name, metrics in exp.steps.items(): s = self.steps.get(name) if s: s.setExpectedTime(exp.times[name]) s.setExpectations(exp.steps[name]) def newExpectations(self): """Call this when one of the steps has changed its expectations. 
This should trigger us to update our ETA value and notify any subscribers.""" pass # subscribers are not implemented: they just poll def stepFinished(self, stepname): assert(stepname not in self.finishedSteps) self.finishedSteps.append(stepname) if len(self.finishedSteps) == len(self.steps.keys()): self.sendLastUpdates() def newProgress(self): r = self.remaining() if self.debug: print " remaining:", r if r != None: self.sendAllUpdates() def remaining(self): # sum eta of all steps sum = 0 for name, step in self.steps.items(): rem = step.remaining() if rem == None: return None # not sure sum += rem return sum def eta(self): left = self.remaining() if left == None: return None # not sure done = util.now() + left return done def remote_subscribe(self, remote, interval=5): # [interval, timer, needUpdate] # don't send an update more than once per interval self.watchers[remote] = WatcherState(interval) remote.notifyOnDisconnect(self.removeWatcher) self.updateWatcher(remote) self.startTimer(remote) log.msg("BuildProgress.remote_subscribe(%s)" % remote) def remote_unsubscribe(self, remote): # TODO: this doesn't work. I think 'remote' will always be different # than the object that appeared in _subscribe. log.msg("BuildProgress.remote_unsubscribe(%s)" % remote) self.removeWatcher(remote) #remote.dontNotifyOnDisconnect(self.removeWatcher) def removeWatcher(self, remote): #log.msg("removeWatcher(%s)" % remote) try: timer = self.watchers[remote].timer if timer: timer.cancel() del self.watchers[remote] except KeyError: log.msg("Weird, removeWatcher on non-existent subscriber:", remote) def sendAllUpdates(self): for r in self.watchers.keys(): self.updateWatcher(r) def updateWatcher(self, remote): # an update wants to go to this watcher. 
Send it if we can, otherwise # queue it for later w = self.watchers[remote] if not w.timer: # no timer, so send update now and start the timer self.sendUpdate(remote) self.startTimer(remote) else: # timer is running, just mark as needing an update w.needUpdate = 1 def startTimer(self, remote): w = self.watchers[remote] timer = reactor.callLater(w.interval, self.watcherTimeout, remote) w.timer = timer def sendUpdate(self, remote, last=0): self.watchers[remote].needUpdate = 0 #text = self.asText() # TODO: not text, duh try: remote.callRemote("progress", self.remaining()) if last: remote.callRemote("finished", self) except: log.deferr() self.removeWatcher(remote) def watcherTimeout(self, remote): w = self.watchers.get(remote, None) if not w: return # went away w.timer = None if w.needUpdate: self.sendUpdate(remote) self.startTimer(remote) def sendLastUpdates(self): for remote in self.watchers.keys(): self.sendUpdate(remote, 1) self.removeWatcher(remote) class Expectations: debug = False # decay=1.0 ignores all but the last build # 0.9 is short time constant. 0.1 is very long time constant # TODO: let decay be specified per-metric decay = 0.5 def __init__(self, buildprogress): """Create us from a successful build. 
We will expect each step to take as long as it did in that build.""" # .steps maps stepname to dict2 # dict2 maps metricname to final end-of-step value self.steps = defaultdict(dict) # .times maps stepname to per-step elapsed time self.times = {} for name, step in buildprogress.steps.items(): self.steps[name] = {} for metric, value in step.progress.items(): self.steps[name][metric] = value self.times[name] = None if step.startTime is not None and step.stopTime is not None: self.times[name] = step.stopTime - step.startTime def wavg(self, old, current): if old is None: return current if current is None: return old else: return (current * self.decay) + (old * (1 - self.decay)) def update(self, buildprogress): for name, stepprogress in buildprogress.steps.items(): old = self.times.get(name) current = stepprogress.totalTime() if current == None: log.msg("Expectations.update: current[%s] was None!" % name) continue new = self.wavg(old, current) self.times[name] = new if self.debug: print "new expected time[%s] = %s, old %s, cur %s" % \ (name, new, old, current) for metric, current in stepprogress.progress.items(): old = self.steps[name].get(metric) new = self.wavg(old, current) if self.debug: print "new expectation[%s][%s] = %s, old %s, cur %s" % \ (name, metric, new, old, current) self.steps[name][metric] = new def expectedBuildTime(self): if None in self.times.values(): return None #return sum(self.times.values()) # python-2.2 doesn't have 'sum'. TODO: drop python-2.2 support s = 0 for v in self.times.values(): s += v return s
gpl-2.0
dxj19831029/keras
tests/manual/check_constraints.py
86
2841
from __future__ import absolute_import
from __future__ import print_function
import keras
from keras.datasets import mnist
import keras.models
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.regularizers import l2, l1
from keras.constraints import maxnorm, nonneg
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils, generic_utils
import theano
import theano.tensor as T
import numpy as np
import scipy

# Manual sanity checks for weight constraints (maxnorm, nonneg) and
# regularizers (l1, l2): train small MLPs on MNIST, then inspect the
# learned weight matrices.
# NOTE(review): scipy.stats is used below but only `import scipy` appears
# above; most SciPy versions require an explicit `import scipy.stats` --
# confirm against the version pinned by this repo.

batch_size = 100
nb_classes = 10
nb_epoch = 10

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()

X_train=X_train.reshape(60000,784)
X_test=X_test.reshape(10000,784)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
# scale pixel values into [0, 1]
X_train /= 255
X_test /= 255

# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

# Model 1: every Dense layer carries a weight constraint under test.
model = Sequential()
model.add(Dense(784, 20, W_constraint=maxnorm(1)))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 20, W_constraint=nonneg()))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 10, W_constraint=maxnorm(1)))
model.add(Activation('softmax'))

rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0)

# maxnorm(1): the largest per-column L2 norm of the first weight matrix
# (axis=0 sums over input rows) should sit exactly at the cap of 1.
a=model.params[0].eval()
if np.isclose(np.max(np.sqrt(np.sum(a**2, axis=0))),1):
    print('Maxnorm test passed')
else:
    raise ValueError('Maxnorm test failed!')

# nonneg(): the second layer's weights are clipped at zero, while the
# maxnorm-constrained matrix `a` should still contain negative entries.
b=model.params[2].eval()
if np.min(b)==0 and np.min(a)!=0:
    print('Nonneg test passed')
else:
    raise ValueError('Nonneg test failed!')

# Model 2: same-shape network with L1 regularization on the middle layer.
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l1(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))

rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)

# L1 pushes many weights toward zero, so the flattened 20x20 matrix should
# look strongly non-normal: kurtosistest is expected to reject (small p1).
a=model.params[2].eval().reshape(400)
(D, p1) = scipy.stats.kurtosistest(a)

# Model 3: identical architecture but with L2 regularization instead.
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l2(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))

rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)

# L2 shrinks weights but keeps them roughly normal, so kurtosistest should
# NOT reject here (large p2).
a=model.params[2].eval().reshape(400)
(D, p2) = scipy.stats.kurtosistest(a)

if p1<.01 and p2>.01:
    print('L1 and L2 regularization tests passed')
else:
    raise ValueError('L1 and L2 regularization tests failed!')
mit
archesproject/arches
arches/app/models/migrations/0001_initial.py
1
36390
# -*- coding: utf-8 -*- import os import uuid import codecs import django.contrib.gis.db.models.fields from django.core import management from django.contrib.postgres.fields import JSONField from django.db import migrations, models from arches.db.migration_operations.extras import CreateExtension, CreateAutoPopulateUUIDField, CreateFunction from arches.app.models.system_settings import settings def get_sql_string_from_file(pathtofile): ret = [] with codecs.open(pathtofile, encoding="utf-8") as f: ret = f.read() # print sqlparse.split(sqlparse.format(ret,strip_comments=True)) # for stmt in sqlparse.split(sqlparse.format(f.read(),strip_comments=True)): # if stmt.strip() != '': # ret.append(stmt) return ret def forwards_func(apps, schema_editor): # We get the model from the versioned app registry; # if we directly import it, it'll be the wrong version pass def reverse_func(apps, schema_editor): Ontology = apps.get_model("models", "Ontology") Ontology.objects.filter(version="6.2").delete() # a work around for not being able to create permissions during an initial migration # from https://code.djangoproject.com/ticket/23422#comment:6 def make_permissions(apps, schema_editor, with_create_permissions=True): db_alias = schema_editor.connection.alias Group = apps.get_model("auth", "Group") User = apps.get_model("auth", "User") Permission = apps.get_model("auth", "Permission") try: read_nodegroup = Permission.objects.using(db_alias).get( codename="read_nodegroup", content_type__app_label="models", content_type__model="nodegroup" ) write_nodegroup = Permission.objects.using(db_alias).get( codename="write_nodegroup", content_type__app_label="models", content_type__model="nodegroup" ) delete_nodegroup = Permission.objects.using(db_alias).get( codename="delete_nodegroup", content_type__app_label="models", content_type__model="nodegroup" ) except Permission.DoesNotExist: if with_create_permissions: # Manually run create_permissions from django.contrib.auth.management import 
create_permissions assert not getattr(apps, "models_module", None) model_app = apps.get_app_config("models") model_app.models_module = True create_permissions(model_app, verbosity=0) model_app.models_module = None return make_permissions(apps, schema_editor, with_create_permissions=False) else: raise graph_editor_group = Group.objects.using(db_alias).create(name="Graph Editor") graph_editor_group.permissions.add(read_nodegroup, write_nodegroup, delete_nodegroup) resource_editor_group = Group.objects.using(db_alias).create(name="Resource Editor") rdm_admin_group = Group.objects.using(db_alias).create(name="RDM Administrator") app_admin_group = Group.objects.using(db_alias).create(name="Application Administrator") sys_admin_group = Group.objects.using(db_alias).create(name="System Administrator") mobile_project_admin_group = Group.objects.using(db_alias).create(name="Mobile Project Administrator") crowdsource_editor_group = Group.objects.using(db_alias).create(name="Crowdsource Editor") guest_group = Group.objects.using(db_alias).create(name="Guest") anonymous_user = User.objects.using(db_alias).get(username="anonymous") anonymous_user.groups.add(guest_group) admin_user = User.objects.using(db_alias).get(username="admin") admin_user.groups.add(graph_editor_group) admin_user.groups.add(resource_editor_group) admin_user.groups.add(rdm_admin_group) admin_user.groups.add(app_admin_group) admin_user.groups.add(sys_admin_group) admin_user.groups.add(mobile_project_admin_group) admin_user.groups.add(crowdsource_editor_group) admin_user.groups.add(guest_group) class Migration(migrations.Migration): dependencies = [] initial = True operations = [ CreateExtension(name="uuid-ossp"), CreateFunction( name="insert_relation", arguments=["p_label text", "p_relationtype text", "p_legacyid2 text"], declarations=["v_conceptidfrom uuid = null;", "v_conceptidto uuid = null;"], language="plpgsql", body=""" v_conceptidfrom = (select conceptid from concepts c where trim(legacyoid) = 
trim(p_legacyid1)); v_conceptidto = (select conceptid from concepts c where trim(legacyoid) = trim(p_legacyid2)); IF v_conceptidfrom is not null and v_conceptidto is not null and v_conceptidto <> v_conceptidfrom and v_conceptidfrom::text||v_conceptidto::text NOT IN (SELECT conceptidfrom::text||conceptidto::text FROM relations) then INSERT INTO relations(relationid, conceptidfrom, conceptidto, relationtype) VALUES (uuid_generate_v1mc(), v_conceptidfrom, v_conceptidto, p_relationtype); return 'success!'; ELSE return 'fail! no relation inserted.'; END IF; """, returntype="text", ), CreateFunction( name="get_conceptid", arguments=["p_label text"], declarations=["v_return text;",], language="plpgsql", body=""" v_return = (select a.conceptid from concepts a, values b where 1=1 and b.valuetype = 'prefLabel' and b.value = p_label and b.conceptid = a.conceptid LIMIT 1); return v_return; """, returntype="uuid", ), CreateFunction( name="insert_concept", arguments=["p_label text", "p_note text", "p_languageid text", "p_legacyid text", "p_nodetype text"], declarations=[ "v_conceptid uuid = public.uuid_generate_v1mc();", "v_valueid uuid = public.uuid_generate_v1mc();", "v_languageid text = p_languageid;", ], language="plpgsql", body=""" INSERT INTO concepts(conceptid, nodetype, legacyoid) VALUES (v_conceptid, p_nodetype, p_legacyid); IF trim(p_label) is not null and p_label<>'' then INSERT INTO values (valueid, conceptid, valuetype, value, languageid) VALUES (v_valueid, v_conceptid, 'prefLabel', trim(initcap(p_label)), v_languageid); END IF; IF trim(p_note) is not null and p_note <> '' then INSERT INTO values (valueid, conceptid, valuetype, value, languageid) VALUES (v_valueid, v_conceptid, 'scopeNote', p_note, v_languageid); END IF; return v_conceptid; """, returntype="uuid", ), migrations.CreateModel( name="GraphModel", fields=[ ("graphid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)), ("name", models.TextField(null=True, blank=True)), 
("description", models.TextField(null=True, blank=True)), ("deploymentfile", models.TextField(null=True, blank=True)), ("author", models.TextField(null=True, blank=True)), ("deploymentdate", models.DateTimeField(null=True, blank=True)), ("version", models.TextField(null=True, blank=True)), ("isresource", models.BooleanField()), ("isactive", models.BooleanField()), ("iconclass", models.TextField(null=True, blank=True)), ("mapfeaturecolor", models.TextField(blank=True, null=True)), ("maplinewidth", models.IntegerField(blank=True, null=True)), ("mappointsize", models.IntegerField(blank=True, null=True)), ("subtitle", models.TextField(null=True, blank=True)), ], options={"db_table": "graphs", "managed": True,}, ), migrations.CreateModel(name="Graph", fields=[], options={"proxy": True,}, bases=("models.GraphModel",),), migrations.CreateModel( name="CardModel", fields=[ ("cardid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)), ("name", models.TextField(null=True, blank=True)), ("description", models.TextField(null=True, blank=True)), ("instructions", models.TextField(null=True, blank=True)), ("helpenabled", models.BooleanField(default=False)), ("helptitle", models.TextField(null=True, blank=True)), ("helptext", models.TextField(null=True, blank=True)), ("active", models.BooleanField(default=True)), ("visible", models.BooleanField(default=True)), ("sortorder", models.IntegerField(blank=True, null=True, default=None)), ], options={"db_table": "cards", "managed": True,}, ), migrations.CreateModel(name="Card", fields=[], options={"proxy": True,}, bases=("models.CardModel",),), migrations.CreateModel( name="CardXNodeXWidget", fields=[ ("card", models.ForeignKey(to="models.CardModel", db_column="cardid", on_delete=models.CASCADE)), ("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("config", JSONField(blank=True, db_column="config", null=True)), ("label", models.TextField(blank=True, null=True)), ("sortorder", 
models.IntegerField(blank=True, null=True, default=None)), ], options={"db_table": "cards_x_nodes_x_widgets", "managed": True,}, ), migrations.CreateModel( name="Concept", fields=[ ("conceptid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("legacyoid", models.TextField(unique=True)), ], options={"db_table": "concepts", "managed": True,}, ), migrations.CreateModel( name="DDataType", fields=[ ("datatype", models.TextField(primary_key=True, serialize=False)), ("iconclass", models.TextField()), ("modulename", models.TextField(blank=True, null=True)), ("classname", models.TextField(blank=True, null=True)), ("configcomponent", models.TextField(blank=True, null=True)), ("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)), ("configname", models.TextField(blank=True, null=True)), ("isgeometric", models.BooleanField(default=False)), ], options={"db_table": "d_data_types", "managed": True,}, ), migrations.CreateModel( name="DLanguage", fields=[ ("languageid", models.TextField(primary_key=True, serialize=False)), ("languagename", models.TextField()), ("isdefault", models.BooleanField()), ], options={"db_table": "d_languages", "managed": True,}, ), migrations.CreateModel( name="DNodeType", fields=[("nodetype", models.TextField(primary_key=True, serialize=False)), ("namespace", models.TextField()),], options={"db_table": "d_node_types", "managed": True,}, ), migrations.CreateModel( name="DRelationType", fields=[ ("relationtype", models.TextField(primary_key=True, serialize=False)), ("category", models.TextField()), ("namespace", models.TextField()), ], options={"db_table": "d_relation_types", "managed": True,}, ), migrations.CreateModel( name="DValueType", fields=[ ("valuetype", models.TextField(primary_key=True, serialize=False)), ("category", models.TextField(blank=True, null=True)), ("description", models.TextField(blank=True, null=True)), ("namespace", models.TextField()), ("datatype", models.TextField(blank=True, 
null=True)), ], options={"db_table": "d_value_types", "managed": True,}, ), migrations.CreateModel( name="Edge", fields=[ ("edgeid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField(blank=True, null=True)), ("description", models.TextField(blank=True, null=True)), ("ontologyproperty", models.TextField(blank=True, null=True)), ( "graph", models.ForeignKey(blank=False, db_column="graphid", null=False, to="models.GraphModel", on_delete=models.CASCADE), ), ], options={"db_table": "edges", "managed": True,}, ), migrations.CreateModel( name="EditLog", fields=[ ("editlogid", models.UUIDField(default=uuid.uuid1, serialize=False, primary_key=True)), ("resourceclassid", models.TextField(null=True, blank=True)), ("resourceinstanceid", models.TextField(null=True, blank=True)), ("attributenodeid", models.TextField(null=True, blank=True)), ("tileinstanceid", models.TextField(null=True, blank=True)), ("edittype", models.TextField(null=True, blank=True)), ("newvalue", models.TextField(null=True, blank=True)), ("oldvalue", models.TextField(null=True, blank=True)), ("timestamp", models.DateTimeField(null=True, blank=True)), ("userid", models.TextField(null=True, blank=True)), ("user_firstname", models.TextField(null=True, blank=True)), ("user_lastname", models.TextField(null=True, blank=True)), ("user_email", models.TextField(null=True, blank=True)), ("note", models.TextField(null=True, blank=True)), ], options={"db_table": "edit_log", "managed": True,}, ), migrations.CreateModel( name="File", fields=[ ("fileid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("path", models.FileField(upload_to="uploadedfiles")), ], options={"db_table": "files", "managed": True,}, ), migrations.CreateModel( name="Form", fields=[ ("formid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("title", models.TextField(blank=True, null=True)), ("subtitle", models.TextField(blank=True, null=True)), 
("iconclass", models.TextField(blank=True, null=True)), ("visible", models.BooleanField(default=True)), ("sortorder", models.IntegerField(blank=True, null=True, default=None)), ], options={"db_table": "forms", "managed": True,}, ), migrations.CreateModel( name="FormXCard", fields=[ ("id", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("card", models.ForeignKey(to="models.CardModel", db_column="cardid", on_delete=models.CASCADE)), ("form", models.ForeignKey(to="models.Form", db_column="formid", on_delete=models.CASCADE)), ("sortorder", models.IntegerField(blank=True, null=True, default=None)), ], options={"db_table": "forms_x_cards", "managed": True,}, ), migrations.CreateModel( name="Function", fields=[ ("functionid", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)), ("functiontype", models.TextField(blank=True, null=True)), ("name", models.TextField(blank=True, null=True)), ("description", models.TextField(blank=True, null=True)), ("defaultconfig", JSONField(blank=True, null=True, db_column="defaultconfig")), ("modulename", models.TextField(blank=True, null=True)), ("classname", models.TextField(blank=True, null=True)), ("component", models.TextField(blank=True, null=True)), ], options={"db_table": "functions", "managed": True,}, ), migrations.CreateModel( name="FunctionXGraph", fields=[ ("id", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)), ("function", models.ForeignKey(to="models.Function", db_column="functionid", on_delete=models.CASCADE)), ("graph", models.ForeignKey(to="models.GraphModel", db_column="graphid", on_delete=models.CASCADE)), ("config", JSONField(blank=True, null=True, db_column="config")), ], options={"db_table": "functions_x_graphs", "managed": True,}, ), migrations.CreateModel( name="Icon", fields=[ ("id", models.AutoField(primary_key=True, serialize=True)), ("name", models.TextField(blank=True, null=True)), ("cssclass", models.TextField(blank=True, null=True)), ], 
options={"db_table": "icons", "managed": True,}, ), migrations.CreateModel( name="Node", fields=[ ("nodeid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField()), ("description", models.TextField(blank=True, null=True)), ("istopnode", models.BooleanField()), ("ontologyclass", models.TextField(blank=True, null=True)), ("datatype", models.TextField()), ( "graph", models.ForeignKey(blank=False, db_column="graphid", null=False, to="models.GraphModel", on_delete=models.CASCADE), ), ("config", JSONField(blank=True, db_column="config", null=True)), ], options={"db_table": "nodes", "managed": True,}, ), migrations.CreateModel( name="NodeGroup", fields=[ ("nodegroupid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("legacygroupid", models.TextField(blank=True, null=True)), ("cardinality", models.TextField(blank=True, default="1")), ( "parentnodegroup", models.ForeignKey( blank=True, db_column="parentnodegroupid", null=True, to="models.NodeGroup", on_delete=models.CASCADE ), ), ], options={ "db_table": "node_groups", "managed": True, "default_permissions": (), "permissions": ( ("read_nodegroup", "Read"), ("write_nodegroup", "Create/Update"), ("delete_nodegroup", "Delete"), ("no_access_to_nodegroup", "No Access"), ), }, ), migrations.CreateModel( name="Ontology", fields=[ ("ontologyid", models.UUIDField(default=uuid.uuid1, primary_key=True)), ("name", models.TextField()), ("version", models.TextField()), ("path", models.TextField()), ( "parentontology", models.ForeignKey( to="models.Ontology", db_column="parentontologyid", related_name="extensions", null=True, blank=True, on_delete=models.CASCADE, ), ), ], options={"db_table": "ontologies", "managed": True,}, ), migrations.CreateModel( name="OntologyClass", fields=[ ("ontologyclassid", models.UUIDField(default=uuid.uuid1, primary_key=True)), ("source", models.TextField()), ("target", JSONField(null=True)), ( "ontology", models.ForeignKey( 
to="models.Ontology", db_column="ontologyid", related_name="ontologyclasses", on_delete=models.CASCADE ), ), ], options={"db_table": "ontologyclasses", "managed": True,}, ), migrations.CreateModel( name="Relation", fields=[ ("relationid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ( "conceptfrom", models.ForeignKey( db_column="conceptidfrom", related_name="relation_concepts_from", to="models.Concept", on_delete=models.CASCADE ), ), ( "conceptto", models.ForeignKey( db_column="conceptidto", related_name="relation_concepts_to", to="models.Concept", on_delete=models.CASCADE ), ), ("relationtype", models.ForeignKey(db_column="relationtype", to="models.DRelationType", on_delete=models.CASCADE)), ], options={"db_table": "relations", "managed": True,}, ), migrations.CreateModel( name="ReportTemplate", fields=[ ("templateid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField(null=True, blank=True)), ("description", models.TextField(null=True, blank=True)), ("component", models.TextField()), ("componentname", models.TextField()), ("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)), ], options={"db_table": "report_templates", "managed": True,}, ), migrations.CreateModel( name="Report", fields=[ ("reportid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField(null=True, blank=True)), ("template", models.ForeignKey(db_column="templateid", to="models.ReportTemplate", on_delete=models.CASCADE)), ("graph", models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE)), ("config", JSONField(blank=True, db_column="config", null=True)), ("formsconfig", JSONField(blank=True, db_column="formsconfig", null=True)), ("active", models.BooleanField(default=False)), ], options={"db_table": "reports", "managed": True,}, ), migrations.CreateModel( name="Resource2ResourceConstraint", fields=[ 
("resource2resourceid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ( "resourceclassfrom", models.ForeignKey( blank=True, db_column="resourceclassfrom", null=True, related_name="resxres_contstraint_classes_from", to="models.Node", on_delete=models.SET_NULL, ), ), ( "resourceclassto", models.ForeignKey( blank=True, db_column="resourceclassto", null=True, related_name="resxres_contstraint_classes_to", to="models.Node", on_delete=models.SET_NULL, ), ), ], options={"db_table": "resource_2_resource_constraints", "managed": True,}, ), migrations.CreateModel( name="ResourceInstance", fields=[ ("resourceinstanceid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("legacyid", models.TextField(blank=True, unique=True, null=True)), ("graph", models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE)), ("createdtime", models.DateTimeField(auto_now_add=True)), ], options={"db_table": "resource_instances", "managed": True,}, ), migrations.CreateModel( name="ResourceXResource", fields=[ ("resourcexid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("notes", models.TextField(blank=True, null=True)), ("datestarted", models.DateField(blank=True, null=True)), ("dateended", models.DateField(blank=True, null=True)), ], options={"db_table": "resource_x_resource", "managed": True,}, ), migrations.CreateModel( name="TileModel", fields=[ ("tileid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("data", JSONField(blank=True, db_column="tiledata", null=True)), ("nodegroup", models.ForeignKey(db_column="nodegroupid", to="models.NodeGroup", on_delete=models.CASCADE)), ( "parenttile", models.ForeignKey(blank=True, db_column="parenttileid", null=True, to="models.TileModel", on_delete=models.CASCADE), ), ( "resourceinstance", models.ForeignKey(db_column="resourceinstanceid", to="models.ResourceInstance", on_delete=models.CASCADE), ), ("sortorder", 
models.IntegerField(blank=True, null=True, default=0)), ], options={"db_table": "tiles", "managed": True,}, ), migrations.CreateModel( name="Value", fields=[ ("valueid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("value", models.TextField()), ("concept", models.ForeignKey(db_column="conceptid", to="models.Concept", on_delete=models.CASCADE)), ( "language", models.ForeignKey(blank=True, db_column="languageid", null=True, to="models.DLanguage", on_delete=models.CASCADE), ), ("valuetype", models.ForeignKey(db_column="valuetype", to="models.DValueType", on_delete=models.CASCADE)), ], options={"db_table": "values", "managed": True,}, ), migrations.CreateModel( name="Widget", fields=[ ("widgetid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField()), ("component", models.TextField()), ("defaultconfig", JSONField(blank=True, db_column="defaultconfig", null=True)), ("helptext", models.TextField(blank=True, null=True)), ("datatype", models.TextField()), ], options={"db_table": "widgets", "managed": True,}, ), migrations.CreateModel( name="MapLayer", fields=[ ("maplayerid", models.UUIDField(default=uuid.uuid1, primary_key=True, serialize=False)), ("name", models.TextField(unique=True)), ("layerdefinitions", JSONField(blank=True, db_column="layerdefinitions", null=True)), ("isoverlay", models.BooleanField(default=False)), ("icon", models.TextField(default=None)), ("activated", models.BooleanField(default=True)), ("addtomap", models.BooleanField(default=False)), ], options={"db_table": "map_layers", "managed": True,}, ), migrations.CreateModel( name="MapSource", fields=[ ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ("name", models.TextField(unique=True)), ("source", JSONField(blank=True, db_column="source", null=True)), ], options={"db_table": "map_sources", "managed": True,}, ), migrations.CreateModel( name="TileserverLayer", fields=[ 
("name", models.TextField(unique=True)), ("path", models.TextField()), ("config", JSONField(db_column="config")), ("map_layer", models.ForeignKey(db_column="map_layerid", to="models.MapLayer", on_delete=models.CASCADE)), ("map_source", models.ForeignKey(db_column="map_sourceid", to="models.MapSource", on_delete=models.CASCADE)), ], options={"db_table": "tileserver_layers", "managed": True,}, ), migrations.CreateModel( name="GraphXMapping", fields=[ ("id", models.UUIDField(primary_key=True, default=uuid.uuid1, serialize=False)), ("graph", models.ForeignKey(to="models.GraphModel", db_column="graphid", on_delete=models.CASCADE)), ("mapping", JSONField(blank=True, db_column="mapping")), ], options={"db_table": "graphs_x_mapping_file", "managed": True,}, ), migrations.AddField( model_name="ddatatype", name="defaultwidget", field=models.ForeignKey(db_column="defaultwidget", to="models.Widget", null=True, on_delete=models.SET_NULL), ), migrations.AddField( model_name="resourcexresource", name="relationshiptype", field=models.ForeignKey(db_column="relationshiptype", to="models.Value", on_delete=models.CASCADE), ), migrations.AddField( model_name="resourcexresource", name="resourceinstanceidfrom", field=models.ForeignKey( blank=True, db_column="resourceinstanceidfrom", null=True, related_name="resxres_resource_instance_ids_from", to="models.ResourceInstance", on_delete=models.CASCADE, ), ), migrations.AddField( model_name="resourcexresource", name="resourceinstanceidto", field=models.ForeignKey( blank=True, db_column="resourceinstanceidto", null=True, related_name="resxres_resource_instance_ids_to", to="models.ResourceInstance", on_delete=models.CASCADE, ), ), migrations.AddField( model_name="node", name="nodegroup", field=models.ForeignKey(blank=True, db_column="nodegroupid", null=True, to="models.NodeGroup", on_delete=models.CASCADE), ), migrations.AddField( model_name="edge", name="domainnode", field=models.ForeignKey(db_column="domainnodeid", 
related_name="edge_domains", to="models.Node", on_delete=models.CASCADE), ), migrations.AddField( model_name="edge", name="rangenode", field=models.ForeignKey(db_column="rangenodeid", related_name="edge_ranges", to="models.Node", on_delete=models.CASCADE), ), migrations.AddField( model_name="concept", name="nodetype", field=models.ForeignKey(db_column="nodetype", to="models.DNodeType", on_delete=models.CASCADE), ), migrations.AddField( model_name="cardxnodexwidget", name="node", field=models.ForeignKey(db_column="nodeid", to="models.Node", on_delete=models.CASCADE), ), migrations.AddField( model_name="cardxnodexwidget", name="widget", field=models.ForeignKey(db_column="widgetid", to="models.Widget", on_delete=models.CASCADE), ), migrations.AddField( model_name="cardmodel", name="nodegroup", field=models.ForeignKey(db_column="nodegroupid", to="models.NodeGroup", on_delete=models.CASCADE), ), migrations.AddField( model_name="cardmodel", name="graph", field=models.ForeignKey(db_column="graphid", to="models.GraphModel", on_delete=models.CASCADE), ), migrations.AddField( model_name="form", name="graph", field=models.ForeignKey( to="models.GraphModel", db_column="graphid", related_name="forms", null=False, blank=False, on_delete=models.CASCADE ), ), migrations.AddField( model_name="graphmodel", name="functions", field=models.ManyToManyField(to="models.Function", through="FunctionXGraph"), ), migrations.AddField( model_name="graphmodel", name="ontology", field=models.ForeignKey( to="models.Ontology", db_column="ontologyid", related_name="graphs", null=True, blank=True, on_delete=models.SET_NULL ), ), migrations.AlterUniqueTogether(name="edge", unique_together={("rangenode", "domainnode")},), migrations.AlterUniqueTogether(name="cardxnodexwidget", unique_together={("node", "card", "widget")},), migrations.AlterUniqueTogether(name="ontologyclass", unique_together={("source", "ontology")},), migrations.AlterUniqueTogether(name="relation", unique_together={("conceptfrom", 
"conceptto", "relationtype")},), migrations.AlterUniqueTogether(name="functionxgraph", unique_together={("function", "graph")},), CreateAutoPopulateUUIDField("graphs", ["graphid"]), CreateAutoPopulateUUIDField("cards", ["cardid"]), CreateAutoPopulateUUIDField("concepts", ["conceptid"]), CreateAutoPopulateUUIDField("edges", ["edgeid"]), CreateAutoPopulateUUIDField("edit_log", ["editlogid"]), CreateAutoPopulateUUIDField("forms", ["formid"]), CreateAutoPopulateUUIDField("node_groups", ["nodegroupid"]), CreateAutoPopulateUUIDField("nodes", ["nodeid"]), CreateAutoPopulateUUIDField("relations", ["relationid"]), CreateAutoPopulateUUIDField("resource_2_resource_constraints", ["resource2resourceid"]), CreateAutoPopulateUUIDField("resource_instances", ["resourceinstanceid"]), CreateAutoPopulateUUIDField("tiles", ["tileid"]), CreateAutoPopulateUUIDField("values", ["valueid"]), CreateAutoPopulateUUIDField("widgets", ["widgetid"]), migrations.RunSQL( """ ALTER TABLE nodes ADD CONSTRAINT nodes_ddatatypes_fk FOREIGN KEY (datatype) REFERENCES public.d_data_types (datatype) MATCH SIMPLE """ ), migrations.RunSQL(get_sql_string_from_file(os.path.join(settings.ROOT_DIR, "db", "dml", "db_data.sql")), ""), migrations.RunPython(forwards_func, reverse_func), migrations.RunPython(make_permissions, reverse_code=lambda *args, **kwargs: True), ]
agpl-3.0
Dave667/service
plugin.video.cScVOD/resources/lib/jsunpack.py
2
6023
""" urlresolver XBMC Addon Copyright (C) 2013 Bstrdsmkr This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Adapted for use in xbmc from: https://github.com/einars/js-beautify/blob/master/python/jsbeautifier/unpackers/packer.py usage: if detect(some_string): unpacked = unpack(some_string) Unpacker for Dean Edward's p.a.c.k.e.r """ import re import string def detect(source): """Detects whether `source` is P.A.C.K.E.R. coded.""" source = source.replace(' ', '') if re.search('eval(function(p,a,c,k,e,(?:r|d)', source): return True else: return False def unpack(source): """Unpacks P.A.C.K.E.R. packed js code.""" (payload, symtab, radix, count,) = _filterargs(source) if count != len(symtab): raise UnpackingError('Malformed p.a.c.k.e.r. symtab.') try: unbase = Unbaser(radix) except TypeError: raise UnpackingError('Unknown p.a.c.k.e.r. encoding.') def lookup(match): """Look up symbols in the synthetic symtab.""" word = match.group(0) return symtab[unbase(word)] or word source = re.sub('\\b\\w+\\b', lookup, payload) return _replacestrings(source) def _filterargs(source): """Juice from a source file the four args needed by decoder.""" argsregex = "}\\('(.*)', *(\\d+), *(\\d+), *'(.*?)'\\.split\\('\\|'\\)" args = re.search(argsregex, source, re.DOTALL).groups() try: return (args[0], args[3].split('|'), int(args[1]), int(args[2])) except ValueError: raise UnpackingError('Corrupted p.a.c.k.e.r. 
data.') def _replacestrings(source): """Strip string lookup table (list) and replace values in source.""" match = re.search('var *(_\\w+)\\=\\["(.*?)"\\];', source, re.DOTALL) if match: (varname, strings,) = match.groups() startpoint = len(match.group(0)) lookup = strings.split('","') variable = '%s[%%d]' % varname for (index, value,) in enumerate(lookup): source = source.replace(variable % index, '"%s"' % value) return source[startpoint:] return source class Unbaser(object): """Functor for a given base. Will efficiently convert strings to natural numbers.""" ALPHABET = {52: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOP', 54: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQR', 62: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', 95: ' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~'} def __init__(self, base): self.base = base if 2 <= base <= 36: self.unbase = lambda string: int(string, base) else: try: self.dictionary = dict(((cipher, index) for (index, cipher,) in enumerate(self.ALPHABET[base]))) except KeyError: raise TypeError('Unsupported base encoding.') self.unbase = self._dictunbaser def __call__(self, string): return self.unbase(string) def _dictunbaser(self, string): """Decodes a value to an integer.""" ret = 0 for (index, cipher,) in enumerate(string[::-1]): ret += self.base ** index * self.dictionary[cipher] return ret class UnpackingError(Exception): """Badly packed source or general error. 
Argument is a meaningful description.""" pass if __name__ == '__main__': test = 'eval(function(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp(\'\\b\'+c.toString(a)+\'\\b\',\'g\'),k[c]);return p}(\'4(\'30\').2z({2y:\'5://a.8.7/i/z/y/w.2x\',2w:{b:\'2v\',19:\'<p><u><2 d="20" c="#17">2u 19.</2></u><16/><u><2 d="18" c="#15">2t 2s 2r 2q.</2></u></p>\',2p:\'<p><u><2 d="20" c="#17">2o 2n b.</2></u><16/><u><2 d="18" c="#15">2m 2l 2k 2j.</2></u></p>\',},2i:\'2h\',2g:[{14:"11",b:"5://a.8.7/2f/13.12"},{14:"2e",b:"5://a.8.7/2d/13.12"},],2c:"11",2b:[{10:\'2a\',29:\'5://v.8.7/t-m/m.28\'},{10:\'27\'}],26:{\'25-3\':{\'24\':{\'23\':22,\'21\':\'5://a.8.7/i/z/y/\',\'1z\':\'w\',\'1y\':\'1x\'}}},s:\'5://v.8.7/t-m/s/1w.1v\',1u:"1t",1s:"1r",1q:\'1p\',1o:"1n",1m:"1l",1k:\'5\',1j:\'o\',});l e;l k=0;l 6=0;4().1i(9(x){f(6>0)k+=x.r-6;6=x.r;f(q!=0&&k>=q){6=-1;4().1h();4().1g(o);$(\'#1f\').j();$(\'h.g\').j()}});4().1e(9(x){6=-1});4().1d(9(x){n(x)});4().1c(9(){$(\'h.g\').j()});9 n(x){$(\'h.g\').1b();f(e)1a;e=1;}\',36,109,\'||font||jwplayer|http|p0102895|me|vidto|function|edge3|file|color|size|vvplay|if|video_ad|div||show|tt102895|var|player|doPlay|false||21600|position|skin|test||static|1y7okrqkv4ji||00020|01|type|360p|mp4|video|label|FFFFFF|br|FF0000||deleted|return|hide|onComplete|onPlay|onSeek|play_limit_box|setFullscreen|stop|onTime|dock|provider|391|height|650|width|over|controlbar|5110|duration|uniform|stretching|zip|stormtrooper|213|frequency|prefix||path|true|enabled|preview|timeslidertooltipplugin|plugins|html5|swf|src|flash|modes|hd_default|3bjhohfxpiqwws4phvqtsnolxocychumk274dsnkblz6sfgq6uz6zt77gxia|240p|3bjhohfxpiqwws4phvqtsnolxocychumk274dsnkba36sfgq6uzy3tv2oidq|hd|original|ratio|broken|is|link|Your|such|No|nofile|more|any|availabe|Not|File|OK|previw|jpg|image|setup|flvplayer\'.split(\'|\')))' print unpack(test)
gpl-2.0
stuart-c/pgu
data/themes/default/generate.py
28
3502
"""Generate the default pgu theme's .tga widget images from .png sources.

Each source image is a 2x2 grid of corner quadrants; prep() expands it to a
3x3 "9-patch" grid, and the main loop assembles each widget image from that
grid according to a numeric-keypad layout string.
"""
import pygame
from pygame.locals import *

# pygame needs an initialized display before surfaces can be converted.
pygame.display.init()
pygame.display.set_mode((80, 80), 32)


def prep(name):
    """Load <name>.png and expand it into a 3x3 tile grid.

    The four quadrants of the source become the four corners of the grid;
    the centre row and column are filled by replicating the inner edge
    pixels, so the result scales like a 9-patch image.

    Returns (surface, w, h) where w, h are the quadrant (tile) dimensions.
    """
    fname = name + ".png"
    img = pygame.image.load(fname)
    # Floor division: surface sizes must be ints.  The original used "/",
    # which produces floats under Python 3 and breaks pygame.Surface /
    # subsurface (the script already uses Python 3 print() syntax).
    w, h = img.get_width() // 2, img.get_height() // 2
    out = pygame.Surface((w * 3, h * 3), SWSURFACE | SRCALPHA, 32)
    out.fill((0, 0, 0, 0))
    # Copy the four source quadrants to the four corners of the 3x3 grid.
    out.blit(img.subsurface(0, 0, w, h), (0, 0))
    out.blit(img.subsurface(w, 0, w, h), (w * 2, 0))
    out.blit(img.subsurface(0, h, w, h), (0, h * 2))
    out.blit(img.subsurface(w, h, w, h), (w * 2, h * 2))
    # Fill the centre column by replicating the column just left of centre.
    for i in range(0, w):
        img = out.subsurface((w - 1, 0, 1, h * 3)).convert_alpha()
        out.blit(img, (w + i, 0))
    # Fill the centre row by replicating the row just above centre.
    for i in range(0, h):
        img = out.subsurface((0, h - 1, w * 3, 1)).convert_alpha()
        out.blit(img, (0, h + i))
    return out, w, h


# (target, source image, overlay image or None, tiles wide, tiles high,
#  layout string: numeric-keypad digits, '7'..'9' = top row of the 3x3 grid,
#  '1'..'3' = bottom row)
todo = [
    ('button.normal', 'dot.normal', None, 3, 3, '789456123'),
    ('button.hover', 'dot.hover', None, 3, 3, '789456123'),
    ('button.down', 'dot.down', None, 3, 3, '789456123'),
    ('checkbox.off.normal', 'box.normal', None, 2, 2, '7913'),
    ('checkbox.on.normal', 'box.down', 'check', 2, 2, '7913'),
    ('checkbox.off.hover', 'box.hover', None, 2, 2, '7913'),
    ('checkbox.on.hover', 'box.hover', 'check', 2, 2, '7913'),
    ('radio.off.normal', 'dot.normal', None, 2, 2, '7913'),
    ('radio.on.normal', 'dot.down', 'radio', 2, 2, '7913'),
    ('radio.off.hover', 'dot.hover', None, 2, 2, '7913'),
    ('radio.on.hover', 'dot.hover', 'radio', 2, 2, '7913'),
    ('tool.normal', 'box.normal', None, 3, 3, '789456123'),
    ('tool.hover', 'box.hover', None, 3, 3, '789456123'),
    ('tool.down', 'box.down', None, 3, 3, '789456123'),
    ('hslider', 'idot.normal', None, 3, 3, '789456123'),
    ('hslider.bar.normal', 'dot.normal', None, 3, 3, '789456123'),
    ('hslider.bar.hover', 'dot.hover', None, 3, 3, '789456123'),
    ('hslider.left', 'sbox.normal', 'left', 2, 2, '7913'),
    ('hslider.right', 'sbox.normal', 'right', 2, 2, '7913'),
    ('vslider', 'idot.normal', None, 3, 3, '789456123'),
    ('vslider.bar.normal', 'vdot.normal', None, 3, 3, '789456123'),
    ('vslider.bar.hover', 'vdot.hover', None, 3, 3, '789456123'),
    ('vslider.up', 'vsbox.normal', 'up', 2, 2, '7913'),
    ('vslider.down', 'vsbox.normal', 'down', 2, 2, '7913'),
    ('dialog.close.normal', 'rdot.hover', None, 2, 2, '7913'),
    ('dialog.close.hover', 'rdot.hover', 'x', 2, 2, '7913'),
    ('dialog.close.down', 'rdot.down', 'x', 2, 2, '7913'),
    ('menu.normal', 'desktop', None, 1, 1, '7'),
    ('menu.hover', 'box.normal', None, 3, 3, '789456123'),
    ('menu.down', 'box.down', None, 3, 3, '789456123'),
    ('select.selected.normal', 'box.normal', None, 3, 3, '788455122'),
    ('select.selected.hover', 'box.hover', None, 3, 3, '788455122'),
    ('select.selected.down', 'box.down', None, 3, 3, '788455122'),
    ('select.arrow.normal', 'box.hover', None, 3, 3, '889556223'),
    ('select.arrow.hover', 'box.hover', None, 3, 3, '889556223'),
    ('select.arrow.down', 'box.down', None, 3, 3, '889556223'),
    ('progressbar', 'sbox.normal', None, 3, 3, '789456123'),
    ('progressbar.bar', 'box.hover', None, 3, 3, '789456123'),
]

for fname, img, over, ww, hh, s in todo:
    print(fname)
    img, w, h = prep(img)
    out = pygame.Surface((ww * w, hh * h), SWSURFACE | SRCALPHA, 32)
    out.fill((0, 0, 0, 0))
    n = 0
    for y in range(0, hh):
        for x in range(0, ww):
            c = int(s[n])
            # Map keypad digit c to (col, row) in the 3x3 grid.  Floor
            # division here too: "/" would make yy a float index in Py3.
            xx, yy = (c - 1) % 3, 2 - (c - 1) // 3
            out.blit(img.subsurface((xx * w, yy * h, w, h)), (x * w, y * h))
            n += 1
    if over is not None:
        over = pygame.image.load(over + ".png")
        out.blit(over, (0, 0))
    pygame.image.save(out, fname + ".tga")
lgpl-2.1
eparis/contrib
hack/verify-flags-underscore.py
34
8924
#!/usr/bin/env python # Copyright 2015 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import json import mmap import os import re import sys import argparse parser = argparse.ArgumentParser() parser.add_argument("filenames", help="list of files to check, all files if unspecified", nargs='*') parser.add_argument("-e", "--skip-exceptions", help="ignore hack/verify-flags/exceptions.txt and print all output", action="store_true") args = parser.parse_args() # Cargo culted from http://stackoverflow.com/questions/898669/how-can-i-detect-if-a-file-is-binary-non-text-in-python def is_binary(pathname): """Return true if the given filename is binary. @raise EnvironmentError: if the file does not exist or cannot be accessed. 
@attention: found @ http://bytes.com/topic/python/answers/21222-determine-file-type-binary-text on 6/08/2010 @author: Trent Mick <[email protected]> @author: Jorge Orpinel <[email protected]>""" try: with open(pathname, 'r') as f: CHUNKSIZE = 1024 while 1: chunk = f.read(CHUNKSIZE) if '\0' in chunk: # found null byte return True if len(chunk) < CHUNKSIZE: break # done except: return True return False def get_all_files(rootdir): all_files = [] for root, dirs, files in os.walk(rootdir): # don't visit certain dirs if 'Godeps' in dirs: dirs.remove('Godeps') if 'third_party' in dirs: dirs.remove('third_party') if '.git' in dirs: dirs.remove('.git') if 'exceptions.txt' in files: files.remove('exceptions.txt') if 'known-flags.txt' in files: files.remove('known-flags.txt') if 'vendor' in dirs: dirs.remove('vendor') for name in files: if name.endswith(".svg"): continue if name.endswith(".gliffy"): continue pathname = os.path.join(root, name) if is_binary(pathname): continue all_files.append(pathname) return all_files def normalize_files(rootdir, files): newfiles = [] a = ['Godeps', 'vendor', 'third_party', 'exceptions.txt', 'known-flags.txt'] for f in files: if any(x in f for x in a): continue if f.endswith(".svg"): continue if f.endswith(".gliffy"): continue newfiles.append(f) for i, f in enumerate(newfiles): if not os.path.isabs(f): newfiles[i] = os.path.join(rootdir, f) return newfiles def line_has_bad_flag(line, flagre): results = flagre.findall(line) for result in results: if not "_" in result: return False # this should exclude many cases where jinja2 templates use kube flags # as variables, except it uses _ for the variable name if "{% set" + result + "= \"" in line: return False if "pillar[" + result + "]" in line: return False if "grains" + result in line: return False # These are usually yaml definitions if result.endswith(":"): return False # something common in juju variables... 
if "template_data[" + result + "]" in line: return False return True return False # The list of files might not be the whole repo. If someone only changed a # couple of files we don't want to run all of the golang files looking for # flags. Instead load the list of flags from hack/verify-flags/known-flags.txt # If running the golang files finds a new flag not in that file, return an # error and tell the user to add the flag to the flag list. def get_flags(rootdir, files): # preload the 'known' flags pathname = os.path.join(rootdir, "hack/verify-flags/known-flags.txt") f = open(pathname, 'r') flags = set(f.read().splitlines()) f.close() # preload the 'known' flags which don't follow the - standard pathname = os.path.join(rootdir, "hack/verify-flags/excluded-flags.txt") f = open(pathname, 'r') excluded_flags = set(f.read().splitlines()) f.close() regexs = [ re.compile('Var[P]?\([^,]*, "([^"]*)"'), re.compile('.String[P]?\("([^"]*)",[^,]+,[^)]+\)'), re.compile('.Int[P]?\("([^"]*)",[^,]+,[^)]+\)'), re.compile('.Bool[P]?\("([^"]*)",[^,]+,[^)]+\)'), re.compile('.Duration[P]?\("([^"]*)",[^,]+,[^)]+\)'), re.compile('.StringSlice[P]?\("([^"]*)",[^,]+,[^)]+\)') ] new_flags = set() new_excluded_flags = set() # walk all the files looking for any flags being declared for pathname in files: if not pathname.endswith(".go"): continue f = open(pathname, 'r') data = f.read() f.close() matches = [] for regex in regexs: matches = matches + regex.findall(data) for flag in matches: if any(x in flag for x in excluded_flags): continue if "_" in flag: new_excluded_flags.add(flag) if not "-" in flag: continue if flag not in flags: new_flags.add(flag) if len(new_excluded_flags) != 0: print("Found a flag declared with an _ but which is not explicitly listed as a valid flag name in hack/verify-flags/excluded-flags.txt") print("Are you certain this flag should not have been declared with an - instead?") l = list(new_excluded_flags) l.sort() print("%s" % "\n".join(l)) sys.exit(1) if 
len(new_flags) != 0: print("Found flags in golang files not in the list of known flags. Please add these to hack/verify-flags/known-flags.txt") l = list(new_flags) l.sort() print("%s" % "\n".join(l)) sys.exit(1) return list(flags) def flags_to_re(flags): """turn the list of all flags we found into a regex find both - and _ versions""" dashRE = re.compile('[-_]') flagREs = [] for flag in flags: # turn all flag names into regexs which will find both types newre = dashRE.sub('[-_]', flag) # only match if there is not a leading or trailing alphanumeric character flagREs.append("[^\w${]" + newre + "[^\w]") # turn that list of regex strings into a single large RE flagRE = "|".join(flagREs) flagRE = re.compile(flagRE) return flagRE def load_exceptions(rootdir): exceptions = set() if args.skip_exceptions: return exceptions exception_filename = os.path.join(rootdir, "hack/verify-flags/exceptions.txt") exception_file = open(exception_filename, 'r') for exception in exception_file.read().splitlines(): out = exception.split(":", 1) if len(out) != 2: printf("Invalid line in exceptions file: %s" % exception) continue filename = out[0] line = out[1] exceptions.add((filename, line)) return exceptions def main(): rootdir = os.path.dirname(__file__) + "/../" rootdir = os.path.abspath(rootdir) exceptions = load_exceptions(rootdir) if len(args.filenames) > 0: files = args.filenames else: files = get_all_files(rootdir) files = normalize_files(rootdir, files) flags = get_flags(rootdir, files) flagRE = flags_to_re(flags) bad_lines = [] # walk all the file looking for any flag that was declared and now has an _ for pathname in files: relname = os.path.relpath(pathname, rootdir) f = open(pathname, 'r') for line in f.read().splitlines(): if line_has_bad_flag(line, flagRE): if (relname, line) not in exceptions: bad_lines.append((relname, line)) f.close() if len(bad_lines) != 0: if not args.skip_exceptions: print("Found illegal 'flag' usage. 
If these are false positives you should run `hack/verify-flags-underscore.py -e > hack/verify-flags/exceptions.txt` to update the list.") bad_lines.sort() for (relname, line) in bad_lines: print("%s:%s" % (relname, line)) return 1 if __name__ == "__main__": sys.exit(main())
apache-2.0
noelbk/neutron-juniper
neutron/agent/securitygroups_rpc.py
8
8254
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from oslo.config import cfg

from neutron.common import topics
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging

LOG = logging.getLogger(__name__)

# RPC API version used by all security-group messages in this module.
SG_RPC_VERSION = "1.1"

security_group_opts = [
    cfg.StrOpt(
        'firewall_driver',
        default='neutron.agent.firewall.NoopFirewallDriver',
        help=_('Driver for Security Groups Firewall'))
]
cfg.CONF.register_opts(security_group_opts, 'SECURITYGROUP')


def is_firewall_enabled():
    # The Noop driver means "no firewalling": anything else counts as enabled.
    return (cfg.CONF.SECURITYGROUP.firewall_driver !=
            'neutron.agent.firewall.NoopFirewallDriver')


def disable_security_group_extension_if_noop_driver(
    supported_extension_aliases):
    # Mutates the caller's alias list in place.
    if not is_firewall_enabled():
        LOG.debug(_('Disabled security-group extension.'))
        supported_extension_aliases.remove('security-group')


class SecurityGroupServerRpcApiMixin(object):
    """A mix-in that enable SecurityGroup support in plugin rpc."""

    def security_group_rules_for_devices(self, context, devices):
        """Synchronous RPC call asking the plugin for the security-group
        rules applying to the given devices."""
        LOG.debug(_("Get security group rules "
                    "for devices via rpc %r"), devices)
        return self.call(context,
                         self.make_msg('security_group_rules_for_devices',
                                       devices=devices),
                         version=SG_RPC_VERSION,
                         topic=self.topic)


class SecurityGroupAgentRpcCallbackMixin(object):
    """A mix-in that enable SecurityGroup agent
    support in agent implementations.
    """
    #mix-in object should be have sg_agent
    # sg_agent: set by the host agent after init; every callback below
    # no-ops (with a warning) until it is set.
    sg_agent = None

    def _security_groups_agent_not_set(self):
        LOG.warning(_("Security group agent binding currently not set. "
                      "This should be set by the end of the init "
                      "process."))

    def security_groups_rule_updated(self, context, **kwargs):
        """Callback for security group rule update.

        :param security_groups: list of updated security_groups
        """
        security_groups = kwargs.get('security_groups', [])
        LOG.debug(
            _("Security group rule updated on remote: %s"), security_groups)
        if not self.sg_agent:
            return self._security_groups_agent_not_set()
        self.sg_agent.security_groups_rule_updated(security_groups)

    def security_groups_member_updated(self, context, **kwargs):
        """Callback for security group member update.

        :param security_groups: list of updated security_groups
        """
        security_groups = kwargs.get('security_groups', [])
        LOG.debug(
            _("Security group member updated on remote: %s"), security_groups)
        if not self.sg_agent:
            return self._security_groups_agent_not_set()
        self.sg_agent.security_groups_member_updated(security_groups)

    def security_groups_provider_updated(self, context, **kwargs):
        """Callback for security group provider update."""
        LOG.debug(_("Provider rule updated"))
        if not self.sg_agent:
            return self._security_groups_agent_not_set()
        self.sg_agent.security_groups_provider_updated()


class SecurityGroupAgentRpcMixin(object):
    """A mix-in that enable SecurityGroup agent
    support in agent implementations.
    """

    def init_firewall(self):
        # Instantiates the configured firewall driver on self.firewall;
        # must be called before any of the filter methods below.
        firewall_driver = cfg.CONF.SECURITYGROUP.firewall_driver
        LOG.debug(_("Init firewall settings (driver=%s)"), firewall_driver)
        self.firewall = importutils.import_object(firewall_driver)

    def prepare_devices_filter(self, device_ids):
        """Fetch rules for device_ids from the plugin and program them."""
        if not device_ids:
            return
        LOG.info(_("Preparing filters for devices %s"), device_ids)
        devices = self.plugin_rpc.security_group_rules_for_devices(
            self.context, list(device_ids))
        # defer_apply batches the per-port changes into one firewall update.
        with self.firewall.defer_apply():
            for device in devices.values():
                self.firewall.prepare_port_filter(device)

    def security_groups_rule_updated(self, security_groups):
        LOG.info(_("Security group "
                   "rule updated %r"), security_groups)
        self._security_group_updated(
            security_groups,
            'security_groups')

    def security_groups_member_updated(self, security_groups):
        LOG.info(_("Security group "
                   "member updated %r"), security_groups)
        self._security_group_updated(
            security_groups,
            'security_group_source_groups')

    def _security_group_updated(self, security_groups, attribute):
        # Refresh only the ports whose `attribute` list intersects the
        # updated security groups.
        devices = []
        sec_grp_set = set(security_groups)
        for device in self.firewall.ports.values():
            if sec_grp_set & set(device.get(attribute, [])):
                devices.append(device)
        if devices:
            self.refresh_firewall(devices)

    def security_groups_provider_updated(self):
        # Provider-wide change: no way to narrow it, refresh everything.
        LOG.info(_("Provider rule updated"))
        self.refresh_firewall()

    def remove_devices_filter(self, device_ids):
        if not device_ids:
            return
        LOG.info(_("Remove device filter for %r"), device_ids)
        with self.firewall.defer_apply():
            for device_id in device_ids:
                device = self.firewall.ports.get(device_id)
                if not device:
                    continue
                self.firewall.remove_port_filter(device)

    def refresh_firewall(self, devices=None):
        """Re-fetch rules and reprogram filters for `devices`, or for every
        known port when devices is None."""
        LOG.info(_("Refresh firewall rules"))
        if devices:
            device_ids = [d['device'] for d in devices]
        else:
            device_ids = self.firewall.ports.keys()
        if not device_ids:
            LOG.info(_("No ports here to refresh firewall"))
            return
        devices = self.plugin_rpc.security_group_rules_for_devices(
            self.context, device_ids)
        with self.firewall.defer_apply():
            for device in devices.values():
                LOG.debug(_("Update port filter for %s"), device['device'])
                self.firewall.update_port_filter(device)


class SecurityGroupAgentRpcApiMixin(object):
    # Plugin-side notifier: fans out security-group change events to every
    # listening agent (fire-and-forget, no reply expected).

    def _get_security_group_topic(self):
        return topics.get_topic_name(self.topic,
                                     topics.SECURITY_GROUP,
                                     topics.UPDATE)

    def security_groups_rule_updated(self, context, security_groups):
        """Notify rule updated security groups."""
        if not security_groups:
            return
        self.fanout_cast(context,
                         self.make_msg('security_groups_rule_updated',
                                       security_groups=security_groups),
                         version=SG_RPC_VERSION,
                         topic=self._get_security_group_topic())

    def security_groups_member_updated(self, context, security_groups):
        """Notify member updated security groups."""
        if not security_groups:
            return
        self.fanout_cast(context,
                         self.make_msg('security_groups_member_updated',
                                       security_groups=security_groups),
                         version=SG_RPC_VERSION,
                         topic=self._get_security_group_topic())

    def security_groups_provider_updated(self, context):
        """Notify provider updated security groups."""
        self.fanout_cast(context,
                         self.make_msg('security_groups_provider_updated'),
                         version=SG_RPC_VERSION,
                         topic=self._get_security_group_topic())
apache-2.0
linked67/p2pool-exclusivecoin
p2pool/bitcoin/height_tracker.py
227
4678
from twisted.internet import defer
from twisted.python import log

import p2pool
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral, forest, jsonrpc, variable


class HeaderWrapper(object):
    # Minimal header record: just own-hash and parent-hash, which is all
    # forest.Tracker needs to chain headers together.
    __slots__ = 'hash previous_hash'.split(' ')

    @classmethod
    def from_header(cls, header):
        # Alternate constructor: compute the header's hash from its
        # serialized form.
        return cls(bitcoin_data.hash256(bitcoin_data.block_header_type.pack(header)), header['previous_block'])

    def __init__(self, hash, previous_hash):
        self.hash, self.previous_hash = hash, previous_hash


class HeightTracker(object):
    '''Point this at a factory and let it take care of getting block heights'''

    def __init__(self, best_block_func, factory, backlog_needed):
        # best_block_func: callable returning the current best block hash.
        # factory: p2p factory whose new_headers/new_block events we watch.
        # backlog_needed: how many headers of history we try to accumulate.
        self._best_block_func = best_block_func
        self._factory = factory
        self._backlog_needed = backlog_needed

        self._tracker = forest.Tracker()

        self._watch1 = self._factory.new_headers.watch(self._heard_headers)
        self._watch2 = self._factory.new_block.watch(self._request)

        # Hashes already asked for via getheaders; cleared every 60s so a
        # lost request is eventually retried.
        self._requested = set()
        self._clear_task = deferral.RobustLoopingCall(self._requested.clear)
        self._clear_task.start(60)

        self._last_notified_size = 0

        # Fired whenever new headers are accepted into the tracker.
        self.updated = variable.Event()

        self._think_task = deferral.RobustLoopingCall(self._think)
        self._think_task.start(15)
        self._think2_task = deferral.RobustLoopingCall(self._think2)
        self._think2_task.start(15)

    def _think(self):
        # Extend the backlog: from the highest known head, walk back and
        # request more headers until we hold backlog_needed of them.
        try:
            highest_head = max(self._tracker.heads, key=lambda h: self._tracker.get_height_and_last(h)[0]) if self._tracker.heads else None
            if highest_head is None:
                return # wait for think2
            height, last = self._tracker.get_height_and_last(highest_head)
            if height < self._backlog_needed:
                self._request(last)
        except:
            log.err(None, 'Error in HeightTracker._think:')

    def _think2(self):
        # Seed/refresh the chain tip from the current best block.
        self._request(self._best_block_func())

    def _heard_headers(self, headers):
        changed = False
        for header in headers:
            hw = HeaderWrapper.from_header(header)
            if hw.hash in self._tracker.items:
                continue
            changed = True
            self._tracker.add(hw)
        if changed:
            self.updated.happened()
        self._think()

        # Progress report, printed at most once per 100 new headers.
        if len(self._tracker.items) >= self._last_notified_size + 100:
            print 'Have %i/%i block headers' % (len(self._tracker.items), self._backlog_needed)
            self._last_notified_size = len(self._tracker.items)

    @defer.inlineCallbacks
    def _request(self, last):
        # Ask a peer for headers preceding `last`, deduplicating against
        # what we already hold or already asked for.
        if last in self._tracker.items:
            return
        if last in self._requested:
            return
        self._requested.add(last)
        (yield self._factory.getProtocol()).send_getheaders(version=1, have=[], last=last)

    def get_height_rel_highest(self, block_hash):
        # callers: highest height can change during yields!
        best_height, best_last = self._tracker.get_height_and_last(self._best_block_func())
        height, last = self._tracker.get_height_and_last(block_hash)
        if last != best_last:
            return -1000000000 # XXX hack
        return height - best_height


@defer.inlineCallbacks
def get_height_rel_highest_func(bitcoind, factory, best_block_func, net):
    # Prefer asking bitcoind directly when its RPC supports getblock;
    # otherwise fall back to tracking headers over p2p via HeightTracker.
    if '\ngetblock ' in (yield deferral.retry()(bitcoind.rpc_help)()):
        @deferral.DeferredCacher
        @defer.inlineCallbacks
        def height_cacher(block_hash):
            try:
                x = yield bitcoind.rpc_getblock('%x' % (block_hash,))
            except jsonrpc.Error_for_code(-5): # Block not found
                if not p2pool.DEBUG:
                    raise deferral.RetrySilentlyException()
                else:
                    raise
            # some daemons report 'blockcount', others 'height'
            defer.returnValue(x['blockcount'] if 'blockcount' in x else x['height'])
        best_height_cached = variable.Variable((yield deferral.retry()(height_cacher)(best_block_func())))
        def get_height_rel_highest(block_hash):
            # call_now returns the cached value or 0 while a fetch is pending
            this_height = height_cacher.call_now(block_hash, 0)
            best_height = height_cacher.call_now(best_block_func(), 0)
            best_height_cached.set(max(best_height_cached.value, this_height, best_height))
            return this_height - best_height_cached.value
    else:
        get_height_rel_highest = HeightTracker(best_block_func, factory, 5*net.SHARE_PERIOD*net.CHAIN_LENGTH/net.PARENT.BLOCK_PERIOD).get_height_rel_highest
    defer.returnValue(get_height_rel_highest)
gpl-3.0
benschulz/servo
tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_stash.py
299
1231
import os
import unittest
import urllib2
import json
import uuid

import wptserve
from wptserve.router import any_method
from base import TestUsingServer, doc_root


# NOTE(review): the class name looks copy/pasted from the cookie tests --
# the assertions below exercise the server stash, not Set-Cookie.  Renaming
# would be a separate change since test discovery keys off class names.
class TestResponseSetCookie(TestUsingServer):
    def test_put_take(self):
        """put() a value into the server stash, then take() it exactly once.

        take() is destructive: a second GET for the same id must miss.
        """
        @wptserve.handlers.handler
        def handler(request, response):
            # POST stores a value; GET retrieves-and-removes it.
            if request.method == "POST":
                request.server.stash.put(request.POST.first("id"), request.POST.first("data"))
                data = "OK"
            elif request.method == "GET":
                data = request.server.stash.take(request.GET.first("id"))
                if data is None:
                    return "NOT FOUND"
            return data

        # Random key so repeated/concurrent runs can't collide in the stash.
        id = str(uuid.uuid4())
        route = (any_method, "/test/put_take", handler)
        self.server.router.register(*route)
        resp = self.request(route[1], method="POST", body={"id": id, "data": "Sample data"})
        self.assertEquals(resp.read(), "OK")
        resp = self.request(route[1], query="id=" + id)
        self.assertEquals(resp.read(), "Sample data")
        # Second take of the same id: stash entries are one-shot.
        resp = self.request(route[1], query="id=" + id)
        self.assertEquals(resp.read(), "NOT FOUND")


if __name__ == '__main__':
    unittest.main()
mpl-2.0
Juniper/neutron
neutron/common/test_lib.py
13
2136
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2010 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Colorizer Code is borrowed from Twisted: # Copyright (c) 2001-2010 Twisted Matrix Laboratories. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# describes parameters used by different unit/functional tests # a plugin-specific testing mechanism should import this dictionary # and override the values in it if needed (e.g., run_tests.py in # neutron/plugins/openvswitch/ ) test_config = {}
apache-2.0
cmbiwer/pycbc
pycbc/distributions/power_law.py
6
9306
# Copyright (C) 2016 Christopher M. Biwer
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This modules provides classes for evaluating distributions where the
probability density function is a power law.
"""

import numpy

from pycbc.distributions import bounded


class UniformPowerLaw(bounded.BoundedDist):
    r"""
    For a uniform distribution in power law. The parameters are
    independent of each other. Instances of this class can be called like
    a function. By default, logpdf will be called, but this can be changed
    by setting the class's __call__ method to its pdf method.

    The cumulative distribution function (CDF) will be the ratio of volumes:

    .. math::

        F(r) = \frac{V(r)}{V(R)}

    Where :math:`R` is the radius of the sphere. So we can write our
    probability density function (PDF) as:

    .. math::

        f(r) = c r^n

    For generality we use :math:`n` for the dimension of the volume element,
    eg. :math:`n=2` for a 3-dimensional sphere. And use
    :math:`c` as a general constant.

    So now we calculate the CDF in general for this type of PDF:

    .. math::

        F(r) = \int f(r) dr = \int c r^n dr = \frac{1}{n + 1} c r^{n + 1} + k

    Now with the definition of the CDF at radius :math:`r_{l}` is equal to 0
    and at radius :math:`r_{h}` is equal to 1 we find that the constant from
    integration is:

    .. math::

        1 = \frac{1}{n + 1} c ((r_{h})^{n + 1} - (r_{l})^{n + 1}) + k

    Can see that :math:`c = (n + 1) / ((r_{h})^{n + 1} - (r_{l})^{n + 1})`.
    And :math:`k` is:

    .. math::

        k = - \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}

    So the CDF is:

    .. math::

        F(r) = \frac{1}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} r^{n + 1} -
               \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}

    And the PDF is the derivative of the CDF:

    .. math::

        f(r) = \frac{(n + 1)}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} (r)^n

    Now we use the probability integral transform method to get sampling on
    uniform numbers from a continuous random variable. To do this we find
    the inverse of the CDF evaluated for uniform numbers:

    .. math::

        u = F(r) = \frac{1}{(r_{h})^{n + 1} - (r_{l})^{n + 1}} r^{n + 1} -
            \frac{r_{l}^{n + 1}}{(r_{h})^{n + 1} - (r_{l})^{n + 1}}

    And solving for :math:`r` gives:

    .. math::

        r = ( ((r_{h})^{n + 1} - (r_{l})^{n + 1}) u +
              (r_{l})^{n + 1})^{\frac{1}{n + 1}}

    Therefore the radius can be sampled by taking the n-th root of uniform
    numbers and multiplying by the radius offset by the lower bound radius.

    Parameters
    ----------
    dim : int
        The dimension of volume space. In the notation above `dim`
        is :math:`n+1`. For a 3-dimensional sphere this is 3.
    \**params :
        The keyword arguments should provide the names of parameters and
        their corresponding bounds, as either tuples or a
        `boundaries.Bounds` instance.

    Attributes
    ----------
    name : 'uniform_power_law'
        The name of this distribution.
    dim : int
        The dimension of volume space.
    params : list of strings
        The list of parameter names.
    bounds : dict
        A dictionary of the parameter names and their bounds.
    norm : float
        The normalization of the multi-dimensional pdf.
    lognorm : float
        The log of the normalization.
    """
    name = "uniform_power_law"

    def __init__(self, dim=None, **params):
        super(UniformPowerLaw, self).__init__(**params)
        self.dim = dim
        self._norm = 1.0
        self._lognorm = 0.0
        for p in self._params:
            # per-parameter normalization of r**(dim-1) over [lo, hi]:
            # dim / (hi**dim - lo**dim); the joint norm is the product.
            self._norm *= self.dim / \
                (self._bounds[p][1]**(self.dim) -
                 self._bounds[p][0]**(self.dim))
        self._lognorm = numpy.log(self._norm)

    @property
    def norm(self):
        return self._norm

    @property
    def lognorm(self):
        return self._lognorm

    def rvs(self, size=1, param=None):
        """Gives a set of random values drawn from this distribution.

        Parameters
        ----------
        size : {1, int}
            The number of values to generate; default is 1.
        param : {None, string}
            If provided, will just return values for the given parameter.
            Otherwise, returns random values for each parameter.

        Returns
        -------
        structured array
            The random values in a numpy structured array. If a param was
            specified, the array will only have an element corresponding to
            the given parameter. Otherwise, the array will have an element
            for each parameter in self's params.
        """
        if param is not None:
            dtype = [(param, float)]
        else:
            dtype = [(p, float) for p in self.params]
        arr = numpy.zeros(size, dtype=dtype)
        for (p, _) in dtype:
            # inverse-CDF sampling: u ~ U(0,1) mapped through
            # r = (u*(hi**dim - lo**dim) + lo**dim)**(1/dim)
            offset = numpy.power(self._bounds[p][0], self.dim)
            factor = numpy.power(self._bounds[p][1], self.dim) - \
                numpy.power(self._bounds[p][0], self.dim)
            arr[p] = numpy.random.uniform(0.0, 1.0, size=size)
            arr[p] = numpy.power(factor * arr[p] + offset, 1.0 / self.dim)
        return arr

    def cdfinv(self, param, value):
        """Return inverse of cdf to map unit interval to parameter bounds.
        """
        # n + 1 == self.dim in the docstring's notation
        n = self.dim - 1
        r_l = self._bounds[param][0]
        r_h = self._bounds[param][1]
        new_value = ((r_h**(n+1) - r_l**(n+1))*value + r_l**(n+1))**(1./(n+1))
        return new_value

    def _pdf(self, **kwargs):
        """Returns the pdf at the given values. The keyword arguments must
        contain all of parameters in self's params. Unrecognized arguments
        are ignored.
        """
        for p in self._params:
            if p not in kwargs.keys():
                raise ValueError(
                    'Missing parameter {} to construct pdf.'.format(p))
        if kwargs in self:
            pdf = self._norm * \
                numpy.prod([(kwargs[p])**(self.dim - 1)
                            for p in self._params])
            return float(pdf)
        else:
            return 0.0

    def _logpdf(self, **kwargs):
        """Returns the log of the pdf at the given values. The keyword
        arguments must contain all of parameters in self's params.
        Unrecognized arguments are ignored.
        """
        for p in self._params:
            if p not in kwargs.keys():
                raise ValueError(
                    'Missing parameter {} to construct pdf.'.format(p))
        if kwargs in self:
            log_pdf = self._lognorm + \
                (self.dim - 1) * \
                numpy.log([kwargs[p] for p in self._params]).sum()
            return log_pdf
        else:
            return -numpy.inf

    @classmethod
    def from_config(cls, cp, section, variable_args):
        """Returns a distribution based on a configuration file. The
        parameters for the distribution are retrieved from the section
        titled "[`section`-`variable_args`]" in the config file.

        Parameters
        ----------
        cp : pycbc.workflow.WorkflowConfigParser
            A parsed configuration file that contains the distribution
            options.
        section : str
            Name of the section in the configuration file.
        variable_args : str
            The names of the parameters for this distribution, separated by
            `prior.VARARGS_DELIM`. These must appear in the "tag" part
            of the section header.

        Returns
        -------
        Uniform
            A distribution instance from the pycbc.inference.prior module.
        """
        return super(UniformPowerLaw, cls).from_config(cp, section,
                                                       variable_args,
                                                       bounds_required=True)


class UniformRadius(UniformPowerLaw):
    """For a uniform distribution in volume using spherical coordinates, this
    is the distribution to use for the radius. For more details see
    UniformPowerLaw.
    """
    name = "uniform_radius"

    def __init__(self, dim=3, **params):
        # BUG FIX: the super() call previously hard-coded dim=3, silently
        # ignoring any caller-supplied value. Pass the argument through; the
        # default remains 3 (uniform-in-volume radius), so existing callers
        # are unaffected.
        super(UniformRadius, self).__init__(dim=dim, **params)


__all__ = ["UniformPowerLaw", "UniformRadius"]
gpl-3.0
iabdalkader/openmv
scripts/examples/16-Codes/find_barcodes.py
3
2316
# Barcode Example # # This example shows off how easy it is to detect bar codes using the # OpenMV Cam M7. Barcode detection does not work on the M4 Camera. import sensor, image, time, math sensor.reset() sensor.set_pixformat(sensor.GRAYSCALE) sensor.set_framesize(sensor.VGA) # High Res! sensor.set_windowing((640, 80)) # V Res of 80 == less work (40 for 2X the speed). sensor.skip_frames(time = 2000) sensor.set_auto_gain(False) # must turn this off to prevent image washout... sensor.set_auto_whitebal(False) # must turn this off to prevent image washout... clock = time.clock() # Barcode detection can run at the full 640x480 resolution of your OpenMV Cam's # OV7725 camera module. Barcode detection will also work in RGB565 mode but at # a lower resolution. That said, barcode detection requires a higher resolution # to work well so it should always be run at 640x480 in grayscale... def barcode_name(code): if(code.type() == image.EAN2): return "EAN2" if(code.type() == image.EAN5): return "EAN5" if(code.type() == image.EAN8): return "EAN8" if(code.type() == image.UPCE): return "UPCE" if(code.type() == image.ISBN10): return "ISBN10" if(code.type() == image.UPCA): return "UPCA" if(code.type() == image.EAN13): return "EAN13" if(code.type() == image.ISBN13): return "ISBN13" if(code.type() == image.I25): return "I25" if(code.type() == image.DATABAR): return "DATABAR" if(code.type() == image.DATABAR_EXP): return "DATABAR_EXP" if(code.type() == image.CODABAR): return "CODABAR" if(code.type() == image.CODE39): return "CODE39" if(code.type() == image.PDF417): return "PDF417" if(code.type() == image.CODE93): return "CODE93" if(code.type() == image.CODE128): return "CODE128" while(True): clock.tick() img = sensor.snapshot() codes = img.find_barcodes() for code in codes: img.draw_rectangle(code.rect()) print_args = (barcode_name(code), code.payload(), (180 * code.rotation()) / math.pi, code.quality(), clock.fps()) print("Barcode %s, Payload \"%s\", rotation %f (degrees), quality %d, 
FPS %f" % print_args) if not codes: print("FPS %f" % clock.fps())
mit
andreamerello/linux-stm32
scripts/tracing/draw_functrace.py
14676
3560
#!/usr/bin/python """ Copyright 2008 (c) Frederic Weisbecker <[email protected]> Licensed under the terms of the GNU GPL License version 2 This script parses a trace provided by the function tracer in kernel/trace/trace_functions.c The resulted trace is processed into a tree to produce a more human view of the call stack by drawing textual but hierarchical tree of calls. Only the functions's names and the the call time are provided. Usage: Be sure that you have CONFIG_FUNCTION_TRACER # mount -t debugfs nodev /sys/kernel/debug # echo function > /sys/kernel/debug/tracing/current_tracer $ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func Wait some times but not too much, the script is a bit slow. Break the pipe (Ctrl + Z) $ scripts/draw_functrace.py < raw_trace_func > draw_functrace Then you have your drawn trace in draw_functrace """ import sys, re class CallTree: """ This class provides a tree representation of the functions call stack. If a function has no parent in the kernel (interrupt, syscall, kernel thread...) then it is attached to a virtual parent called ROOT. """ ROOT = None def __init__(self, func, time = None, parent = None): self._func = func self._time = time if parent is None: self._parent = CallTree.ROOT else: self._parent = parent self._children = [] def calls(self, func, calltime): """ If a function calls another one, call this method to insert it into the tree at the appropriate place. @return: A reference to the newly created child node. """ child = CallTree(func, calltime, self) self._children.append(child) return child def getParent(self, func): """ Retrieve the last parent of the current node that has the name given by func. If this function is not on a parent, then create it as new child of root @return: A reference to the parent. 
""" tree = self while tree != CallTree.ROOT and tree._func != func: tree = tree._parent if tree == CallTree.ROOT: child = CallTree.ROOT.calls(func, None) return child return tree def __repr__(self): return self.__toString("", True) def __toString(self, branch, lastChild): if self._time is not None: s = "%s----%s (%s)\n" % (branch, self._func, self._time) else: s = "%s----%s\n" % (branch, self._func) i = 0 if lastChild: branch = branch[:-1] + " " while i < len(self._children): if i != len(self._children) - 1: s += "%s" % self._children[i].__toString(branch +\ " |", False) else: s += "%s" % self._children[i].__toString(branch +\ " |", True) i += 1 return s class BrokenLineException(Exception): """If the last line is not complete because of the pipe breakage, we want to stop the processing and ignore this line. """ pass class CommentLineException(Exception): """ If the line is a comment (as in the beginning of the trace file), just ignore it. """ pass def parseLine(line): line = line.strip() if line.startswith("#"): raise CommentLineException m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line) if m is None: raise BrokenLineException return (m.group(1), m.group(2), m.group(3)) def main(): CallTree.ROOT = CallTree("Root (Nowhere)", None, None) tree = CallTree.ROOT for line in sys.stdin: try: calltime, callee, caller = parseLine(line) except BrokenLineException: break except CommentLineException: continue tree = tree.getParent(caller) tree = tree.calls(callee, calltime) print CallTree.ROOT if __name__ == "__main__": main()
gpl-2.0
ramsateesh/designate
designate/objects/blacklist.py
6
1438
# Copyright (c) 2014 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from designate.objects import base class Blacklist(base.DictObjectMixin, base.PersistentObjectMixin, base.DesignateObject): FIELDS = { 'pattern': { 'schema': { 'type': 'string', 'description': 'Regex for blacklisted zone name', 'format': 'regex', 'maxLength': 255, }, 'required': True }, 'description': { 'schema': { 'type': ['string', 'null'], 'description': 'Description for the blacklisted zone', 'maxLength': 160 } } } STRING_KEYS = [ 'id', 'pattern' ] class BlacklistList(base.ListObjectMixin, base.DesignateObject): LIST_ITEM_TYPE = Blacklist
apache-2.0
Distrotech/qemu
scripts/tracetool/transform.py
78
4238
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Type-transformation rules. """ __author__ = "Lluís Vilanova <[email protected]>" __copyright__ = "Copyright 2012-2014, Lluís Vilanova <[email protected]>" __license__ = "GPL version 2 or (at your option) any later version" __maintainer__ = "Stefan Hajnoczi" __email__ = "[email protected]" def _transform_type(type_, trans): if isinstance(trans, str): return trans elif isinstance(trans, dict): if type_ in trans: return _transform_type(type_, trans[type_]) elif None in trans: return _transform_type(type_, trans[None]) else: return type_ elif callable(trans): return trans(type_) else: raise ValueError("Invalid type transformation rule: %s" % trans) def transform_type(type_, *trans): """Return a new type transformed according to the given rules. Applies each of the transformation rules in trans in order. If an element of trans is a string, return it. If an element of trans is a function, call it with type_ as its only argument. If an element of trans is a dict, search type_ in its keys. If type_ is a key, use the value as a transformation rule for type_. Otherwise, if None is a key use the value as a transformation rule for type_. Otherwise, return type_. Parameters ---------- type_ : str Type to transform. trans : list of function or dict Type transformation rules. 
""" if len(trans) == 0: raise ValueError res = type_ for t in trans: res = _transform_type(res, t) return res ################################################## # tcg -> host def _tcg_2_host(type_): if type_ == "TCGv": # force a fixed-size type (target-independent) return "uint64_t" else: return type_ TCG_2_HOST = { "TCGv_i32": "uint32_t", "TCGv_i64": "uint64_t", "TCGv_ptr": "void *", None: _tcg_2_host, } ################################################## # host -> host compatible with tcg sizes HOST_2_TCG_COMPAT = { "uint8_t": "uint32_t", } ################################################## # host/tcg -> tcg def _host_2_tcg(type_): if type_.startswith("TCGv"): return type_ raise ValueError("Don't know how to translate '%s' into a TCG type\n" % type_) HOST_2_TCG = { "uint32_t": "TCGv_i32", "uint64_t": "TCGv_i64", "void *" : "TCGv_ptr", None: _host_2_tcg, } ################################################## # tcg -> tcg helper definition def _tcg_2_helper_def(type_): if type_ == "TCGv": return "target_ulong" else: return type_ TCG_2_TCG_HELPER_DEF = { "TCGv_i32": "uint32_t", "TCGv_i64": "uint64_t", "TCGv_ptr": "void *", None: _tcg_2_helper_def, } ################################################## # tcg -> tcg helper declaration def _tcg_2_tcg_helper_decl_error(type_): raise ValueError("Don't know how to translate type '%s' into a TCG helper declaration type\n" % type_) TCG_2_TCG_HELPER_DECL = { "TCGv" : "tl", "TCGv_ptr": "ptr", "TCGv_i32": "i32", "TCGv_i64": "i64", None: _tcg_2_tcg_helper_decl_error, } ################################################## # host/tcg -> tcg temporal constant allocation def _host_2_tcg_tmp_new(type_): if type_.startswith("TCGv"): return "tcg_temp_new_nop" raise ValueError("Don't know how to translate type '%s' into a TCG temporal allocation" % type_) HOST_2_TCG_TMP_NEW = { "uint32_t": "tcg_const_i32", "uint64_t": "tcg_const_i64", "void *" : "tcg_const_ptr", None: _host_2_tcg_tmp_new, } ################################################## # 
host/tcg -> tcg temporal constant deallocation def _host_2_tcg_tmp_free(type_): if type_.startswith("TCGv"): return "tcg_temp_free_nop" raise ValueError("Don't know how to translate type '%s' into a TCG temporal deallocation" % type_) HOST_2_TCG_TMP_FREE = { "uint32_t": "tcg_temp_free_i32", "uint64_t": "tcg_temp_free_i64", "void *" : "tcg_temp_free_ptr", None: _host_2_tcg_tmp_free, }
gpl-2.0
miqui/python-hpOneView
hpOneView/security.py
2
5456
# -*- coding: utf-8 -*- """ security.py ~~~~~~~~~~~~ This module implements Settings HP OneView REST API """ from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() __title__ = 'security' __version__ = '0.0.1' __copyright__ = '(C) Copyright (2012-2015) Hewlett Packard Enterprise ' \ ' Development LP' __license__ = 'MIT' __status__ = 'Development' ### # (C) Copyright (2012-2015) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
### from hpOneView.common import * from hpOneView.connection import * from hpOneView.activity import * from hpOneView.exceptions import * class security(object): def __init__(self, con): self._con = con ########################################################################### # User management and Roles ########################################################################### def get_users(self): body = self._con.get(uri['users']) return get_members(body) def get_user(self, user): body = self._con.get(uri['users'] + '/' + user) return body def get_user_roles(self, user): body = self._con.get(uri['userRole'] + '/' + user) return get_members(body) def set_user_roles(self, user, roles): request = [] for role in roles: req = {'type': 'RoleNameDtoV2', 'roleName': role} request.append(req) task, body = self._con.put(uri['users'] + '/' + user + '/roles?multiResource=true', request) return body def set_user_role(self, user, role): request = {'type': 'RoleNameDtoV2', 'roleName': role} task, body = self._con.put(uri['users'] + '/' + user + '/roles?multiResource=true', [request]) return body def create_user(self, name, password, enabled=True, fullName='', emailAddress='', officePhone='', mobilePhone='', roles=['Infrastructure administrator']): usr = make_user_dict(name, password, enabled, fullName, emailAddress, officePhone, mobilePhone, roles) task, body = self._con.post(uri['users'], usr) return body def delete_user(self, user): task, body = self._con.delete(uri['users'] + '/' + user) return body def update_user(self, updateUser): task, body = self._con.put(uri['users'], updateUser) return body def get_roles(self): body = self._con.get(uri['roles']) return get_members(body) ########################################################################### # Certificates ########################################################################### def get_certs(self): body = self._con.get(uri['certificates']) return body def get_cert_https(self): body = 
self._con.get(uri['cert-https']) return body def get_cert_ca(self): body = self._con.get(uri['ca']) return body def get_cert_ca_crl(self): body = self._con.get(uri['crl']) return body def gen_rabbitmq_internal_signed_ca(self): request = {'type': 'RabbitMqClientCertV2', 'commonName': 'default'} task, body = self._con.post(uri['rabbitmq'], request) return body def gen_rabbitmq_self_signed_ca(self): request = {'type': 'RabbitMqClientCertV2', 'commonName': 'any', 'signedCert': False} task, body = self._con.post(uri['rabbitmq'], request) return body def get_rabbitmq_kp(self, alias='default'): body = self._con.get(uri['rabbitmq-kp'] + '/' + alias) return body def get_rabbitmq_ca(self, alias='default'): body = self._con.get(uri['rabbitmq'] + '/' + alias) return body def get_active_user_sessions(self): body = self._con.get(uri['activeSessions']) return body def get_category_actions(self): body = self._con.get(uri['category-actions']) return body def get_role_category_actions(self): body = self._con.get(uri['role-category-actions']) return body # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
mit
duncanhawthorne/robot-robot
libs/future/backports/urllib/error.py
81
2715
"""Exception classes raised by urllib. The base exception class is URLError, which inherits from IOError. It doesn't define any behavior of its own, but is the base class for all exceptions defined in this package. HTTPError is an exception class that is also a valid HTTP response instance. It behaves this way because HTTP protocol errors are valid responses, with a status code, headers, and a body. In some contexts, an application may want to handle an exception like a regular response. """ from __future__ import absolute_import, division, unicode_literals from future import standard_library from future.backports.urllib import response as urllib_response __all__ = ['URLError', 'HTTPError', 'ContentTooShortError'] # do these error classes make sense? # make sure all of the IOError stuff is overridden. we just want to be # subtypes. class URLError(IOError): # URLError is a sub-type of IOError, but it doesn't share any of # the implementation. need to override __init__ and __str__. # It sets self.args for compatibility with other EnvironmentError # subclasses, but args doesn't have the typical format with errno in # slot 0 and strerror in slot 1. This may be better than nothing. def __init__(self, reason, filename=None): self.args = reason, self.reason = reason if filename is not None: self.filename = filename def __str__(self): return '<urlopen error %s>' % self.reason class HTTPError(URLError, urllib_response.addinfourl): """Raised when HTTP error occurs, but also acts like non-error return""" __super_init = urllib_response.addinfourl.__init__ def __init__(self, url, code, msg, hdrs, fp): self.code = code self.msg = msg self.hdrs = hdrs self.fp = fp self.filename = url # The addinfourl classes depend on fp being a valid file # object. In some cases, the HTTPError may not have a valid # file object. If this happens, the simplest workaround is to # not initialize the base classes. 
if fp is not None: self.__super_init(fp, hdrs, url, code) def __str__(self): return 'HTTP Error %s: %s' % (self.code, self.msg) # since URLError specifies a .reason attribute, HTTPError should also # provide this attribute. See issue13211 for discussion. @property def reason(self): return self.msg def info(self): return self.hdrs # exception raised when downloaded size does not match content-length class ContentTooShortError(URLError): def __init__(self, message, content): URLError.__init__(self, message) self.content = content
mit
TiVoMaker/boto
boto/file/bucket.py
153
4085
# Copyright 2010 Google Inc. # Copyright (c) 2011, Nexenta Systems Inc. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # File representation of bucket, for use with "file://" URIs. import os from boto.file.key import Key from boto.file.simpleresultset import SimpleResultSet from boto.s3.bucketlistresultset import BucketListResultSet class Bucket(object): def __init__(self, name, contained_key): """Instantiate an anonymous file-based Bucket around a single key. """ self.name = name self.contained_key = contained_key def __iter__(self): return iter(BucketListResultSet(self)) def __str__(self): return 'anonymous bucket for file://' + self.contained_key def delete_key(self, key_name, headers=None, version_id=None, mfa_token=None): """ Deletes a key from the bucket. :type key_name: string :param key_name: The key name to delete :type version_id: string :param version_id: Unused in this subclass. :type mfa_token: tuple or list of strings :param mfa_token: Unused in this subclass. 
""" os.remove(key_name) def get_all_keys(self, headers=None, **params): """ This method returns the single key around which this anonymous Bucket was instantiated. :rtype: SimpleResultSet :return: The result from file system listing the keys requested """ key = Key(self.name, self.contained_key) return SimpleResultSet([key]) def get_key(self, key_name, headers=None, version_id=None, key_type=Key.KEY_REGULAR_FILE): """ Check to see if a particular key exists within the bucket. Returns: An instance of a Key object or None :type key_name: string :param key_name: The name of the key to retrieve :type version_id: string :param version_id: Unused in this subclass. :type stream_type: integer :param stream_type: Type of the Key - Regular File or input/output Stream :rtype: :class:`boto.file.key.Key` :returns: A Key object from this bucket. """ if key_name == '-': return Key(self.name, '-', key_type=Key.KEY_STREAM_READABLE) else: fp = open(key_name, 'rb') return Key(self.name, key_name, fp) def new_key(self, key_name=None, key_type=Key.KEY_REGULAR_FILE): """ Creates a new key :type key_name: string :param key_name: The name of the key to create :rtype: :class:`boto.file.key.Key` :returns: An instance of the newly created key object """ if key_name == '-': return Key(self.name, '-', key_type=Key.KEY_STREAM_WRITABLE) else: dir_name = os.path.dirname(key_name) if dir_name and not os.path.exists(dir_name): os.makedirs(dir_name) fp = open(key_name, 'wb') return Key(self.name, key_name, fp)
mit
nickromano/django-slow-tests
_examples/django18/mysite/settings.py
2
2798
""" Django settings for mysite project. Generated by 'django-admin startproject' using Django 1.8.2. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'vikmt@b6)=_z^3a3ji%2&#znmz)ure%k7xrz@phly(0#&as84z' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'polls', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'django.middleware.security.SecurityMiddleware', ) ROOT_URLCONF = 'mysite.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'mysite.wsgi.application' # Database # 
https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.8/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.8/howto/static-files/ STATIC_URL = '/static/' # Custom test runner TEST_RUNNER = 'django_slowtests.testrunner.DiscoverSlowestTestsRunner' NUM_SLOW_TESTS = 5
mit
qgis/QGIS-Django
qgis-app/plugins/tests/HelloWorld/1.8-author-slashes-error/HelloWorld/HelloWorld.py
20
1094
# -*- coding: utf-8 -*- # Import the PyQt and QGIS libraries from PyQt4.QtCore import * from PyQt4.QtGui import * from qgis.core import * class HelloWorld: def __init__(self, iface): # Save reference to the QGIS interface self.iface = iface self.canvas = iface.mapCanvas() def initGui(self): # Create action that will start plugin self.action = QAction(QIcon(":/plugins/"), "&HelloWorld", self.iface.mainWindow()) # connect the action to the run method QObject.connect(self.action, SIGNAL("activated()"), self.hello_world) # Add toolbar button and menu item self.iface.addPluginToMenu("HelloWorld", self.action) def unload(self): # Remove the plugin menu item and icon self.iface.removePluginMenu("HelloWorld",self.action) # run def hello_world(self): QMessageBox.information(self.iface.mainWindow(), QCoreApplication.translate('HelloWorld', "HelloWorld"), QCoreApplication.translate('HelloWorld', "HelloWorld")) return if __name__ == "__main__": pass
gpl-2.0
mxm/incubator-beam
sdks/python/apache_beam/runners/direct/direct_metrics_test.py
7
9689
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import absolute_import import unittest import hamcrest as hc from apache_beam.metrics.cells import DistributionData from apache_beam.metrics.cells import DistributionResult from apache_beam.metrics.execution import MetricKey from apache_beam.metrics.execution import MetricResult from apache_beam.metrics.execution import MetricUpdates from apache_beam.metrics.metricbase import MetricName from apache_beam.runners.direct.direct_metrics import DirectMetrics class DirectMetricsTest(unittest.TestCase): name1 = MetricName('namespace1', 'name1') name2 = MetricName('namespace1', 'name2') name3 = MetricName('namespace2', 'name1') bundle1 = object() # For this test, any object can be a bundle bundle2 = object() def test_combiner_functions(self): metrics = DirectMetrics() counter = metrics._counters['anykey'] counter.commit_logical(self.bundle1, 5) self.assertEqual(counter.extract_committed(), 5) with self.assertRaises(TypeError): counter.commit_logical(self.bundle1, None) distribution = metrics._distributions['anykey'] distribution.commit_logical(self.bundle1, DistributionData(4, 1, 4, 4)) self.assertEqual(distribution.extract_committed(), DistributionResult(DistributionData(4, 1, 4, 4))) with 
self.assertRaises(AttributeError): distribution.commit_logical(self.bundle1, None) def test_commit_logical_no_filter(self): metrics = DirectMetrics() metrics.commit_logical( self.bundle1, MetricUpdates( counters={MetricKey('step1', self.name1): 5, MetricKey('step1', self.name2): 8}, distributions={ MetricKey('step1', self.name1): DistributionData(8, 2, 3, 5)})) metrics.commit_logical( self.bundle1, MetricUpdates( counters={MetricKey('step2', self.name1): 7, MetricKey('step1', self.name2): 4}, distributions={ MetricKey('step1', self.name1): DistributionData(4, 1, 4, 4)})) results = metrics.query() hc.assert_that( results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name2), 12, 0), MetricResult(MetricKey('step2', self.name1), 7, 0), MetricResult(MetricKey('step1', self.name1), 5, 0)])) hc.assert_that( results['distributions'], hc.contains_inanyorder( MetricResult(MetricKey('step1', self.name1), DistributionResult( DistributionData(12, 3, 3, 5)), DistributionResult( DistributionData(0, 0, None, None))))) def test_apply_physical_no_filter(self): metrics = DirectMetrics() metrics.update_physical(object(), MetricUpdates( counters={MetricKey('step1', self.name1): 5, MetricKey('step1', self.name3): 8})) metrics.update_physical(object(), MetricUpdates( counters={MetricKey('step2', self.name1): 7, MetricKey('step1', self.name3): 4})) results = metrics.query() hc.assert_that(results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name1), 0, 5), MetricResult(MetricKey('step1', self.name3), 0, 12), MetricResult(MetricKey('step2', self.name1), 0, 7)])) metrics.commit_physical(object(), MetricUpdates()) results = metrics.query() hc.assert_that(results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name1), 0, 5), MetricResult(MetricKey('step1', self.name3), 0, 12), MetricResult(MetricKey('step2', self.name1), 0, 7)])) def test_apply_physical_logical(self): metrics = DirectMetrics() dist_zero = 
DistributionData(0, 0, None, None) metrics.update_physical( object(), MetricUpdates( counters={MetricKey('step1', self.name1): 7, MetricKey('step1', self.name2): 5, MetricKey('step2', self.name1): 1}, distributions={MetricKey('step1', self.name1): DistributionData(3, 1, 3, 3), MetricKey('step2', self.name3): DistributionData(8, 2, 4, 4)})) results = metrics.query() hc.assert_that(results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name1), 0, 7), MetricResult(MetricKey('step1', self.name2), 0, 5), MetricResult(MetricKey('step2', self.name1), 0, 1)])) hc.assert_that(results['distributions'], hc.contains_inanyorder(*[ MetricResult( MetricKey('step1', self.name1), DistributionResult(dist_zero), DistributionResult(DistributionData(3, 1, 3, 3))), MetricResult( MetricKey('step2', self.name3), DistributionResult(dist_zero), DistributionResult(DistributionData(8, 2, 4, 4)))])) metrics.commit_physical( object(), MetricUpdates( counters={MetricKey('step1', self.name1): -3, MetricKey('step2', self.name1): -5}, distributions={MetricKey('step1', self.name1): DistributionData(8, 4, 1, 5), MetricKey('step2', self.name2): DistributionData(8, 8, 1, 1)})) results = metrics.query() hc.assert_that(results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name1), 0, 4), MetricResult(MetricKey('step1', self.name2), 0, 5), MetricResult(MetricKey('step2', self.name1), 0, -4)])) hc.assert_that(results['distributions'], hc.contains_inanyorder(*[ MetricResult( MetricKey('step1', self.name1), DistributionResult(dist_zero), DistributionResult(DistributionData(11, 5, 1, 5))), MetricResult( MetricKey('step2', self.name3), DistributionResult(dist_zero), DistributionResult(DistributionData(8, 2, 4, 4))), MetricResult( MetricKey('step2', self.name2), DistributionResult(dist_zero), DistributionResult(DistributionData(8, 8, 1, 1)))])) metrics.commit_logical( object(), MetricUpdates( counters={MetricKey('step1', self.name1): 3, 
MetricKey('step1', self.name2): 5, MetricKey('step2', self.name1): -3}, distributions={MetricKey('step1', self.name1): DistributionData(11, 5, 1, 5), MetricKey('step2', self.name2): DistributionData(8, 8, 1, 1), MetricKey('step2', self.name3): DistributionData(4, 1, 4, 4)})) results = metrics.query() hc.assert_that(results['counters'], hc.contains_inanyorder(*[ MetricResult(MetricKey('step1', self.name1), 3, 4), MetricResult(MetricKey('step1', self.name2), 5, 5), MetricResult(MetricKey('step2', self.name1), -3, -4)])) hc.assert_that(results['distributions'], hc.contains_inanyorder(*[ MetricResult( MetricKey('step1', self.name1), DistributionResult(DistributionData(11, 5, 1, 5)), DistributionResult(DistributionData(11, 5, 1, 5))), MetricResult( MetricKey('step2', self.name3), DistributionResult(DistributionData(4, 1, 4, 4)), DistributionResult(DistributionData(8, 2, 4, 4))), MetricResult( MetricKey('step2', self.name2), DistributionResult(DistributionData(8, 8, 1, 1)), DistributionResult(DistributionData(8, 8, 1, 1)))])) if __name__ == '__main__': unittest.main()
apache-2.0
ScoutAlarm/node-sodium-linux
node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
778
65880
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for the MSVSSettings.py file.""" import StringIO import unittest import gyp.MSVSSettings as MSVSSettings class TestSequenceFunctions(unittest.TestCase): def setUp(self): self.stderr = StringIO.StringIO() def _ExpectedWarnings(self, expected): """Compares recorded lines to expected warnings.""" self.stderr.seek(0) actual = self.stderr.read().split('\n') actual = [line for line in actual if line] self.assertEqual(sorted(expected), sorted(actual)) def testValidateMSVSSettings_tool_names(self): """Tests that only MSVS tool names are allowed.""" MSVSSettings.ValidateMSVSSettings( {'VCCLCompilerTool': {}, 'VCLinkerTool': {}, 'VCMIDLTool': {}, 'foo': {}, 'VCResourceCompilerTool': {}, 'VCLibrarianTool': {}, 'VCManifestTool': {}, 'ClCompile': {}}, self.stderr) self._ExpectedWarnings([ 'Warning: unrecognized tool foo', 'Warning: unrecognized tool ClCompile']) def testValidateMSVSSettings_settings(self): """Tests that for invalid MSVS settings.""" MSVSSettings.ValidateMSVSSettings( {'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': ['string1', 'string2'], 'AdditionalUsingDirectories': 'folder1;folder2', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': '0', 'BasicRuntimeChecks': '5', 'BrowseInformation': 'fdkslj', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': '-1', 'CompileAs': '1', 'DebugInformationFormat': '2', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'string1;string2', 'EnableEnhancedInstructionSet': '1', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'Enableprefast': 'bogus', 'ErrorReporting': 
'1', 'ExceptionHandling': '1', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '1', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2', 'ForcedUsingFiles': 'file1;file2', 'GeneratePreprocessedFile': '1', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '1', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '1', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderThrough': 'a_file_name', 'PreprocessorDefinitions': 'string1;string2', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': '1', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '1', 'UseUnicodeResponseFiles': 'true', 'WarnAsError': 'true', 'WarningLevel': '1', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name', 'ZZXYZ': 'bogus'}, 'VCLinkerTool': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalManifestDependencies': 'file1;file2', 'AdditionalOptions': 'a string1', 'AddModuleNamesToAssembly': 'file1;file2', 'AllowIsolation': 'true', 'AssemblyDebug': '2', 'AssemblyLinkResource': 'file1;file2', 'BaseAddress': 'a string1', 'CLRImageType': '2', 'CLRThreadAttribute': '2', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '2', 'DelayLoadDLLs': 'file1;file2', 'DelaySign': 'true', 'Driver': '2', 'EmbedManagedResourceFile': 'file1;file2', 'EnableCOMDATFolding': '2', 'EnableUAC': 'true', 'EntryPointSymbol': 'a string1', 'ErrorReporting': '2', 
'FixedBaseAddress': '2', 'ForceSymbolReferences': 'file1;file2', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a string1', 'HeapReserveSize': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'file1;file2', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': '2', 'LinkIncremental': '2', 'LinkLibraryDependencies': 'true', 'LinkTimeCodeGeneration': '2', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a string1', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'OptimizeForWindows98': '1', 'OptimizeReferences': '2', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': '2', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 'true', 'SetChecksum': 'true', 'ShowProgress': '2', 'StackCommitSize': 'a string1', 'StackReserveSize': 'a string1', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': '2', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '2', 'TerminalServerAware': '2', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': '2', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'true', 'UseUnicodeResponseFiles': 'true', 'Version': 'a string1'}, 'VCMIDLTool': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'CPreprocessOptions': 'a string1', 'DefaultCharType': '1', 'DLLDataFileName': 'a_file_name', 'EnableErrorChecks': '1', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 
'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'notgood': 'bogus', 'OutputDirectory': 'a string1', 'PreprocessorDefinitions': 'string1;string2', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TargetEnvironment': '1', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'string1;string2', 'ValidateParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '1'}, 'VCResourceCompilerTool': { 'AdditionalOptions': 'a string1', 'AdditionalIncludeDirectories': 'folder1;folder2', 'Culture': '1003', 'IgnoreStandardIncludePath': 'true', 'notgood2': 'bogus', 'PreprocessorDefinitions': 'string1;string2', 'ResourceOutputFileName': 'a string1', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2'}, 'VCLibrarianTool': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'ExportNamedFunctions': 'string1;string2', 'ForceSymbolReferences': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2', 'LinkLibraryDependencies': 'true', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'file1;file2', 'AdditionalOptions': 'a string1', 'AssemblyIdentity': 'a string1', 'ComponentFileName': 'a_file_name', 'DependencyInformationFile': 'a_file_name', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a string1', 'ManifestResourceFile': 'a_file_name', 'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 
'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'truel', 'UpdateFileHashesSearchPath': 'a_file_name', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'true', 'VerboseOutput': 'true'}}, self.stderr) self._ExpectedWarnings([ 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, ' 'index value (5) not in expected range [0, 4)', 'Warning: for VCCLCompilerTool/BrowseInformation, ' "invalid literal for int() with base 10: 'fdkslj'", 'Warning: for VCCLCompilerTool/CallingConvention, ' 'index value (-1) not in expected range [0, 3)', 'Warning: for VCCLCompilerTool/DebugInformationFormat, ' 'converted value for 2 not specified.', 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast', 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ', 'Warning: for VCLinkerTool/TargetMachine, ' 'converted value for 2 not specified.', 'Warning: unrecognized setting VCMIDLTool/notgood', 'Warning: unrecognized setting VCResourceCompilerTool/notgood2', 'Warning: for VCManifestTool/UpdateFileHashes, ' "expected bool; got 'truel'" '']) def testValidateMSBuildSettings_settings(self): """Tests that for invalid MSBuild settings.""" MSVSSettings.ValidateMSBuildSettings( {'ClCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': ['string1', 'string2'], 'AdditionalUsingDirectories': 'folder1;folder2', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': 'NoListing', 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', 'BrowseInformation': 'false', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'BuildingInIDE': 'true', 'CallingConvention': 'Cdecl', 'CompileAs': 'CompileAsC', 'CompileAsManaged': 'Pure', 'CreateHotpatchableImage': 'true', 'DebugInformationFormat': 'ProgramDatabase', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'string1;string2', 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', 'EnableFiberSafeOptimizations': 
'true', 'EnablePREfast': 'true', 'Enableprefast': 'bogus', 'ErrorReporting': 'Prompt', 'ExceptionHandling': 'SyncCThrow', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Neither', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Precise', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2', 'ForcedUsingFiles': 'file1;file2', 'FunctionLevelLinking': 'false', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'OnlyExplicitInline', 'IntrinsicFunctions': 'false', 'MinimalRebuild': 'true', 'MultiProcessorCompilation': 'true', 'ObjectFileName': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Disabled', 'PrecompiledHeader': 'NotUsing', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderOutputFile': 'a_file_name', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'string1;string2', 'PreprocessOutputPath': 'a string1', 'PreprocessSuppressLineNumbers': 'false', 'PreprocessToFile': 'false', 'ProcessorNumber': '33', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': 'MultiThreaded', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1Byte', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'TreatSpecificWarningsAsErrors': 'string1;string2', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'string1;string2', 'UseFullPaths': 'true', 'UseUnicodeForAssemblerListing': 'true', 'WarningLevel': 'TurnOffAllWarnings', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name', 'ZZXYZ': 'bogus'}, 'Link': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalManifestDependencies': 'file1;file2', 'AdditionalOptions': 'a string1', 
'AddModuleNamesToAssembly': 'file1;file2', 'AllowIsolation': 'true', 'AssemblyDebug': '', 'AssemblyLinkResource': 'file1;file2', 'BaseAddress': 'a string1', 'BuildingInIDE': 'true', 'CLRImageType': 'ForceIJWImage', 'CLRSupportLastError': 'Enabled', 'CLRThreadAttribute': 'MTAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'CreateHotPatchableImage': 'X86Image', 'DataExecutionPrevention': 'false', 'DelayLoadDLLs': 'file1;file2', 'DelaySign': 'true', 'Driver': 'NotSet', 'EmbedManagedResourceFile': 'file1;file2', 'EnableCOMDATFolding': 'false', 'EnableUAC': 'true', 'EntryPointSymbol': 'a string1', 'FixedBaseAddress': 'false', 'ForceFileOutput': 'Enabled', 'ForceSymbolReferences': 'file1;file2', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a string1', 'HeapReserveSize': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'a_file_list', 'ImageHasSafeExceptionHandlers': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': 'false', 'LinkDLL': 'true', 'LinkErrorReporting': 'SendErrorReport', 'LinkStatus': 'true', 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a string1', 'MidlCommandFile': 'a_file_name', 'MinimumRequiredVersion': 'a string1', 'ModuleDefinitionFile': 'a_file_name', 'MSDOSStubFileName': 'a_file_name', 'NoEntryPoint': 'true', 'OptimizeReferences': 'false', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'PreventDllBinding': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SectionAlignment': '33', 'SetChecksum': 'true', 'ShowProgress': 'LinkVerboseREF', 'SpecifySectionAttributes': 'a string1', 
'StackCommitSize': 'a string1', 'StackReserveSize': 'a string1', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': 'Console', 'SupportNobindOfDelayLoadedDLL': 'true', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineX86', 'TerminalServerAware': 'false', 'TrackerLogDirectory': 'a_folder', 'TreatLinkerWarningAsErrors': 'true', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': 'AsInvoker', 'UACUIAccess': 'true', 'Version': 'a string1'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'Culture': '0x236', 'IgnoreStandardIncludePath': 'true', 'NullTerminateStrings': 'true', 'PreprocessorDefinitions': 'string1;string2', 'ResourceOutputFileName': 'a string1', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'UndefinePreprocessorDefinitions': 'string1;string2'}, 'Midl': { 'AdditionalIncludeDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'ApplicationConfigurationMode': 'true', 'ClientStubFile': 'a_file_name', 'CPreprocessOptions': 'a string1', 'DefaultCharType': 'Signed', 'DllDataFileName': 'a_file_name', 'EnableErrorChecks': 'EnableCustom', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateClientFiles': 'Stub', 'GenerateServerFiles': 'None', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'LocaleID': '33', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a string1', 'PreprocessorDefinitions': 'string1;string2', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'ServerStubFile': 'a_file_name', 'StructMemberAlignment': 
'NotSet', 'SuppressCompilerWarnings': 'true', 'SuppressStartupBanner': 'true', 'TargetEnvironment': 'Itanium', 'TrackerLogDirectory': 'a_folder', 'TypeLibFormat': 'NewFormat', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'string1;string2', 'ValidateAllParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '1'}, 'Lib': { 'AdditionalDependencies': 'file1;file2', 'AdditionalLibraryDirectories': 'folder1;folder2', 'AdditionalOptions': 'a string1', 'DisplayLibrary': 'a string1', 'ErrorReporting': 'PromptImmediately', 'ExportNamedFunctions': 'string1;string2', 'ForceSymbolReferences': 'a string1', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2', 'LinkTimeCodeGeneration': 'true', 'MinimumRequiredVersion': 'a string1', 'ModuleDefinitionFile': 'a_file_name', 'Name': 'a_file_name', 'OutputFile': 'a_file_name', 'RemoveObjects': 'file1;file2', 'SubSystem': 'Console', 'SuppressStartupBanner': 'true', 'TargetMachine': 'MachineX86i', 'TrackerLogDirectory': 'a_folder', 'TreatLibWarningAsErrors': 'true', 'UseUnicodeResponseFiles': 'true', 'Verbose': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'file1;file2', 'AdditionalOptions': 'a string1', 'AssemblyIdentity': 'a string1', 'ComponentFileName': 'a_file_name', 'EnableDPIAwareness': 'fal', 'GenerateCatalogFiles': 'truel', 'GenerateCategoryTags': 'true', 'InputResourceManifests': 'a string1', 'ManifestFromManagedAssembly': 'a_file_name', 'notgood3': 'bogus', 'OutputManifestFile': 'a_file_name', 'OutputResourceManifests': 'a string1', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressDependencyElement': 'true', 'SuppressStartupBanner': 'true', 'TrackerLogDirectory': 'a_folder', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'VerboseOutput': 'true'}, 'ProjectReference': { 'LinkLibraryDependencies': 'true', 'UseLibraryDependencyInputs': 'true'}, 'ManifestResourceCompile': { 
'ResourceOutputFileName': 'a_file_name'}, '': { 'EmbedManifest': 'true', 'GenerateManifest': 'true', 'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}}, self.stderr) self._ExpectedWarnings([ 'Warning: unrecognized setting ClCompile/Enableprefast', 'Warning: unrecognized setting ClCompile/ZZXYZ', 'Warning: unrecognized setting Manifest/notgood3', 'Warning: for Manifest/GenerateCatalogFiles, ' "expected bool; got 'truel'", 'Warning: for Lib/TargetMachine, unrecognized enumerated value ' 'MachineX86i', "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"]) def testConvertToMSBuildSettings_empty(self): """Tests an empty conversion.""" msvs_settings = {} expected_msbuild_settings = {} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_minimal(self): """Tests a minimal conversion.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/foo', 'BasicRuntimeChecks': '0', }, 'VCLinkerTool': { 'LinkTimeCodeGeneration': '1', 'ErrorReporting': '1', 'DataExecutionPrevention': '2', }, } expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/foo', 'BasicRuntimeChecks': 'Default', }, 'Link': { 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'LinkErrorReporting': 'PromptImmediately', 'DataExecutionPrevention': 'true', }, } actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_warnings(self): """Tests conversion that generates warnings.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': '1', 'AdditionalOptions': '2', # These are incorrect values: 'BasicRuntimeChecks': '12', 'BrowseInformation': 
'21', 'UsePrecompiledHeader': '13', 'GeneratePreprocessedFile': '14'}, 'VCLinkerTool': { # These are incorrect values: 'Driver': '10', 'LinkTimeCodeGeneration': '31', 'ErrorReporting': '21', 'FixedBaseAddress': '6'}, 'VCResourceCompilerTool': { # Custom 'Culture': '1003'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': '1', 'AdditionalOptions': '2'}, 'Link': {}, 'ResourceCompile': { # Custom 'Culture': '0x03eb'}} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([ 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to ' 'MSBuild, index value (12) not in expected range [0, 4)', 'Warning: while converting VCCLCompilerTool/BrowseInformation to ' 'MSBuild, index value (21) not in expected range [0, 3)', 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to ' 'MSBuild, index value (13) not in expected range [0, 3)', 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to ' 'MSBuild, value must be one of [0, 1, 2]; got 14', 'Warning: while converting VCLinkerTool/Driver to ' 'MSBuild, index value (10) not in expected range [0, 4)', 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to ' 'MSBuild, index value (31) not in expected range [0, 5)', 'Warning: while converting VCLinkerTool/ErrorReporting to ' 'MSBuild, index value (21) not in expected range [0, 3)', 'Warning: while converting VCLinkerTool/FixedBaseAddress to ' 'MSBuild, index value (6) not in expected range [0, 3)', ]) def testConvertToMSBuildSettings_full_synthetic(self): """Tests conversion of all the MSBuild settings.""" msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'AdditionalUsingDirectories': 'folder1;folder2;folder3', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': '0', 'BasicRuntimeChecks': 
'1', 'BrowseInformation': '2', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': '0', 'CompileAs': '1', 'DebugInformationFormat': '4', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'd1;d2;d3', 'EnableEnhancedInstructionSet': '0', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'ErrorReporting': '1', 'ExceptionHandling': '2', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '0', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2;file3', 'ForcedUsingFiles': 'file1;file2;file3', 'GeneratePreprocessedFile': '1', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '2', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '3', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderThrough': 'a_file_name', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': '0', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1', 'SuppressStartupBanner': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '1', 'UseUnicodeResponseFiles': 'true', 'WarnAsError': 'true', 'WarningLevel': '2', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name'}, 'VCLinkerTool': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalLibraryDirectories_excluded': 
'folder1;folder2;folder3', 'AdditionalManifestDependencies': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AddModuleNamesToAssembly': 'file1;file2;file3', 'AllowIsolation': 'true', 'AssemblyDebug': '0', 'AssemblyLinkResource': 'file1;file2;file3', 'BaseAddress': 'a_string', 'CLRImageType': '1', 'CLRThreadAttribute': '2', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '0', 'DelayLoadDLLs': 'file1;file2;file3', 'DelaySign': 'true', 'Driver': '1', 'EmbedManagedResourceFile': 'file1;file2;file3', 'EnableCOMDATFolding': '0', 'EnableUAC': 'true', 'EntryPointSymbol': 'a_string', 'ErrorReporting': '0', 'FixedBaseAddress': '1', 'ForceSymbolReferences': 'file1;file2;file3', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': 'a_string', 'HeapReserveSize': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'file1;file2;file3', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': '2', 'LinkIncremental': '1', 'LinkLibraryDependencies': 'true', 'LinkTimeCodeGeneration': '2', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a_string', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'OptimizeForWindows98': '1', 'OptimizeReferences': '0', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': '1', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 'true', 'SetChecksum': 'true', 'ShowProgress': '0', 'StackCommitSize': 'a_string', 'StackReserveSize': 'a_string', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': '2', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 
'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '3', 'TerminalServerAware': '2', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': '1', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'false', 'UseUnicodeResponseFiles': 'true', 'Version': 'a_string'}, 'VCResourceCompilerTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'Culture': '1003', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'ResourceOutputFileName': 'a_string', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, 'VCMIDLTool': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'CPreprocessOptions': 'a_string', 'DefaultCharType': '0', 'DLLDataFileName': 'a_file_name', 'EnableErrorChecks': '2', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a_string', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '3', 'SuppressStartupBanner': 'true', 'TargetEnvironment': '1', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'ValidateParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '4'}, 'VCLibrarianTool': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'ExportNamedFunctions': 'd1;d2;d3', 'ForceSymbolReferences': 'a_string', 
'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'LinkLibraryDependencies': 'true', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AssemblyIdentity': 'a_string', 'ComponentFileName': 'a_file_name', 'DependencyInformationFile': 'a_file_name', 'EmbedManifest': 'true', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a_string', 'ManifestResourceFile': 'my_name', 'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'true', 'VerboseOutput': 'true'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string /J', 'AdditionalUsingDirectories': 'folder1;folder2;folder3', 'AssemblerListingLocation': 'a_file_name', 'AssemblerOutput': 'NoListing', 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', 'BrowseInformation': 'true', 'BrowseInformationFile': 'a_file_name', 'BufferSecurityCheck': 'true', 'CallingConvention': 'Cdecl', 'CompileAs': 'CompileAsC', 'DebugInformationFormat': 'EditAndContinue', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'd1;d2;d3', 'EnableEnhancedInstructionSet': 'NotSet', 'EnableFiberSafeOptimizations': 'true', 'EnablePREfast': 'true', 'ErrorReporting': 'Prompt', 'ExceptionHandling': 'Async', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Neither', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Strict', 'ForceConformanceInForLoopScope': 'true', 'ForcedIncludeFiles': 'file1;file2;file3', 'ForcedUsingFiles': 'file1;file2;file3', 'FunctionLevelLinking': 
'true', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'AnySuitable', 'IntrinsicFunctions': 'true', 'MinimalRebuild': 'true', 'ObjectFileName': 'a_file_name', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Full', 'PrecompiledHeader': 'Create', 'PrecompiledHeaderFile': 'a_file_name', 'PrecompiledHeaderOutputFile': 'a_file_name', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'PreprocessSuppressLineNumbers': 'false', 'PreprocessToFile': 'true', 'ProgramDataBaseFileName': 'a_file_name', 'RuntimeLibrary': 'MultiThreaded', 'RuntimeTypeInfo': 'true', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '1Byte', 'SuppressStartupBanner': 'true', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'true', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'UseFullPaths': 'true', 'WarningLevel': 'Level2', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': 'a_file_name'}, 'Link': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalManifestDependencies': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AddModuleNamesToAssembly': 'file1;file2;file3', 'AllowIsolation': 'true', 'AssemblyDebug': '', 'AssemblyLinkResource': 'file1;file2;file3', 'BaseAddress': 'a_string', 'CLRImageType': 'ForceIJWImage', 'CLRThreadAttribute': 'STAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '', 'DelayLoadDLLs': 'file1;file2;file3', 'DelaySign': 'true', 'Driver': 'Driver', 'EmbedManagedResourceFile': 'file1;file2;file3', 'EnableCOMDATFolding': '', 'EnableUAC': 'true', 'EntryPointSymbol': 'a_string', 'FixedBaseAddress': 'false', 'ForceSymbolReferences': 'file1;file2;file3', 'FunctionOrder': 'a_file_name', 'GenerateDebugInformation': 'true', 
'GenerateMapFile': 'true', 'HeapCommitSize': 'a_string', 'HeapReserveSize': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'ImportLibrary': 'a_file_name', 'KeyContainer': 'a_file_name', 'KeyFile': 'a_file_name', 'LargeAddressAware': 'true', 'LinkErrorReporting': 'NoErrorReport', 'LinkTimeCodeGeneration': 'PGInstrument', 'ManifestFile': 'a_file_name', 'MapExports': 'true', 'MapFileName': 'a_file_name', 'MergedIDLBaseFileName': 'a_file_name', 'MergeSections': 'a_string', 'MidlCommandFile': 'a_file_name', 'ModuleDefinitionFile': 'a_file_name', 'NoEntryPoint': 'true', 'OptimizeReferences': '', 'OutputFile': 'a_file_name', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': 'a_file_name', 'ProgramDatabaseFile': 'a_file_name', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SetChecksum': 'true', 'ShowProgress': 'NotSet', 'StackCommitSize': 'a_string', 'StackReserveSize': 'a_string', 'StripPrivateSymbols': 'a_file_name', 'SubSystem': 'Windows', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'true', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineARM', 'TerminalServerAware': 'true', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'a_file_name', 'TypeLibraryResourceID': '33', 'UACExecutionLevel': 'HighestAvailable', 'UACUIAccess': 'true', 'Version': 'a_string'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'Culture': '0x03eb', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': 'd1;d2;d3', 'ResourceOutputFileName': 'a_string', 'ShowProgress': 'true', 'SuppressStartupBanner': 'true', 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, 'Midl': { 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'CPreprocessOptions': 'a_string', 'DefaultCharType': 'Unsigned', 'DllDataFileName': 
'a_file_name', 'EnableErrorChecks': 'All', 'ErrorCheckAllocations': 'true', 'ErrorCheckBounds': 'true', 'ErrorCheckEnumRange': 'true', 'ErrorCheckRefPointers': 'true', 'ErrorCheckStubData': 'true', 'GenerateStublessProxies': 'true', 'GenerateTypeLibrary': 'true', 'HeaderFileName': 'a_file_name', 'IgnoreStandardIncludePath': 'true', 'InterfaceIdentifierFileName': 'a_file_name', 'MkTypLibCompatible': 'true', 'OutputDirectory': 'a_string', 'PreprocessorDefinitions': 'd1;d2;d3', 'ProxyFileName': 'a_file_name', 'RedirectOutputAndErrors': 'a_file_name', 'StructMemberAlignment': '4', 'SuppressStartupBanner': 'true', 'TargetEnvironment': 'Win32', 'TypeLibraryName': 'a_file_name', 'UndefinePreprocessorDefinitions': 'd1;d2;d3', 'ValidateAllParameters': 'true', 'WarnAsError': 'true', 'WarningLevel': '4'}, 'Lib': { 'AdditionalDependencies': 'file1;file2;file3', 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', 'AdditionalOptions': 'a_string', 'ExportNamedFunctions': 'd1;d2;d3', 'ForceSymbolReferences': 'a_string', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', 'ModuleDefinitionFile': 'a_file_name', 'OutputFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'UseUnicodeResponseFiles': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'file1;file2;file3', 'AdditionalOptions': 'a_string', 'AssemblyIdentity': 'a_string', 'ComponentFileName': 'a_file_name', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'a_string', 'OutputManifestFile': 'a_file_name', 'RegistrarScriptFile': 'a_file_name', 'ReplacementsFile': 'a_file_name', 'SuppressStartupBanner': 'true', 'TypeLibraryFile': 'a_file_name', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'a_file_name', 'VerboseOutput': 'true'}, 'ManifestResourceCompile': { 'ResourceOutputFileName': 'my_name'}, 'ProjectReference': { 'LinkLibraryDependencies': 'true', 'UseLibraryDependencyInputs': 'false'}, '': { 'EmbedManifest': 'true', 'GenerateManifest': 'true', 
'IgnoreImportLibrary': 'true', 'LinkIncremental': 'false'}} actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) def testConvertToMSBuildSettings_actual(self): """Tests the conversion of an actual project. A VS2008 project with most of the options defined was created through the VS2008 IDE. It was then converted to VS2010. The tool settings found in the .vcproj and .vcxproj files were converted to the two dictionaries msvs_settings and expected_msbuild_settings. Note that for many settings, the VS2010 converter adds macros like %(AdditionalIncludeDirectories) to make sure than inherited values are included. Since the Gyp projects we generate do not use inheritance, we removed these macros. They were: ClCompile: AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' AdditionalOptions: ' %(AdditionalOptions)' AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' DisableSpecificWarnings: ';%(DisableSpecificWarnings)', ForcedIncludeFiles: ';%(ForcedIncludeFiles)', ForcedUsingFiles: ';%(ForcedUsingFiles)', PreprocessorDefinitions: ';%(PreprocessorDefinitions)', UndefinePreprocessorDefinitions: ';%(UndefinePreprocessorDefinitions)', Link: AdditionalDependencies: ';%(AdditionalDependencies)', AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', AdditionalManifestDependencies: ';%(AdditionalManifestDependencies)', AdditionalOptions: ' %(AdditionalOptions)', AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', AssemblyLinkResource: ';%(AssemblyLinkResource)', DelayLoadDLLs: ';%(DelayLoadDLLs)', EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', ForceSymbolReferences: ';%(ForceSymbolReferences)', IgnoreSpecificDefaultLibraries: ';%(IgnoreSpecificDefaultLibraries)', ResourceCompile: AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', AdditionalOptions: ' %(AdditionalOptions)', 
PreprocessorDefinitions: ';%(PreprocessorDefinitions)', Manifest: AdditionalManifestFiles: ';%(AdditionalManifestFiles)', AdditionalOptions: ' %(AdditionalOptions)', InputResourceManifests: ';%(InputResourceManifests)', """ msvs_settings = { 'VCCLCompilerTool': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/more', 'AdditionalUsingDirectories': 'test', 'AssemblerListingLocation': '$(IntDir)\\a', 'AssemblerOutput': '1', 'BasicRuntimeChecks': '3', 'BrowseInformation': '1', 'BrowseInformationFile': '$(IntDir)\\e', 'BufferSecurityCheck': 'false', 'CallingConvention': '1', 'CompileAs': '1', 'DebugInformationFormat': '4', 'DefaultCharIsUnsigned': 'true', 'Detect64BitPortabilityProblems': 'true', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'abc', 'EnableEnhancedInstructionSet': '1', 'EnableFiberSafeOptimizations': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'EnablePREfast': 'true', 'ErrorReporting': '2', 'ExceptionHandling': '2', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': '2', 'FloatingPointExceptions': 'true', 'FloatingPointModel': '1', 'ForceConformanceInForLoopScope': 'false', 'ForcedIncludeFiles': 'def', 'ForcedUsingFiles': 'ge', 'GeneratePreprocessedFile': '2', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': '1', 'KeepComments': 'true', 'MinimalRebuild': 'true', 'ObjectFile': '$(IntDir)\\b', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMP': 'true', 'Optimization': '3', 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche', 'PrecompiledHeaderThrough': 'StdAfx.hd', 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb', 'RuntimeLibrary': '3', 'RuntimeTypeInfo': 'false', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '3', 'SuppressStartupBanner': 'false', 'TreatWChar_tAsBuiltInType': 'false', 
'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'wer', 'UseFullPaths': 'true', 'UsePrecompiledHeader': '0', 'UseUnicodeResponseFiles': 'false', 'WarnAsError': 'true', 'WarningLevel': '3', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': '$(IntDir)\\c'}, 'VCLinkerTool': { 'AdditionalDependencies': 'zx', 'AdditionalLibraryDirectories': 'asd', 'AdditionalManifestDependencies': 's2', 'AdditionalOptions': '/mor2', 'AddModuleNamesToAssembly': 'd1', 'AllowIsolation': 'false', 'AssemblyDebug': '1', 'AssemblyLinkResource': 'd5', 'BaseAddress': '23423', 'CLRImageType': '3', 'CLRThreadAttribute': '1', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '0', 'DelayLoadDLLs': 'd4', 'DelaySign': 'true', 'Driver': '2', 'EmbedManagedResourceFile': 'd2', 'EnableCOMDATFolding': '1', 'EnableUAC': 'false', 'EntryPointSymbol': 'f5', 'ErrorReporting': '2', 'FixedBaseAddress': '1', 'ForceSymbolReferences': 'd3', 'FunctionOrder': 'fssdfsd', 'GenerateDebugInformation': 'true', 'GenerateManifest': 'false', 'GenerateMapFile': 'true', 'HeapCommitSize': '13', 'HeapReserveSize': '12', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreDefaultLibraryNames': 'flob;flok', 'IgnoreEmbeddedIDL': 'true', 'IgnoreImportLibrary': 'true', 'ImportLibrary': 'f4', 'KeyContainer': 'f7', 'KeyFile': 'f6', 'LargeAddressAware': '2', 'LinkIncremental': '0', 'LinkLibraryDependencies': 'false', 'LinkTimeCodeGeneration': '1', 'ManifestFile': '$(IntDir)\\$(TargetFileName).2intermediate.manifest', 'MapExports': 'true', 'MapFileName': 'd5', 'MergedIDLBaseFileName': 'f2', 'MergeSections': 'f5', 'MidlCommandFile': 'f1', 'ModuleDefinitionFile': 'sdsd', 'OptimizeForWindows98': '2', 'OptimizeReferences': '2', 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', 'ProgramDatabaseFile': 'Flob.pdb', 'RandomizedBaseAddress': '1', 'RegisterOutput': 'true', 'ResourceOnlyDLL': 
'true', 'SetChecksum': 'false', 'ShowProgress': '1', 'StackCommitSize': '15', 'StackReserveSize': '14', 'StripPrivateSymbols': 'd3', 'SubSystem': '1', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'false', 'SwapRunFromCD': 'true', 'SwapRunFromNet': 'true', 'TargetMachine': '1', 'TerminalServerAware': '1', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'f3', 'TypeLibraryResourceID': '12', 'UACExecutionLevel': '2', 'UACUIAccess': 'true', 'UseLibraryDependencyInputs': 'true', 'UseUnicodeResponseFiles': 'false', 'Version': '333'}, 'VCResourceCompilerTool': { 'AdditionalIncludeDirectories': 'f3', 'AdditionalOptions': '/more3', 'Culture': '3084', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': '_UNICODE;UNICODE2', 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res', 'ShowProgress': 'true'}, 'VCManifestTool': { 'AdditionalManifestFiles': 'sfsdfsd', 'AdditionalOptions': 'afdsdafsd', 'AssemblyIdentity': 'sddfdsadfsa', 'ComponentFileName': 'fsdfds', 'DependencyInformationFile': '$(IntDir)\\mt.depdfd', 'EmbedManifest': 'false', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'asfsfdafs', 'ManifestResourceFile': '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf', 'OutputManifestFile': '$(TargetPath).manifestdfs', 'RegistrarScriptFile': 'sdfsfd', 'ReplacementsFile': 'sdffsd', 'SuppressStartupBanner': 'false', 'TypeLibraryFile': 'sfsd', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'sfsd', 'UseFAT32Workaround': 'true', 'UseUnicodeResponseFiles': 'false', 'VerboseOutput': 'true'}} expected_msbuild_settings = { 'ClCompile': { 'AdditionalIncludeDirectories': 'dir1', 'AdditionalOptions': '/more /J', 'AdditionalUsingDirectories': 'test', 'AssemblerListingLocation': '$(IntDir)a', 'AssemblerOutput': 'AssemblyCode', 'BasicRuntimeChecks': 'EnableFastChecks', 'BrowseInformation': 'true', 'BrowseInformationFile': '$(IntDir)e', 'BufferSecurityCheck': 'false', 'CallingConvention': 'FastCall', 'CompileAs': 'CompileAsC', 
'DebugInformationFormat': 'EditAndContinue', 'DisableLanguageExtensions': 'true', 'DisableSpecificWarnings': 'abc', 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', 'EnableFiberSafeOptimizations': 'true', 'EnablePREfast': 'true', 'ErrorReporting': 'Queue', 'ExceptionHandling': 'Async', 'ExpandAttributedSource': 'true', 'FavorSizeOrSpeed': 'Size', 'FloatingPointExceptions': 'true', 'FloatingPointModel': 'Strict', 'ForceConformanceInForLoopScope': 'false', 'ForcedIncludeFiles': 'def', 'ForcedUsingFiles': 'ge', 'FunctionLevelLinking': 'true', 'GenerateXMLDocumentationFiles': 'true', 'IgnoreStandardIncludePath': 'true', 'InlineFunctionExpansion': 'OnlyExplicitInline', 'IntrinsicFunctions': 'true', 'MinimalRebuild': 'true', 'ObjectFileName': '$(IntDir)b', 'OmitDefaultLibName': 'true', 'OmitFramePointers': 'true', 'OpenMPSupport': 'true', 'Optimization': 'Full', 'PrecompiledHeader': 'NotUsing', # Actual conversion gives '' 'PrecompiledHeaderFile': 'StdAfx.hd', 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche', 'PreprocessKeepComments': 'true', 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', 'PreprocessSuppressLineNumbers': 'true', 'PreprocessToFile': 'true', 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb', 'RuntimeLibrary': 'MultiThreadedDebugDLL', 'RuntimeTypeInfo': 'false', 'ShowIncludes': 'true', 'SmallerTypeCheck': 'true', 'StringPooling': 'true', 'StructMemberAlignment': '4Bytes', 'SuppressStartupBanner': 'false', 'TreatWarningAsError': 'true', 'TreatWChar_tAsBuiltInType': 'false', 'UndefineAllPreprocessorDefinitions': 'true', 'UndefinePreprocessorDefinitions': 'wer', 'UseFullPaths': 'true', 'WarningLevel': 'Level3', 'WholeProgramOptimization': 'true', 'XMLDocumentationFileName': '$(IntDir)c'}, 'Link': { 'AdditionalDependencies': 'zx', 'AdditionalLibraryDirectories': 'asd', 'AdditionalManifestDependencies': 's2', 'AdditionalOptions': '/mor2', 'AddModuleNamesToAssembly': 'd1', 'AllowIsolation': 'false', 'AssemblyDebug': 'true', 
'AssemblyLinkResource': 'd5', 'BaseAddress': '23423', 'CLRImageType': 'ForceSafeILImage', 'CLRThreadAttribute': 'MTAThreadingAttribute', 'CLRUnmanagedCodeCheck': 'true', 'DataExecutionPrevention': '', 'DelayLoadDLLs': 'd4', 'DelaySign': 'true', 'Driver': 'UpOnly', 'EmbedManagedResourceFile': 'd2', 'EnableCOMDATFolding': 'false', 'EnableUAC': 'false', 'EntryPointSymbol': 'f5', 'FixedBaseAddress': 'false', 'ForceSymbolReferences': 'd3', 'FunctionOrder': 'fssdfsd', 'GenerateDebugInformation': 'true', 'GenerateMapFile': 'true', 'HeapCommitSize': '13', 'HeapReserveSize': '12', 'IgnoreAllDefaultLibraries': 'true', 'IgnoreEmbeddedIDL': 'true', 'IgnoreSpecificDefaultLibraries': 'flob;flok', 'ImportLibrary': 'f4', 'KeyContainer': 'f7', 'KeyFile': 'f6', 'LargeAddressAware': 'true', 'LinkErrorReporting': 'QueueForNextLogin', 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest', 'MapExports': 'true', 'MapFileName': 'd5', 'MergedIDLBaseFileName': 'f2', 'MergeSections': 'f5', 'MidlCommandFile': 'f1', 'ModuleDefinitionFile': 'sdsd', 'NoEntryPoint': 'true', 'OptimizeReferences': 'true', 'OutputFile': '$(OutDir)$(ProjectName)2.exe', 'PerUserRedirection': 'true', 'Profile': 'true', 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', 'ProgramDatabaseFile': 'Flob.pdb', 'RandomizedBaseAddress': 'false', 'RegisterOutput': 'true', 'SetChecksum': 'false', 'ShowProgress': 'LinkVerbose', 'StackCommitSize': '15', 'StackReserveSize': '14', 'StripPrivateSymbols': 'd3', 'SubSystem': 'Console', 'SupportUnloadOfDelayLoadedDLL': 'true', 'SuppressStartupBanner': 'false', 'SwapRunFromCD': 'true', 'SwapRunFromNET': 'true', 'TargetMachine': 'MachineX86', 'TerminalServerAware': 'false', 'TurnOffAssemblyGeneration': 'true', 'TypeLibraryFile': 'f3', 'TypeLibraryResourceID': '12', 'UACExecutionLevel': 'RequireAdministrator', 'UACUIAccess': 'true', 'Version': '333'}, 'ResourceCompile': { 'AdditionalIncludeDirectories': 'f3', 
'AdditionalOptions': '/more3', 'Culture': '0x0c0c', 'IgnoreStandardIncludePath': 'true', 'PreprocessorDefinitions': '_UNICODE;UNICODE2', 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res', 'ShowProgress': 'true'}, 'Manifest': { 'AdditionalManifestFiles': 'sfsdfsd', 'AdditionalOptions': 'afdsdafsd', 'AssemblyIdentity': 'sddfdsadfsa', 'ComponentFileName': 'fsdfds', 'GenerateCatalogFiles': 'true', 'InputResourceManifests': 'asfsfdafs', 'OutputManifestFile': '$(TargetPath).manifestdfs', 'RegistrarScriptFile': 'sdfsfd', 'ReplacementsFile': 'sdffsd', 'SuppressStartupBanner': 'false', 'TypeLibraryFile': 'sfsd', 'UpdateFileHashes': 'true', 'UpdateFileHashesSearchPath': 'sfsd', 'VerboseOutput': 'true'}, 'ProjectReference': { 'LinkLibraryDependencies': 'false', 'UseLibraryDependencyInputs': 'true'}, '': { 'EmbedManifest': 'false', 'GenerateManifest': 'false', 'IgnoreImportLibrary': 'true', 'LinkIncremental': '' }, 'ManifestResourceCompile': { 'ResourceOutputFileName': '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'} } actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( msvs_settings, self.stderr) self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) self._ExpectedWarnings([]) if __name__ == '__main__': unittest.main()
mit
ppanczyk/ansible
lib/ansible/module_utils/avi.py
20
3790
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # Copyright (c), Gaurav Rastogi <[email protected]>, 2017 # All rights reserved. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # This module initially matched the namespace of network module avi. However, # that causes namespace import error when other modules from avi namespaces # are imported. Added import of absolute_import to avoid import collisions for # avi.sdk. 
from __future__ import absolute_import import os from distutils.version import LooseVersion HAS_AVI = True try: import avi.sdk sdk_version = getattr(avi.sdk, '__version__', None) if ((sdk_version is None) or (sdk_version and (LooseVersion(sdk_version) < LooseVersion('17.1')))): # It allows the __version__ to be '' as that value is used in development builds raise ImportError from avi.sdk.utils.ansible_utils import avi_ansible_api except ImportError: HAS_AVI = False def avi_common_argument_spec(): """ Returns common arguments for all Avi modules :return: dict """ return dict( controller=dict(default=os.environ.get('AVI_CONTROLLER', '')), username=dict(default=os.environ.get('AVI_USERNAME', '')), password=dict(default=os.environ.get('AVI_PASSWORD', ''), no_log=True), tenant=dict(default='admin'), tenant_uuid=dict(default=''), api_version=dict(default='16.4')) def ansible_return(module, rsp, changed, req=None, existing_obj=None): """ Helper function to return the right ansible return based on the error code and changed status. :param module: AnsibleModule :param rsp: ApiResponse object returned from ApiSession. :param changed: Whether something changed in this module. :param req: Dict data for Avi API call. :param existing_obj: Dict representing current HTTP resource in Avi Controller. Returns: specific ansible module exit function """ if rsp.status_code > 299: return module.fail_json(msg='Error %d Msg %s req: %s' % ( rsp.status_code, rsp.text, req)) if changed and existing_obj: return module.exit_json( changed=changed, obj=rsp.json(), old_obj=existing_obj) return module.exit_json(changed=changed, obj=rsp.json())
gpl-3.0
hesam-setareh/nest-simulator
pynest/nest/tests/test_connect_pairwise_bernoulli.py
4
3399
# -*- coding: utf-8 -*-
#
# test_connect_pairwise_bernoulli.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST.  If not, see <http://www.gnu.org/licenses/>.

import numpy as np
import unittest
import scipy.stats
from . import test_connect_helpers as hf
from .test_connect_parameters import TestParams


class TestPairwiseBernoulli(TestParams):
    """Statistical and structural tests for the 'pairwise_bernoulli' rule.

    Inherits network setup (setUpNetwork, pop1/pop2, nr_threads) from
    TestParams -- those attributes are defined in the parent class, not here.
    """

    # specify connection pattern and specific params
    rule = 'pairwise_bernoulli'
    p = 0.5
    conn_dict = {'rule': rule, 'p': p}
    # sizes of source-, target-population and connection probability for
    # statistical test
    N_s = 50
    N_t = 50
    # Critical values and number of iterations of two level test
    stat_dict = {'alpha2': 0.05, 'n_runs': 20}

    def testStatistics(self):
        """Two-level statistical check of in- and out-degree distributions.

        Level 1: each of n_runs independently seeded networks yields a
        chi-squared p-value of observed vs. expected degrees.
        Level 2: under the null hypothesis those p-values are uniform on
        [0, 1]; a KS test against 'uniform' must not reject at alpha2.
        """
        for fan in ['in', 'out']:
            expected = hf.get_expected_degrees_bernoulli(
                self.p, fan, self.N_s, self.N_t)

            pvalues = []
            for i in range(self.stat_dict['n_runs']):
                # Reseed per run so the n_runs samples are independent.
                hf.reset_seed(i, self.nr_threads)
                self.setUpNetwork(conn_dict=self.conn_dict,
                                  N1=self.N_s,
                                  N2=self.N_t)
                degrees = hf.get_degrees(fan, self.pop1, self.pop2)
                degrees = hf.gather_data(degrees)
                # degrees = self.comm.gather(degrees, root=0)
                # if self.rank == 0:
                # NOTE(review): gather_data presumably returns None on
                # non-root MPI ranks -- only the rank holding the data
                # computes the chi-squared statistic.
                if degrees is not None:
                    chi, p = hf.chi_squared_check(degrees, expected,
                                                  self.rule)
                    pvalues.append(p)
                # Keep all ranks in lockstep each iteration.
                hf.mpi_barrier()
            if degrees is not None:
                ks, p = scipy.stats.kstest(pvalues, 'uniform')
                self.assertTrue(p > self.stat_dict['alpha2'])

    def testAutapses(self):
        """Check the 'autapses' flag with p=1 (self-connections on the
        connectivity-matrix diagonal must be all ones or all zeros)."""
        conn_params = self.conn_dict.copy()
        N = 10
        conn_params['multapses'] = False

        # test that autapses exist
        conn_params['p'] = 1.
        conn_params['autapses'] = True
        pop = hf.nest.Create('iaf_psc_alpha', N)
        hf.nest.Connect(pop, pop, conn_params)
        # make sure all connections do exist
        M = hf.get_connectivity_matrix(pop, pop)
        hf.mpi_assert(np.diag(M), np.ones(N), self)
        hf.nest.ResetKernel()

        # test that autapses were excluded
        conn_params['p'] = 1.
        conn_params['autapses'] = False
        pop = hf.nest.Create('iaf_psc_alpha', N)
        hf.nest.Connect(pop, pop, conn_params)
        # make sure all connections do exist
        M = hf.get_connectivity_matrix(pop, pop)
        hf.mpi_assert(np.diag(M), np.zeros(N), self)


def suite():
    # Collect every test* method of TestPairwiseBernoulli into one suite.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPairwiseBernoulli)
    return suite


def run():
    # Verbose runner for direct command-line invocation.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())


if __name__ == '__main__':
    run()
gpl-2.0
Liyier/learning_log
env/Lib/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py
2965
12784
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: # this table is modified base on win1251BulgarianCharToOrderMap, so # only number <64 is sure valid Latin5_BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 
93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 ) win1251BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 ) # Model Table: # total sequences: 100% # first 512 sequences: 96.9392% # first 1024 sequences:3.0618% # rest sequences: 0.2992% # negative sequences: 0.0020% BulgarianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, 
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, 3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, 0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, 0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, 0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, 0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, 0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, 
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, 2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, 3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, 
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, 3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, 1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, 2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, 2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, 3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, 1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, 2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, 2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, 1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, 2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, 2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, 2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, 1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, 2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, 1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, 3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, 1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, 3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, 1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, 2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, 1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, 2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, 1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, 2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, 1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, 2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, 1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, 0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, 1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, 1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, 1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, 0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, 1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, 1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, 
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, ) Latin5BulgarianModel = { 'charToOrderMap': Latin5_BulgarianCharToOrderMap, 'precedenceMatrix': BulgarianLangModel, 'mTypicalPositiveRatio': 0.969392, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-5" } Win1251BulgarianModel = { 'charToOrderMap': win1251BulgarianCharToOrderMap, 'precedenceMatrix': BulgarianLangModel, 'mTypicalPositiveRatio': 0.969392, 'keepEnglishLetter': False, 'charsetName': "windows-1251" } # flake8: noqa
mit
clubcapra/ngv_dev
CALIBRATION_0/camera_calibration/gui_cam_calibration.py
1
6603
from PySide.QtGui import * import timeit #import camera import cv2 import os import sys import numpy as np def generate_crop_indexes_3d(width, height, crop_width, crop_height): idxs = [] for row in xrange(0, height, crop_height): for col in xrange(0, width / crop_width): indexes = [] for c in xrange(crop_height): indexes.append(range(c * width, c * width + crop_height)) idxs.append(np.add(indexes, col * crop_width + row * width)) return np.asarray(idxs, dtype=np.int64) def generate_crop_pixels_indexes_3d(width, height, crop_width, crop_height): def channel_indexes(pixel_coord): channel_coord = long(pixel_coord) * 3 return range(channel_coord, channel_coord + 3) crop_indexes = generate_crop_indexes_3d(width, height, crop_width, crop_height) pixel_channel_indexes = np.zeros(crop_indexes.shape + (3,), dtype=np.int64) for i in xrange(len(crop_indexes)): for j in xrange(len(crop_indexes[0])): for k in xrange(len(crop_indexes[0][0])): pixel_channel_indexes[i][j][k] = \ channel_indexes(crop_indexes[i][j][k]) return np.int64(pixel_channel_indexes) def generate_indexes(width, height, crop_width, crop_height): indexes = np.arange(width * height).reshape((height, width)) row_count = height / crop_height col_count = width / crop_width pixel_indexes = [] for row in xrange(row_count): for col in xrange(col_count): pixel_indexes.append(np.asarray(indexes[row * crop_height:(row + 1) * crop_height, col * crop_width:(col + 1) * crop_width])) pixel_indexes = np.asarray(pixel_indexes).reshape((2304, 20, 20, 1)) return np.concatenate((pixel_indexes * 3, pixel_indexes * 3 + 1, pixel_indexes * 3 + 2), axis=3) class ElementImage(QWidget): def __init__(self, parent=None): super(ElementImage, self).__init__(parent) self.setFixedSize(22, 22) self.image = QImage(20, 20, QImage.Format_RGB32) self.img_arr = np.ndarray(shape=(20, 20), dtype=np.uint32, buffer=self.image.bits()) np.copyto(self.img_arr, np.zeros((20, 20), dtype=np.uint32)) self.color = QColor(100, 100, 100) def enterEvent(self, e): 
self.color = QColor(255, 150, 0) self.repaint() def leaveEvent(self, e): self.color = QColor(100, 100, 100) self.repaint() def mousePressEvent(self, e): self.color = QColor(255, 255, 0) self.repaint() def mouseReleaseEvent(self, e): self.color = QColor(100, 100, 100) self.repaint() def paintEvent(self, e): qp = QPainter() qp.begin(self) self.__draw_element(qp) qp.end() def __draw_element(self, qp): qp.drawImage(1, 1, self.image) qp.setPen(self.color) qp.drawRect(0, 0, 21, 21) def numpy_buffer(self): return self.img_arr class MainWindow(QWidget): def __init__(self): super(MainWindow, self).__init__() self.element_images = [] self.init() def init(self): def generate_grid_pos(w, h): pos = [] for row in xrange(h): for col in xrange(w): pos.append((row, col)) return pos h_count = 36 w_count = 64 buffer_indexes = generate_crop_pixels_indexes_3d(1280, 720, 20, 20) grid = QGridLayout() count = 0 for coord in generate_grid_pos(w_count, h_count): e_img = ElementImage() self.element_images.append((e_img, e_img.numpy_buffer(), buffer_indexes[count])) grid.addWidget(e_img, coord[0], coord[1]) count += 1 grid.setColumnStretch(w_count, 1) grid.setSpacing(0) grid.setRowStretch(h_count, 1) grid.setVerticalSpacing(0) self.setLayout(grid) self.setGeometry(20, 20, 1550, 850) self.setWindowTitle("Camera Mapping Calibration") self.show() self.display_image() def display_image(self): img = cv2.cvtColor(cv2.imread(os.path.abspath('test.png')), cv2.COLOR_BGR2RGB)[14:, 5:1285] #indexes = generate_indexes(1280, 720, 20, 20) row_count = len(img) col_count = len(img[0]) print row_count, col_count qt_img = np.zeros((row_count, col_count), dtype=np.uint32) #print indexes[0] t = timeit.default_timer() for row in xrange(row_count): for col in xrange(col_count): pixel = img[row][col] qt_img[row][col] = qRgb(pixel[0], pixel[1], pixel[2]) #for img_element in img[row*20:(row+1)*20, col*20:(col+1)*20]: #element_buffer = self.element_images[count][1] #for row in xrange(row_count): # for col in 
xrange(col_count): # pixel = img_element[row][col] # #print pixel, img[row, col] # element_buffer[row][col] = qRgb(pixel[0], pixel[1], pixel[2]) print 'refreshing elements took...', timeit.default_timer() - t, 'seconds' #for row in xrange(row_count): # for col in xrange(col_count): # pixel = img.take(indexes[0]) # print pixel.shape #element_buffer[row][col] = qRgb(pixel[0], pixel[1], pixel[2]) """ t = timeit.default_timer() count = 0 for row in xrange(36): for col in xrange(64): crop = img[row*20:(row+1)*20, col*20:(col+1)*20] pixels = [] for pixel_row in crop: pixels_col = [] for pixel_col in pixel_row: pixels_col.append(qRgb(pixel_col[0], pixel_col[1], pixel_col[2])) pixels.append(pixels_col) np.copyto(self.element_images[count][1], np.asarray(pixels, dtype=np.uint32)) count += 1 print 'refreshing elements took...', timeit.default_timer() - t, 'seconds' """ for i in xrange(len(self.element_images)): self.element_images[i][0].repaint() def main(): app = QApplication(sys.argv) window = MainWindow() sys.exit(app.exec_()) def testing(): pass if __name__ == "__main__": testing() main()
gpl-3.0
jsxc/xmpp-cloud-auth
xclib/tests/30_isuser_stub_test.py
1
2149
# Checks whether the isuser() function works as it should # Stubs the cloud_request() functions for these tests from xclib.sigcloud import sigcloud from xclib import xcauth from xclib.check import assertEqual def setup_module(): global xc, sc xc = xcauth(domain_db={ b'xdomain': b'99999\thttps://remotehost\tydomain\t', b'udomain': b'8888\thttps://oldhost\t', }, default_url='https://localhost', default_secret='01234') sc = sigcloud(xc, 'user1', 'domain1') def teardown_module(): pass def sc_timeout(data): assertEqual(data['operation'], 'isuser') assertEqual(data['username'], 'user1') assertEqual(data['domain'], 'domain1') return (False, None, 'Timeout', None) def test_timeout(): sc.verbose_cloud_request = sc_timeout assertEqual(sc.isuser(), None) def sc_404(data): return (False, 404, None, None) def test_http404(): sc.verbose_cloud_request = sc_404 assertEqual(sc.isuser(), None) def sc_500json(data): return (False, 500, {'result': 'failure'}, None) def test_http500json(): sc.verbose_cloud_request = sc_500json assertEqual(sc.isuser(), None) def sc_malformed(data): return (True, None, {'result': 'success'}, None) def test_malformed(): sc.verbose_cloud_request = sc_malformed assertEqual(sc.isuser(), None) def sc_success(data): return (True, None, { 'result': 'success', 'data': { 'isUser': '1' }}, 'fake body') def test_success(): sc.verbose_cloud_request = sc_success assertEqual(sc.isuser(), True) def sc_xdomain(data): assertEqual(data['operation'], 'isuser') assertEqual(data['username'], 'xuser') assertEqual(data['domain'], 'ydomain') return (True, None, { 'result': 'success', 'data': { 'isUser': '1' }}, 'fake body') def test_xdomain(): sc = sigcloud(xc, 'xuser', 'xdomain') sc.verbose_cloud_request = sc_xdomain assertEqual(sc.isuser(), True) def test_domain_upgrade(): sc = sigcloud(xc, 'uuser', 'udomain') sc.verbose_cloud_request = sc_success assertEqual(sc.isuser(), True)
mit
agdsn/sipa
sipa.py
2
1632
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ sipa.py ~~~~~~~~~~~~~~ This file shall be used to start the Flask app. Specific things are handled in the `sipa` package. """ import argparse import logging from sipa import create_app from sipa.utils import support_hotline_available logger = logging.getLogger(__name__) logger.info('Starting sipa...') if __name__ == "__main__": parser = argparse.ArgumentParser(description="Sipa launcher") parser.add_argument("--debug", action="store_true", help="run Sipa in debug mode") parser.add_argument("--exposed", action="store_const", const='0.0.0.0', dest='host', help="expose Sipa on the network") parser.add_argument("-p", "--port", action="store", help="tcp port to use", type=int, default=5000) args = parser.parse_args() def preparation(app): if args.debug: app.debug = True logger.warning('Running in Debug mode') app = create_app(prepare_callable=preparation) app.run(debug=args.debug, host=args.host, port=args.port) else: # __name__ == 'uwsgi_file_sipa' import uwsgi debug = uwsgi.opt.get('debug', False) app = create_app() if debug: logger.warning("Running in debug mode") app.debug = True from werkzeug.debug import DebuggedApplication app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True) # app will now be used by `uwsgi` @app.context_processor def inject_hotline_status(): return dict(support_hotline_available=support_hotline_available())
mit
EliotBerriot/django
tests/postgres_tests/test_array.py
89
19906
import decimal import json import unittest import uuid from django import forms from django.core import exceptions, serializers, validators from django.core.management import call_command from django.db import IntegrityError, connection, models from django.test import TransactionTestCase, override_settings from django.utils import timezone from . import PostgreSQLTestCase from .models import ( ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel, NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel, PostgreSQLModel, ) try: from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.forms import SimpleArrayField, SplitArrayField except ImportError: pass class TestSaveLoad(PostgreSQLTestCase): def test_integer(self): instance = IntegerArrayModel(field=[1, 2, 3]) instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_char(self): instance = CharArrayModel(field=['hello', 'goodbye']) instance.save() loaded = CharArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_dates(self): instance = DateTimeArrayModel( datetimes=[timezone.now()], dates=[timezone.now().date()], times=[timezone.now().time()], ) instance.save() loaded = DateTimeArrayModel.objects.get() self.assertEqual(instance.datetimes, loaded.datetimes) self.assertEqual(instance.dates, loaded.dates) self.assertEqual(instance.times, loaded.times) def test_tuples(self): instance = IntegerArrayModel(field=(1,)) instance.save() loaded = IntegerArrayModel.objects.get() self.assertSequenceEqual(instance.field, loaded.field) def test_integers_passed_as_strings(self): # This checks that get_prep_value is deferred properly instance = IntegerArrayModel(field=['1']) instance.save() loaded = IntegerArrayModel.objects.get() self.assertEqual(loaded.field, [1]) def test_default_null(self): instance = NullableIntegerArrayModel() instance.save() loaded = 
NullableIntegerArrayModel.objects.get(pk=instance.pk) self.assertEqual(loaded.field, None) self.assertEqual(instance.field, loaded.field) def test_null_handling(self): instance = NullableIntegerArrayModel(field=None) instance.save() loaded = NullableIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) instance = IntegerArrayModel(field=None) with self.assertRaises(IntegrityError): instance.save() def test_nested(self): instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]]) instance.save() loaded = NestedIntegerArrayModel.objects.get() self.assertEqual(instance.field, loaded.field) def test_other_array_types(self): instance = OtherTypesArrayModel( ips=['192.168.0.1', '::1'], uuids=[uuid.uuid4()], decimals=[decimal.Decimal(1.25), 1.75], ) instance.save() loaded = OtherTypesArrayModel.objects.get() self.assertEqual(instance.ips, loaded.ips) self.assertEqual(instance.uuids, loaded.uuids) self.assertEqual(instance.decimals, loaded.decimals) def test_model_set_on_base_field(self): instance = IntegerArrayModel() field = instance._meta.get_field('field') self.assertEqual(field.model, IntegerArrayModel) self.assertEqual(field.base_field.model, IntegerArrayModel) class TestQuerying(PostgreSQLTestCase): def setUp(self): self.objs = [ NullableIntegerArrayModel.objects.create(field=[1]), NullableIntegerArrayModel.objects.create(field=[2]), NullableIntegerArrayModel.objects.create(field=[2, 3]), NullableIntegerArrayModel.objects.create(field=[20, 30, 40]), NullableIntegerArrayModel.objects.create(field=None), ] def test_exact(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__exact=[1]), self.objs[:1] ) def test_isnull(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__isnull=True), self.objs[-1:] ) def test_gt(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__gt=[0]), self.objs[:4] ) def test_lt(self): self.assertSequenceEqual( 
NullableIntegerArrayModel.objects.filter(field__lt=[2]), self.objs[:1] ) def test_in(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]), self.objs[:2] ) def test_contained_by(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]), self.objs[:2] ) def test_contains(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__contains=[2]), self.objs[1:3] ) def test_contains_charfield(self): # Regression for #22907 self.assertSequenceEqual( CharArrayModel.objects.filter(field__contains=['text']), [] ) def test_contained_by_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__contained_by=['text']), [] ) def test_overlap_charfield(self): self.assertSequenceEqual( CharArrayModel.objects.filter(field__overlap=['text']), [] ) def test_index(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0=2), self.objs[1:3] ) def test_index_chained(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0__lt=3), self.objs[0:3] ) def test_index_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0=1), [instance] ) @unittest.expectedFailure def test_index_used_on_nested_data(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0=[1, 2]), [instance] ) def test_overlap(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]), self.objs[0:3] ) def test_len(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__len__lte=2), self.objs[0:3] ) def test_slice(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter(field__0_1=[2]), self.objs[1:3] ) self.assertSequenceEqual( 
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]), self.objs[2:3] ) @unittest.expectedFailure def test_slice_nested(self): instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]]) self.assertSequenceEqual( NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]), [instance] ) def test_usage_in_subquery(self): self.assertSequenceEqual( NullableIntegerArrayModel.objects.filter( id__in=NullableIntegerArrayModel.objects.filter(field__len=3) ), [self.objs[3]] ) class TestChecks(PostgreSQLTestCase): def test_field_checks(self): class MyModel(PostgreSQLModel): field = ArrayField(models.CharField()) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) self.assertEqual(errors[0].id, 'postgres.E001') def test_invalid_base_fields(self): class MyModel(PostgreSQLModel): field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel')) model = MyModel() errors = model.check() self.assertEqual(len(errors), 1) self.assertEqual(errors[0].id, 'postgres.E002') @unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests") class TestMigrations(TransactionTestCase): available_apps = ['postgres_tests'] def test_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(type(new.base_field), type(field.base_field)) def test_deconstruct_with_size(self): field = ArrayField(models.IntegerField(), size=3) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.size, field.size) def test_deconstruct_args(self): field = ArrayField(models.CharField(max_length=20)) name, path, args, kwargs = field.deconstruct() new = ArrayField(*args, **kwargs) self.assertEqual(new.base_field.max_length, field.base_field.max_length) def test_subclass_deconstruct(self): field = ArrayField(models.IntegerField()) name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 
'django.contrib.postgres.fields.ArrayField') field = ArrayFieldSubclass() name, path, args, kwargs = field.deconstruct() self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass') @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_default_migrations", }) def test_adding_field_with_default(self): # See #22962 table_name = 'postgres_tests_integerarraydefaultmodel' with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: self.assertIn(table_name, connection.introspection.table_names(cursor)) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) @override_settings(MIGRATION_MODULES={ "postgres_tests": "postgres_tests.array_index_migrations", }) def test_adding_arrayfield_with_index(self): """ ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes. """ table_name = 'postgres_tests_chartextarrayindexmodel' call_command('migrate', 'postgres_tests', verbosity=0) with connection.cursor() as cursor: like_constraint_field_names = [ c.rsplit('_', 2)[0][len(table_name) + 1:] for c in connection.introspection.get_constraints(cursor, table_name) if c.endswith('_like') ] # Only the CharField should have a LIKE index. self.assertEqual(like_constraint_field_names, ['char2']) with connection.cursor() as cursor: indexes = connection.introspection.get_indexes(cursor, table_name) # All fields should have regular indexes. 
self.assertIn('char', indexes) self.assertIn('char2', indexes) self.assertIn('text', indexes) call_command('migrate', 'postgres_tests', 'zero', verbosity=0) with connection.cursor() as cursor: self.assertNotIn(table_name, connection.introspection.table_names(cursor)) class TestSerialization(PostgreSQLTestCase): test_data = ( '[{"fields": {"field": "[\\"1\\", \\"2\\"]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]' ) def test_dumping(self): instance = IntegerArrayModel(field=[1, 2]) data = serializers.serialize('json', [instance]) self.assertEqual(json.loads(data), json.loads(self.test_data)) def test_loading(self): instance = list(serializers.deserialize('json', self.test_data))[0].object self.assertEqual(instance.field, [1, 2]) class TestValidation(PostgreSQLTestCase): def test_unbounded(self): field = ArrayField(models.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, None], None) self.assertEqual(cm.exception.code, 'item_invalid') self.assertEqual( cm.exception.message % cm.exception.params, 'Item 1 in the array did not validate: This field cannot be null.' 
) def test_blank_true(self): field = ArrayField(models.IntegerField(blank=True, null=True)) # This should not raise a validation error field.clean([1, None], None) def test_with_size(self): field = ArrayField(models.IntegerField(), size=3) field.clean([1, 2, 3], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([1, 2, 3, 4], None) self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.') def test_nested_array_mismatch(self): field = ArrayField(ArrayField(models.IntegerField())) field.clean([[1, 2], [3, 4]], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([[1, 2], [3, 4, 5]], None) self.assertEqual(cm.exception.code, 'nested_array_mismatch') self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.') def test_with_validators(self): field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)])) field.clean([1, 2], None) with self.assertRaises(exceptions.ValidationError) as cm: field.clean([0], None) self.assertEqual(cm.exception.code, 'item_invalid') self.assertEqual( cm.exception.messages[0], 'Item 0 in the array did not validate: Ensure this value is greater than or equal to 1.' 
) class TestSimpleFormField(PostgreSQLTestCase): def test_valid(self): field = SimpleArrayField(forms.CharField()) value = field.clean('a,b,c') self.assertEqual(value, ['a', 'b', 'c']) def test_to_python_fail(self): field = SimpleArrayField(forms.IntegerField()) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,9') self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a whole number.') def test_validate_fail(self): field = SimpleArrayField(forms.CharField(required=True)) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,') self.assertEqual(cm.exception.messages[0], 'Item 2 in the array did not validate: This field is required.') def test_validators_fail(self): field = SimpleArrayField(forms.RegexField('[a-e]{2}')) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,bc,de') self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a valid value.') def test_delimiter(self): field = SimpleArrayField(forms.CharField(), delimiter='|') value = field.clean('a|b|c') self.assertEqual(value, ['a', 'b', 'c']) def test_delimiter_with_nesting(self): field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|') value = field.clean('a,b|c,d') self.assertEqual(value, [['a', 'b'], ['c', 'd']]) def test_prepare_value(self): field = SimpleArrayField(forms.CharField()) value = field.prepare_value(['a', 'b', 'c']) self.assertEqual(value, 'a,b,c') def test_max_length(self): field = SimpleArrayField(forms.CharField(), max_length=2) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.') def test_min_length(self): field = SimpleArrayField(forms.CharField(), min_length=4) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('a,b,c') self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it 
should contain no fewer than 4.') def test_required(self): field = SimpleArrayField(forms.CharField(), required=True) with self.assertRaises(exceptions.ValidationError) as cm: field.clean('') self.assertEqual(cm.exception.messages[0], 'This field is required.') def test_model_field_formfield(self): model_field = ArrayField(models.CharField(max_length=27)) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertIsInstance(form_field.base_field, forms.CharField) self.assertEqual(form_field.base_field.max_length, 27) def test_model_field_formfield_size(self): model_field = ArrayField(models.CharField(max_length=27), size=4) form_field = model_field.formfield() self.assertIsInstance(form_field, SimpleArrayField) self.assertEqual(form_field.max_length, 4) class TestSplitFormField(PostgreSQLTestCase): def test_valid(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'} form = SplitForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']}) def test_required(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), required=True, size=3) data = {'array_0': '', 'array_1': '', 'array_2': ''} form = SplitForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['This field is required.']}) def test_remove_trailing_nulls(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True) data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''} form = SplitForm(data) self.assertTrue(form.is_valid(), form.errors) self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']}) def test_required_field(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''} form = SplitForm(data) 
self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'array': ['Item 2 in the array did not validate: This field is required.']}) def test_rendering(self): class SplitForm(forms.Form): array = SplitArrayField(forms.CharField(), size=3) self.assertHTMLEqual(str(SplitForm()), ''' <tr> <th><label for="id_array_0">Array:</label></th> <td> <input id="id_array_0" name="array_0" type="text" /> <input id="id_array_1" name="array_1" type="text" /> <input id="id_array_2" name="array_2" type="text" /> </td> </tr> ''')
bsd-3-clause
MatthieuBizien/scikit-learn
sklearn/model_selection/__init__.py
53
1587
"""Public API of :mod:`sklearn.model_selection`.

Re-exports the cross-validation splitters, the validation helpers and the
hyper-parameter search utilities from their private submodules.
"""

# Cross-validation splitters and related helpers.
from ._split import (
    BaseCrossValidator,
    KFold,
    LabelKFold,
    StratifiedKFold,
    LeaveOneLabelOut,
    LeaveOneOut,
    LeavePLabelOut,
    LeavePOut,
    ShuffleSplit,
    LabelShuffleSplit,
    StratifiedShuffleSplit,
    PredefinedSplit,
    train_test_split,
    check_cv,
)

# Model validation helpers.
from ._validation import (
    cross_val_score,
    cross_val_predict,
    learning_curve,
    permutation_test_score,
    validation_curve,
)

# Hyper-parameter search.
from ._search import (
    GridSearchCV,
    RandomizedSearchCV,
    ParameterGrid,
    ParameterSampler,
    fit_grid_point,
)

__all__ = ('BaseCrossValidator',
           'GridSearchCV',
           'KFold',
           'LabelKFold',
           'LabelShuffleSplit',
           'LeaveOneLabelOut',
           'LeaveOneOut',
           'LeavePLabelOut',
           'LeavePOut',
           'ParameterGrid',
           'ParameterSampler',
           'PredefinedSplit',
           'RandomizedSearchCV',
           'ShuffleSplit',
           'StratifiedKFold',
           'StratifiedShuffleSplit',
           'check_cv',
           'cross_val_predict',
           'cross_val_score',
           'fit_grid_point',
           'learning_curve',
           'permutation_test_score',
           'train_test_split',
           'validation_curve')
bsd-3-clause
xNovax/SickRage
lib/unidecode/x023.py
174
4346
# Transliteration table for the Unicode block starting at U+2300
# (Miscellaneous Technical).  Index into the tuple is
# (code point - 0x2300); entries cover 0x00 through 0xfe.
# None of these code points has an ASCII approximation in this table,
# so every entry is the '[?]' placeholder.
data = ('[?]',) * 255
gpl-3.0
Ichag/odoo
openerp/tools/amount_to_text.py
393
7719
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

#-------------------------------------------------------------
# French
#-------------------------------------------------------------

# Word tables for 0-19, the tens, and the thousand-group denominations.
# NOTE: the inconsistent capitalisation and the 'Icosillion' entry are kept
# as-is; these strings are part of the module's observable output.
to_19_fr = (u'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
            'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
            'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf')
tens_fr = ('vingt', 'trente', 'quarante', 'Cinquante', 'Soixante',
           'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ('', 'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
            'Quintillion', 'Sextillion', 'Septillion', 'Octillion',
            'Nonillion', 'Décillion', 'Undecillion', 'Duodecillion',
            'Tredecillion', 'Quattuordecillion', 'Sexdecillion',
            'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion')


def _convert_nn_fr(val):
    """Convert a value < 100 to French words."""
    if val < 20:
        return to_19_fr[val]
    # Walk the tens table; dval is the lower bound of each decade (20, 30, ...).
    for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
        if dval + 10 > val:
            if val % 10:
                return dcap + '-' + to_19_fr[val % 10]
            return dcap


def _convert_nnn_fr(val):
    """Convert a value < 1000 to French words.

    Special-cased because it is the level that kicks off the < 100 special
    case; the rest are more general.
    """
    word = ''
    (mod, rem) = (val % 100, val // 100)
    if rem > 0:
        word = to_19_fr[rem] + ' Cent'
        if mod > 0:
            word += ' '
    if mod > 0:
        word += _convert_nn_fr(mod)
    return word


def french_number(val):
    """Return the French wording of the non-negative integer ``val``."""
    if val < 100:
        return _convert_nn_fr(val)
    if val < 1000:
        return _convert_nnn_fr(val)
    # Find the first denomination strictly larger than val, then split val
    # into its leading 1..999 group and the remainder.
    for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
        if dval > val:
            mod = 1000 ** didx
            lead = val // mod
            rest = val - (lead * mod)
            ret = _convert_nnn_fr(lead) + ' ' + denom_fr[didx]
            if rest > 0:
                ret = ret + ', ' + french_number(rest)
            return ret


def amount_to_text_fr(number, currency):
    """Spell out ``number`` (units, then 2-digit cents) in French.

    ``currency`` is inserted verbatim between the unit and cent words.
    """
    number = '%.2f' % number
    units_name = currency
    parts = str(number).split('.')  # renamed from 'list' to avoid shadowing the builtin
    start_word = french_number(abs(int(parts[0])))
    end_word = french_number(int(parts[1]))
    cents_number = int(parts[1])
    # The leading space in cents_name is historical and preserved: callers
    # receive a double space before 'Cent(s)', exactly as before.
    cents_name = (cents_number > 1) and ' Cents' or ' Cent'
    final_result = start_word + ' ' + units_name + ' ' + end_word + ' ' + cents_name
    return final_result

#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------

to_19_nl = ('Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
            'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
            'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien',
            'Negentien')
tens_nl = ('Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig',
           'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ('', 'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
            'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion',
            'Nonillion', 'Decillion', 'Undecillion', 'Duodecillion',
            'Tredecillion', 'Quattuordecillion', 'Sexdecillion',
            'Septendecillion', 'Octodecillion', 'Novemdecillion',
            'Vigintillion')


def _convert_nn_nl(val):
    """Convert a value < 100 to Dutch words."""
    if val < 20:
        return to_19_nl[val]
    for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
        if dval + 10 > val:
            if val % 10:
                return dcap + '-' + to_19_nl[val % 10]
            return dcap


def _convert_nnn_nl(val):
    """Convert a value < 1000 to Dutch words.

    Special-cased because it is the level that kicks off the < 100 special
    case; the rest are more general.
    """
    word = ''
    (mod, rem) = (val % 100, val // 100)
    if rem > 0:
        word = to_19_nl[rem] + ' Honderd'
        if mod > 0:
            word += ' '
    if mod > 0:
        word += _convert_nn_nl(mod)
    return word


def dutch_number(val):
    """Return the Dutch wording of the non-negative integer ``val``."""
    if val < 100:
        return _convert_nn_nl(val)
    if val < 1000:
        return _convert_nnn_nl(val)
    for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
        if dval > val:
            mod = 1000 ** didx
            lead = val // mod
            rest = val - (lead * mod)
            ret = _convert_nnn_nl(lead) + ' ' + denom_nl[didx]
            if rest > 0:
                ret = ret + ', ' + dutch_number(rest)
            return ret


def amount_to_text_nl(number, currency):
    """Spell out ``number`` (units, then 2-digit cents) in Dutch.

    Unlike the French variant this does not take ``abs()`` of the units;
    that asymmetry is preserved (``amount_to_text`` already passes abs).
    """
    number = '%.2f' % number
    units_name = currency
    parts = str(number).split('.')  # renamed from 'list' to avoid shadowing the builtin
    start_word = dutch_number(int(parts[0]))
    end_word = dutch_number(int(parts[1]))
    # The original conditional returned 'cent' in both branches; simplified
    # to the constant it always was.
    cents_name = 'cent'
    final_result = start_word + ' ' + units_name + ' ' + end_word + ' ' + cents_name
    return final_result

#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------

# Language code -> converter; extendable via add_amount_to_text_function().
_translate_funcs = {'fr': amount_to_text_fr, 'nl': amount_to_text_nl}


def add_amount_to_text_function(lang, func):
    """Register ``func`` as the amount-to-text converter for ``lang``."""
    _translate_funcs[lang] = func


#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
    """Convert an integer to its textual representation, using the language
    set in the context if any.

    Example::

        1654: mille six cent cinquante-quatre.
    """
    # 'lang not in' replaces dict.has_key(), which was removed in Python 3;
    # print() with a single argument is valid on both Python 2 and 3.
    if lang not in _translate_funcs:
        #TODO: use logger
        print("WARNING: no translation function found for lang: '%s'" % (lang,))
        #TODO: (default should be en) same as above
        lang = 'fr'
    return _translate_funcs[lang](abs(nbr), currency)


if __name__ == '__main__':
    from sys import argv

    lang = 'nl'
    if len(argv) < 2:
        for i in range(1, 200):
            print('%s >> %s' % (i, amount_to_text(i, lang)))
        for i in range(200, 999999, 139):
            print('%s >> %s' % (i, amount_to_text(i, lang)))
    else:
        print(amount_to_text(int(argv[1]), lang))

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
jonathanslenders/pyvim
pyvim/welcome_message.py
1
1246
""" The welcome message. This is displayed when the editor opens without any files. """ from __future__ import unicode_literals from prompt_toolkit.formatted_text.utils import fragment_list_len import prompt_toolkit import pyvim import platform import sys version = sys.version_info pyvim_version = pyvim.__version__ __all__ = ( 'WELCOME_MESSAGE_TOKENS', 'WELCOME_MESSAGE_WIDTH', 'WELCOME_MESSAGE_HEIGHT', ) WELCOME_MESSAGE_WIDTH = 36 WELCOME_MESSAGE_TOKENS = [ ('class:title', 'PyVim - Pure Python Vi clone\n'), ('', 'Still experimental\n\n'), ('', 'version '), ('class:version', pyvim_version), ('', ', prompt_toolkit '), ('class:version', prompt_toolkit.__version__), ('', '\n'), ('', 'by Jonathan Slenders\n\n'), ('', 'type :q'), ('class:key', '<Enter>'), ('', ' to exit\n'), ('', 'type :help'), ('class:key', '<Enter>'), ('', ' or '), ('class:key', '<F1>'), ('', ' for help\n\n'), ('', 'All feedback is appreciated.\n\n'), ('class:pythonversion', ' %s %i.%i.%i ' % ( platform.python_implementation(), version[0], version[1], version[2])), ] WELCOME_MESSAGE_HEIGHT = ''.join(t[1] for t in WELCOME_MESSAGE_TOKENS).count('\n') + 1
bsd-3-clause
channing/gyp
test/ninja/action_dependencies/gyptest-action-dependencies.py
246
1850
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verify that building an object file correctly depends on running actions in dependent targets, but not the targets themselves. """ import os import sys import TestGyp # NOTE(piman): This test will not work with other generators because: # - it explicitly tests the optimization, which is not implemented (yet?) on # other generators # - it relies on the exact path to output object files, which is generator # dependent, and actually, relies on the ability to build only that object file, # which I don't think is available on all generators. # TODO(piman): Extend to other generators when possible. test = TestGyp.TestGyp(formats=['ninja']) test.run_gyp('action_dependencies.gyp', chdir='src') chdir = 'relocate/src' test.relocate('src', chdir) objext = '.obj' if sys.platform == 'win32' else '.o' test.build('action_dependencies.gyp', os.path.join('obj', 'b.b' + objext), chdir=chdir) # The 'a' actions should be run (letting b.c compile), but the a static library # should not be built. test.built_file_must_not_exist('a', type=test.STATIC_LIB, chdir=chdir) test.built_file_must_not_exist('b', type=test.STATIC_LIB, chdir=chdir) test.built_file_must_exist(os.path.join('obj', 'b.b' + objext), chdir=chdir) test.build('action_dependencies.gyp', os.path.join('obj', 'c.c' + objext), chdir=chdir) # 'a' and 'b' should be built, so that the 'c' action succeeds, letting c.c # compile test.built_file_must_exist('a', type=test.STATIC_LIB, chdir=chdir) test.built_file_must_exist('b', type=test.EXECUTABLE, chdir=chdir) test.built_file_must_exist(os.path.join('obj', 'c.c' + objext), chdir=chdir) test.pass_test()
bsd-3-clause
kumar303/olympia
src/olympia/devhub/tests/test_forms.py
2
35137
# -*- coding: utf-8 -*-
import os
import shutil
import tempfile
from datetime import timedelta

from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.utils import translation

import pytest
import six
from freezegun import freeze_time
from unittest import mock
from waffle.testutils import override_switch

from olympia import amo, core
from olympia.addons.models import Addon, Category
from olympia.amo.tests import (
    addon_factory, get_random_ip, req_factory_factory, TestCase, user_factory)
from olympia.amo.tests.test_helpers import get_image_path
from olympia.amo.utils import rm_local_tmp_dir
from olympia.applications.models import AppVersion
from olympia.devhub import forms
from olympia.files.models import FileUpload
from olympia.signing.views import VersionView
from olympia.tags.models import AddonTag, Tag


class TestNewUploadForm(TestCase):
    """Tests for forms.NewUploadForm (devhub add-on upload form)."""

    def test_firefox_default_selected(self):
        """The compatible_apps field defaults to Firefox only."""
        upload = FileUpload.objects.create(valid=False)
        data = {'upload': upload.uuid}
        request = req_factory_factory('/', post=True, data=data)
        request.user = user_factory()
        form = forms.NewUploadForm(data, request=request)
        assert form.fields['compatible_apps'].initial == [amo.FIREFOX.id]

    def test_compat_apps_widget_custom_label_class_rendered(self):
        """We are setting a custom class at the label of the compatibility
        apps multi-select to correctly render images.
        """
        upload = FileUpload.objects.create(valid=False)
        data = {'upload': upload.uuid}
        request = req_factory_factory('/', post=True, data=data)
        request.user = user_factory()
        form = forms.NewUploadForm(data, request=request)
        result = form.fields['compatible_apps'].widget.render(
            name='compatible_apps', value=amo.FIREFOX.id)
        assert 'class="app firefox"' in result
        result = form.fields['compatible_apps'].widget.render(
            name='compatible_apps', value=amo.ANDROID.id)
        assert 'class="app android"' in result

    def test_only_valid_uploads(self):
        """An upload with valid=False is rejected, unless an admin override
        is supplied, or the upload's validation is fixed."""
        # NOTE(review): the first FileUpload is immediately shadowed by the
        # second assignment below.
        upload = FileUpload.objects.create(valid=False)
        upload = FileUpload.objects.create(valid=False)
        data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
        request = req_factory_factory('/', post=True, data=data)
        request.user = user_factory()
        form = forms.NewUploadForm(data, request=request)
        assert ('There was an error with your upload. Please try again.' in
                form.errors.get('__all__')), form.errors

        # Admin override makes the form ignore the brokenness
        with mock.patch('olympia.access.acl.action_allowed_user') as acl:
            # For the 'Addons:Edit' permission check.
            acl.return_value = True
            data['admin_override_validation'] = True
            form = forms.NewUploadForm(data, request=request)
            assert ('There was an error with your upload. Please try' not in
                    form.errors.get('__all__')), form.errors

        # Once the upload validates cleanly, no override is needed.
        upload.validation = '{"errors": 0}'
        upload.save()
        addon = Addon.objects.create()
        data.pop('admin_override_validation')
        form = forms.NewUploadForm(data, request=request, addon=addon)
        assert ('There was an error with your upload. Please try again.'
                not in form.errors.get('__all__')), form.errors

    @mock.patch('olympia.devhub.forms.parse_addon')
    def test_throttling(self, parse_addon_mock):
        """Six submissions within the window trip the throttle; it clears
        again after 61 seconds."""
        upload = FileUpload.objects.create(valid=True, name='foo.xpi')
        data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
        request = req_factory_factory('/', post=True, data=data)
        request.user = user_factory()
        request.META['REMOTE_ADDR'] = '5.6.7.8'
        with freeze_time('2019-04-08 15:16:23.42') as frozen_time:
            for x in range(0, 6):
                self._add_fake_throttling_action(
                    view_class=VersionView,
                    url='/',
                    user=request.user,
                    remote_addr=get_random_ip(),
                )

            form = forms.NewUploadForm(data, request=request)
            assert not form.is_valid()
            assert form.errors.get('__all__') == [
                'You have submitted too many uploads recently. '
                'Please try again after some time.'
            ]

            frozen_time.tick(delta=timedelta(seconds=61))
            form = forms.NewUploadForm(data, request=request)
            assert form.is_valid()

    # Those three patches are so files.utils.parse_addon doesn't fail on a
    # non-existent file even before having a chance to call check_xpi_info.
    @mock.patch('olympia.files.utils.Extractor.parse')
    @mock.patch('olympia.files.utils.extract_xpi', lambda xpi, path: None)
    @mock.patch('olympia.files.utils.get_file', lambda xpi: None)
    # This is the one we want to test.
    @mock.patch('olympia.files.utils.check_xpi_info')
    def test_check_xpi_called(self, mock_check_xpi_info, mock_parse):
        """Make sure the check_xpi_info helper is called.

        There's some important checks made in check_xpi_info, if we ever
        refactor the form to not call it anymore, we need to make sure those
        checks are run at some point.
        """
        mock_parse.return_value = None
        mock_check_xpi_info.return_value = {'name': 'foo', 'type': 2}
        upload = FileUpload.objects.create(valid=True, name='foo.xpi')
        addon = Addon.objects.create()
        data = {'upload': upload.uuid, 'compatible_apps': [amo.FIREFOX.id]}
        request = req_factory_factory('/', post=True, data=data)
        request.user = user_factory()
        form = forms.NewUploadForm(data, addon=addon, request=request)
        form.clean()
        assert mock_check_xpi_info.called


class TestCompatForm(TestCase):
    """Tests for the CompatFormSet application-compatibility formset."""

    fixtures = ['base/addon_3615']

    def setUp(self):
        # Register the AppVersions the choice-list tests below rely on.
        super(TestCompatForm, self).setUp()
        AppVersion.objects.create(
            application=amo.ANDROID.id, version='50.0')
        AppVersion.objects.create(
            application=amo.ANDROID.id, version='56.0')
        AppVersion.objects.create(
            application=amo.FIREFOX.id, version='56.0')
        AppVersion.objects.create(
            application=amo.FIREFOX.id, version='56.*')
        AppVersion.objects.create(
            application=amo.FIREFOX.id, version='57.0')
        AppVersion.objects.create(
            application=amo.FIREFOX.id, version='57.*')

    def test_forms(self):
        """The formset contains one form per application in amo.APP_USAGE."""
        version = Addon.objects.get(id=3615).current_version
        formset = forms.CompatFormSet(None, queryset=version.apps.all(),
                                      form_kwargs={'version': version})
        apps = [form.app for form in formset.forms]
        assert set(apps) == set(amo.APP_USAGE)

    def test_form_initial(self):
        """The Firefox form is pre-filled with the version's current
        min/max compatibility range."""
        version = Addon.objects.get(id=3615).current_version
        current_min = version.apps.filter(application=amo.FIREFOX.id).get().min
        current_max = version.apps.filter(application=amo.FIREFOX.id).get().max
        formset = forms.CompatFormSet(None, queryset=version.apps.all(),
                                      form_kwargs={'version': version})
        form = formset.forms[0]
        assert form.app == amo.FIREFOX
        assert form.initial['application'] == amo.FIREFOX.id
        assert form.initial['min'] == current_min.pk
        assert form.initial['max'] == current_max.pk

    def _test_form_choices_expect_all_versions(self, version):
        # Helper: assert the Firefox min/max choices include every known
        # AppVersion (min excludes wildcard '*' versions).
        expected_min_choices = [(u'', u'---------')] + list(
            AppVersion.objects.filter(application=amo.FIREFOX.id)
                      .exclude(version__contains='*')
                      .values_list('pk', 'version')
                      .order_by('version_int'))
        expected_max_choices = [(u'', u'---------')] + list(
            AppVersion.objects.filter(application=amo.FIREFOX.id)
                      .values_list('pk', 'version')
                      .order_by('version_int'))

        formset = forms.CompatFormSet(None, queryset=version.apps.all(),
                                      form_kwargs={'version': version})
        form = formset.forms[0]
        assert form.app == amo.FIREFOX
        assert list(form.fields['min'].choices) == expected_min_choices
        assert list(form.fields['max'].choices) == expected_max_choices

    def test_form_choices(self):
        """WebExtensions may target any Firefox version."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(is_webextension=True)
        del version.all_files
        self._test_form_choices_expect_all_versions(version)

    def test_form_choices_no_compat(self):
        """Dictionaries are exempt from the legacy 57+ restriction."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(is_webextension=False)
        version.addon.update(type=amo.ADDON_DICT)
        del version.all_files
        self._test_form_choices_expect_all_versions(version)

    def test_form_choices_language_pack(self):
        """Language packs are exempt from the legacy 57+ restriction."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(is_webextension=False)
        version.addon.update(type=amo.ADDON_LPAPP)
        del version.all_files
        self._test_form_choices_expect_all_versions(version)

    def test_form_choices_legacy(self):
        """Legacy (non-webextension) add-ons cannot pick Firefox 57+."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(is_webextension=False)
        del version.all_files

        firefox_57 = AppVersion.objects.get(
            application=amo.FIREFOX.id, version='57.0')
        firefox_57_s = AppVersion.objects.get(
            application=amo.FIREFOX.id, version='57.*')

        expected_min_choices = [(u'', u'---------')] + list(
            AppVersion.objects.filter(application=amo.FIREFOX.id)
                      .exclude(version__contains='*')
                      .exclude(pk__in=(firefox_57.pk, firefox_57_s.pk))
                      .values_list('pk', 'version')
                      .order_by('version_int'))
        expected_max_choices = [(u'', u'---------')] + list(
            AppVersion.objects.filter(application=amo.FIREFOX.id)
                      .exclude(pk__in=(firefox_57.pk, firefox_57_s.pk))
                      .values_list('pk', 'version')
                      .order_by('version_int'))

        formset = forms.CompatFormSet(None, queryset=version.apps.all(),
                                      form_kwargs={'version': version})
        form = formset.forms[0]
        assert form.app == amo.FIREFOX
        assert list(form.fields['min'].choices) == expected_min_choices
        assert list(form.fields['max'].choices) == expected_max_choices

        # Android choices are not restricted.
        expected_an_choices = [(u'', u'---------')] + list(
            AppVersion.objects.filter(application=amo.ANDROID.id)
            .values_list('pk', 'version').order_by('version_int'))
        form = formset.forms[1]
        assert form.app == amo.ANDROID
        assert list(form.fields['min'].choices) == expected_an_choices
        assert list(form.fields['max'].choices) == expected_an_choices

    def test_form_choices_mozilla_signed_legacy(self):
        """Mozilla-signed legacy add-ons may target any Firefox version."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(
            is_webextension=False, is_mozilla_signed_extension=True)
        del version.all_files
        self._test_form_choices_expect_all_versions(version)

    def test_static_theme(self):
        """Static themes get all version choices, but the formset is locked
        down: no deletions and no extra apps."""
        version = Addon.objects.get(id=3615).current_version
        version.files.all().update(is_webextension=True)
        version.addon.update(type=amo.ADDON_STATICTHEME)
        del version.all_files
        self._test_form_choices_expect_all_versions(version)

        formset = forms.CompatFormSet(None, queryset=version.apps.all(),
                                      form_kwargs={'version': version})
        assert formset.can_delete is False  # No deleting Firefox app plz.
        assert formset.extra == 0  # And lets not extra apps be added.
class TestPreviewForm(TestCase): fixtures = ['base/addon_3615'] def setUp(self): super(TestPreviewForm, self).setUp() self.dest = os.path.join(settings.TMP_PATH, 'preview') if not os.path.exists(self.dest): os.makedirs(self.dest) @mock.patch('olympia.amo.models.ModelBase.update') def test_preview_modified(self, update_mock): addon = Addon.objects.get(pk=3615) name = 'transparent.png' form = forms.PreviewForm({'caption': 'test', 'upload_hash': name, 'position': 1}) with storage.open(os.path.join(self.dest, name), 'wb') as f: shutil.copyfileobj(open(get_image_path(name), 'rb'), f) assert form.is_valid() form.save(addon) assert update_mock.called @mock.patch('olympia.amo.utils.pngcrush_image') def test_preview_size(self, pngcrush_image_mock): addon = Addon.objects.get(pk=3615) name = 'teamaddons.jpg' form = forms.PreviewForm({'caption': 'test', 'upload_hash': name, 'position': 1}) with storage.open(os.path.join(self.dest, name), 'wb') as f: shutil.copyfileobj(open(get_image_path(name), 'rb'), f) assert form.is_valid() form.save(addon) preview = addon.previews.all()[0] assert preview.sizes == ( {u'image': [2400, 1600], u'thumbnail': [640, 427], u'original': [3000, 2000]}) assert os.path.exists(preview.image_path) assert os.path.exists(preview.thumbnail_path) assert os.path.exists(preview.original_path) assert pngcrush_image_mock.call_count == 2 assert pngcrush_image_mock.call_args_list[0][0][0] == ( preview.thumbnail_path) assert pngcrush_image_mock.call_args_list[1][0][0] == ( preview.image_path) class TestDistributionChoiceForm(TestCase): @pytest.mark.needs_locales_compilation def test_lazy_choice_labels(self): """Tests that the labels in `choices` are still lazy We had a problem that the labels weren't properly marked as lazy which led to labels being returned in mixed languages depending on what server we hit in production. 
""" with translation.override('en-US'): form = forms.DistributionChoiceForm() label = form.fields['channel'].choices[0][1] expected = 'On this site.' label = six.text_type(label) assert label.startswith(expected) with translation.override('de'): form = forms.DistributionChoiceForm() label = form.fields['channel'].choices[0][1] expected = 'Auf dieser Website.' label = six.text_type(label) assert label.startswith(expected) class TestDescribeForm(TestCase): fixtures = ('base/addon_3615', 'base/addon_3615_categories', 'addons/denied') def setUp(self): super(TestDescribeForm, self).setUp() self.existing_name = 'Delicious Bookmarks' self.non_existing_name = 'Does Not Exist' self.error_msg = 'This name is already in use. Please choose another.' self.request = req_factory_factory('/') def test_slug_deny(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'slug': u'submit'}, request=self.request, instance=delicious) assert not form.is_valid() assert form.errors['slug'] == ( [u'The slug cannot be "submit". 
Please choose another.']) def test_name_trademark_mozilla(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'name': u'Delicious Mozilla', 'summary': u'foô', 'slug': u'bar'}, request=self.request, instance=delicious) assert not form.is_valid() assert form.errors['name'].data[0].message.startswith( u'Add-on names cannot contain the Mozilla or Firefox trademarks.') def test_name_trademark_firefox(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'name': u'Delicious Firefox', 'summary': u'foö', 'slug': u'bar'}, request=self.request, instance=delicious) assert not form.is_valid() assert form.errors['name'].data[0].message.startswith( u'Add-on names cannot contain the Mozilla or Firefox trademarks.') @override_switch('content-optimization', active=False) def test_name_trademark_allowed_for_prefix(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'name': u'Delicious for Mozilla', 'summary': u'foø', 'slug': u'bar'}, request=self.request, instance=delicious) assert form.is_valid() def test_name_no_trademark(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'name': u'Delicious Dumdidum', 'summary': u'đoo', 'slug': u'bar'}, request=self.request, instance=delicious) assert form.is_valid() def test_slug_isdigit(self): delicious = Addon.objects.get() form = forms.DescribeForm( {'slug': u'123'}, request=self.request, instance=delicious) assert not form.is_valid() assert form.errors['slug'] == ( [u'The slug cannot be "123". 
Please choose another.']) def test_bogus_support_url(self): form = forms.DescribeForm( {'support_url': 'javascript://something.com'}, request=self.request, instance=Addon.objects.get()) assert not form.is_valid() assert form.errors['support_url'] == [u'Enter a valid URL.'] def test_ftp_support_url(self): form = forms.DescribeForm( {'support_url': 'ftp://foo.com'}, request=self.request, instance=Addon.objects.get()) assert not form.is_valid() assert form.errors['support_url'] == [u'Enter a valid URL.'] def test_http_support_url(self): form = forms.DescribeForm( {'name': u'Delicious Dumdidum', 'summary': u'foo', 'slug': u'bar', 'support_url': 'http://foo.com'}, request=self.request, instance=Addon.objects.get()) assert form.is_valid(), form.errors def test_description_optional(self): delicious = Addon.objects.get() assert delicious.type == amo.ADDON_EXTENSION with override_switch('content-optimization', active=False): form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar'}, request=self.request, instance=delicious) assert form.is_valid(), form.errors with override_switch('content-optimization', active=True): form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar'}, request=self.request, instance=delicious) assert not form.is_valid() # But only extensions are required to have a description delicious.update(type=amo.ADDON_STATICTHEME) form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar'}, request=self.request, instance=delicious) assert form.is_valid(), form.errors # Do it again, but this time with a description delicious.update(type=amo.ADDON_EXTENSION) form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar', 'description': u'its a description'}, request=self.request, instance=delicious) assert form.is_valid(), form.errors def test_description_min_length(self): delicious = Addon.objects.get() assert 
delicious.type == amo.ADDON_EXTENSION with override_switch('content-optimization', active=False): form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar', 'description': u'123456789'}, request=self.request, instance=delicious) assert form.is_valid(), form.errors with override_switch('content-optimization', active=True): form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar', 'description': u'123456789'}, request=self.request, instance=delicious) assert not form.is_valid() # But only extensions have a minimum length delicious.update(type=amo.ADDON_STATICTHEME) form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar', 'description': u'123456789'}, request=self.request, instance=delicious) assert form.is_valid() # Do it again, but this time with a longer description delicious.update(type=amo.ADDON_EXTENSION) form = forms.DescribeForm( {'name': u'Delicious for everyone', 'summary': u'foo', 'slug': u'bar', 'description': u'1234567890'}, request=self.request, instance=delicious) assert form.is_valid(), form.errors def test_name_summary_lengths(self): delicious = Addon.objects.get() short_data = { 'name': u'n', 'summary': u's', 'slug': u'bar', 'description': u'1234567890'} over_70_data = { 'name': u'this is a name that hits the 50 char limit almost', 'summary': u'this is a summary that doesn`t get close to the ' u'existing 250 limit but is over 70', 'slug': u'bar', 'description': u'1234567890'} under_70_data = { 'name': u'this is a name that is over the 50 char limit by a few', 'summary': u'ab', 'slug': u'bar', 'description': u'1234567890'} # short name and summary - both allowed with DescribeForm form = forms.DescribeForm( short_data, request=self.request, instance=delicious) assert form.is_valid() # but not with DescribeFormContentOptimization form = forms.DescribeFormContentOptimization( short_data, request=self.request, instance=delicious) assert 
not form.is_valid() assert form.errors['name'] == [ u'Ensure this value has at least 2 characters (it has 1).'] assert form.errors['summary'] == [ u'Ensure this value has at least 2 characters (it has 1).'] # As are long names and summaries form = forms.DescribeForm( over_70_data, request=self.request, instance=delicious) assert form.is_valid() # but together are over 70 chars so no longer allowed form = forms.DescribeFormContentOptimization( over_70_data, request=self.request, instance=delicious) assert not form.is_valid() assert len(over_70_data['name']) + len(over_70_data['summary']) == 130 assert form.errors['name'] == [ u'Ensure name and summary combined are at most 70 characters ' u'(they have 130).'] assert 'summary' not in form.errors # DescribeForm has a lower limit for name length form = forms.DescribeForm( under_70_data, request=self.request, instance=delicious) assert not form.is_valid() assert form.errors['name'] == [ u'Ensure this value has at most 50 characters (it has 54).'] # DescribeFormContentOptimization only cares that the total is <= 70 form = forms.DescribeFormContentOptimization( under_70_data, request=self.request, instance=delicious) assert form.is_valid() assert len(under_70_data['name']) + len(under_70_data['summary']) == 56 def test_name_summary_auto_cropping(self): delicious = Addon.objects.get() assert delicious.default_locale == 'en-US' summary_needs_cropping = { 'name_en-us': u'a' * 25, 'name_fr': u'b' * 30, 'summary_en-us': u'c' * 45, 'summary_fr': u'd' * 45, # 30 + 45 is > 70 'slug': u'slug', 'description_en-us': u'z' * 10, } form = forms.DescribeFormContentOptimization( summary_needs_cropping, request=self.request, instance=delicious, should_auto_crop=True) assert form.is_valid(), form.errors assert form.cleaned_data['name']['en-us'] == u'a' * 25 # no change assert form.cleaned_data['summary']['en-us'] == u'c' * 45 # no change assert form.cleaned_data['name']['fr'] == u'b' * 30 # no change assert 
form.cleaned_data['summary']['fr'] == u'd' * 40 # 45 to 40 summary_needs_cropping_no_name = { 'name_en-us': u'a' * 25, 'summary_en-us': u'c' * 45, 'summary_fr': u'd' * 50, 'slug': u'slug', 'description_en-us': u'z' * 10, } form = forms.DescribeFormContentOptimization( summary_needs_cropping_no_name, request=self.request, instance=delicious, should_auto_crop=True) assert form.is_valid(), form.errors assert form.cleaned_data['name']['en-us'] == u'a' * 25 assert form.cleaned_data['summary']['en-us'] == u'c' * 45 assert 'fr' not in form.cleaned_data['name'] # we've not added it assert form.cleaned_data['summary']['fr'] == u'd' * 45 # 50 to 45 name_needs_cropping = { 'name_en-us': u'a' * 67, 'name_fr': u'b' * 69, 'summary_en-us': u'c' * 2, 'summary_fr': u'd' * 3, 'slug': u'slug', 'description_en-us': u'z' * 10, } form = forms.DescribeFormContentOptimization( name_needs_cropping, request=self.request, instance=delicious, should_auto_crop=True) assert form.is_valid(), form.errors assert form.cleaned_data['name']['en-us'] == u'a' * 67 # no change assert form.cleaned_data['summary']['en-us'] == u'c' * 2 # no change assert form.cleaned_data['name']['fr'] == u'b' * 68 # 69 to 68 assert form.cleaned_data['summary']['fr'] == u'd' * 2 # 3 to 2 name_needs_cropping_no_summary = { 'name_en-us': u'a' * 50, 'name_fr': u'b' * 69, 'summary_en-us': u'c' * 20, 'slug': u'slug', 'description_en-us': u'z' * 10, } form = forms.DescribeFormContentOptimization( name_needs_cropping_no_summary, request=self.request, instance=delicious, should_auto_crop=True) assert form.is_valid(), form.errors assert form.cleaned_data['name']['en-us'] == u'a' * 50 # no change assert form.cleaned_data['summary']['en-us'] == u'c' * 20 # no change assert form.cleaned_data['name']['fr'] == u'b' * 50 # 69 to 50 assert 'fr' not in form.cleaned_data['summary'] class TestAdditionalDetailsForm(TestCase): fixtures = ['base/addon_3615', 'base/users'] def setUp(self): super(TestAdditionalDetailsForm, self).setUp() 
self.addon = Addon.objects.get(pk=3615) self.data = { 'default_locale': 'en-US', 'homepage': str(self.addon.homepage), } self.user = self.addon.authors.all()[0] core.set_user(self.user) self.request = req_factory_factory('/') def test_locales(self): form = forms.AdditionalDetailsForm( request=self.request, instance=self.addon) assert form.fields['default_locale'].choices[0][0] == 'af' def add_tags(self, tags): data = self.data.copy() data.update({'tags': tags}) form = forms.AdditionalDetailsForm( data=data, request=self.request, instance=self.addon) assert form.is_valid() form.save(self.addon) return form def get_tag_text(self): return [t.tag_text for t in self.addon.tags.all()] def test_tags(self): self.add_tags('foo, bar') assert self.get_tag_text() == ['bar', 'foo'] def test_tags_xss(self): self.add_tags('<script>alert("foo")</script>, bar') assert self.get_tag_text() == ['bar', 'scriptalertfooscript'] def test_tags_case_spaces(self): self.add_tags('foo, bar') self.add_tags('foo, bar , Bar, BAR, b a r ') assert self.get_tag_text() == ['b a r', 'bar', 'foo'] def test_tags_spaces(self): self.add_tags('foo, bar beer') assert self.get_tag_text() == ['bar beer', 'foo'] def test_tags_unicode(self): self.add_tags(u'Österreich') assert self.get_tag_text() == [u'Österreich'.lower()] def add_restricted(self, *args): if not args: args = ['i_am_a_restricted_tag'] for arg in args: tag = Tag.objects.create(tag_text=arg, restricted=True) AddonTag.objects.create(tag=tag, addon=self.addon) def test_tags_restricted(self): self.add_restricted() self.add_tags('foo, bar') form = forms.AdditionalDetailsForm( data=self.data, request=self.request, instance=self.addon) assert form.fields['tags'].initial == 'bar, foo' assert self.get_tag_text() == ['bar', 'foo', 'i_am_a_restricted_tag'] self.add_tags('') assert self.get_tag_text() == ['i_am_a_restricted_tag'] def test_tags_error(self): self.add_restricted('i_am_a_restricted_tag', 'sdk') data = self.data.copy() data.update({'tags': 
'i_am_a_restricted_tag'}) form = forms.AdditionalDetailsForm( data=data, request=self.request, instance=self.addon) assert form.errors['tags'][0] == ( '"i_am_a_restricted_tag" is a reserved tag and cannot be used.') data.update({'tags': 'i_am_a_restricted_tag, sdk'}) form = forms.AdditionalDetailsForm( data=data, request=self.request, instance=self.addon) assert form.errors['tags'][0] == ( '"i_am_a_restricted_tag", "sdk" are reserved tags and' ' cannot be used.') @mock.patch('olympia.access.acl.action_allowed') def test_tags_admin_restricted(self, action_allowed): action_allowed.return_value = True self.add_restricted('i_am_a_restricted_tag') self.add_tags('foo, bar') assert self.get_tag_text() == ['bar', 'foo'] self.add_tags('foo, bar, i_am_a_restricted_tag') assert self.get_tag_text() == ['bar', 'foo', 'i_am_a_restricted_tag'] form = forms.AdditionalDetailsForm( data=self.data, request=self.request, instance=self.addon) assert form.fields['tags'].initial == 'bar, foo, i_am_a_restricted_tag' @mock.patch('olympia.access.acl.action_allowed') def test_tags_admin_restricted_count(self, action_allowed): action_allowed.return_value = True self.add_restricted() self.add_tags('i_am_a_restricted_tag, %s' % (', '.join('tag-test-%s' % i for i in range(0, 20)))) def test_tags_restricted_count(self): self.add_restricted() self.add_tags(', '.join('tag-test-%s' % i for i in range(0, 20))) def test_tags_slugified_count(self): self.add_tags(', '.join('tag-test' for i in range(0, 21))) assert self.get_tag_text() == ['tag-test'] def test_tags_limit(self): self.add_tags(' %s' % ('t' * 128)) def test_tags_long(self): tag = ' -%s' % ('t' * 128) data = self.data.copy() data.update({"tags": tag}) form = forms.AdditionalDetailsForm( data=data, request=self.request, instance=self.addon) assert not form.is_valid() assert form.errors['tags'] == [ 'All tags must be 128 characters or less after invalid characters' ' are removed.'] def test_bogus_homepage(self): form = 
forms.AdditionalDetailsForm( {'homepage': 'javascript://something.com'}, request=self.request, instance=self.addon) assert not form.is_valid() assert form.errors['homepage'] == [u'Enter a valid URL.'] def test_ftp_homepage(self): form = forms.AdditionalDetailsForm( {'homepage': 'ftp://foo.com'}, request=self.request, instance=self.addon) assert not form.is_valid() assert form.errors['homepage'] == [u'Enter a valid URL.'] def test_homepage_is_not_required(self): form = forms.AdditionalDetailsForm( {'default_locale': 'en-US'}, request=self.request, instance=self.addon) assert form.is_valid() class TestIconForm(TestCase): fixtures = ['base/addon_3615'] def setUp(self): super(TestIconForm, self).setUp() self.temp_dir = tempfile.mkdtemp(dir=settings.TMP_PATH) self.addon = Addon.objects.get(pk=3615) class DummyRequest: FILES = None self.request = DummyRequest() self.icon_path = os.path.join(settings.TMP_PATH, 'icon') if not os.path.exists(self.icon_path): os.makedirs(self.icon_path) def tearDown(self): rm_local_tmp_dir(self.temp_dir) super(TestIconForm, self).tearDown() def get_icon_paths(self): path = os.path.join(self.addon.get_icon_dir(), str(self.addon.id)) return ['%s-%s.png' % (path, size) for size in amo.ADDON_ICON_SIZES] @mock.patch('olympia.amo.models.ModelBase.update') def test_icon_modified(self, update_mock): name = 'transparent.png' form = forms.AddonFormMedia({'icon_upload_hash': name}, request=self.request, instance=self.addon) dest = os.path.join(self.icon_path, name) with storage.open(dest, 'wb') as f: shutil.copyfileobj(open(get_image_path(name), 'rb'), f) assert form.is_valid() form.save(addon=self.addon) assert update_mock.called class TestCategoryForm(TestCase): def test_no_possible_categories(self): Category.objects.create(type=amo.ADDON_SEARCH, application=amo.FIREFOX.id) addon = addon_factory(type=amo.ADDON_SEARCH) request = req_factory_factory('/') form = forms.CategoryFormSet(addon=addon, request=request) apps = [f.app for f in form.forms] 
assert apps == [amo.FIREFOX]
bsd-3-clause
Intel-tensorflow/tensorflow
tensorflow/python/tools/saved_model_utils_test.py
6
5038
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for SavedModel utils.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.lib.io import file_io from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.saved_model import builder as saved_model_builder from tensorflow.python.saved_model import tag_constants from tensorflow.python.tools import saved_model_utils def tearDownModule(): file_io.delete_recursively(test.get_temp_dir()) class SavedModelUtilTest(test.TestCase): def _init_and_validate_variable(self, sess, variable_name, variable_value): v = variables.Variable(variable_value, name=variable_name) sess.run(variables.global_variables_initializer()) self.assertEqual(variable_value, v.eval()) @test_util.deprecated_graph_mode_only def testReadSavedModelValid(self): saved_model_dir = os.path.join(test.get_temp_dir(), "valid_saved_model") builder = saved_model_builder.SavedModelBuilder(saved_model_dir) with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 42) builder.add_meta_graph_and_variables(sess, [tag_constants.TRAINING]) builder.save() actual_saved_model_pb 
= saved_model_utils.read_saved_model(saved_model_dir) self.assertEqual(len(actual_saved_model_pb.meta_graphs), 1) self.assertEqual( len(actual_saved_model_pb.meta_graphs[0].meta_info_def.tags), 1) self.assertEqual(actual_saved_model_pb.meta_graphs[0].meta_info_def.tags[0], tag_constants.TRAINING) def testReadSavedModelInvalid(self): saved_model_dir = os.path.join(test.get_temp_dir(), "invalid_saved_model") with self.assertRaisesRegex( IOError, "SavedModel file does not exist at: %s" % saved_model_dir): saved_model_utils.read_saved_model(saved_model_dir) def testGetSavedModelTagSets(self): saved_model_dir = os.path.join(test.get_temp_dir(), "test_tags") builder = saved_model_builder.SavedModelBuilder(saved_model_dir) # Force test to run in graph mode since SavedModelBuilder.save requires a # session to work. with ops.Graph().as_default(): # Graph with a single variable. SavedModel invoked to: # - add with weights. # - a single tag (from predefined constants). with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 42) builder.add_meta_graph_and_variables(sess, [tag_constants.TRAINING]) # Graph that updates the single variable. SavedModel invoked to: # - simply add the model (weights are not updated). # - a single tag (from predefined constants). with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 43) builder.add_meta_graph([tag_constants.SERVING]) # Graph that updates the single variable. SavedModel is invoked: # - to add the model (weights are not updated). # - multiple predefined tags. with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 44) builder.add_meta_graph([tag_constants.SERVING, tag_constants.GPU]) # Graph that updates the single variable. SavedModel is invoked: # - to add the model (weights are not updated). # - multiple predefined tags for serving on TPU. 
with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 44) builder.add_meta_graph([tag_constants.SERVING, tag_constants.TPU]) # Graph that updates the single variable. SavedModel is invoked: # - to add the model (weights are not updated). # - multiple custom tags. with self.session(graph=ops.Graph()) as sess: self._init_and_validate_variable(sess, "v", 45) builder.add_meta_graph(["foo", "bar"]) # Save the SavedModel to disk. builder.save() actual_tags = saved_model_utils.get_saved_model_tag_sets(saved_model_dir) expected_tags = [["train"], ["serve"], ["serve", "gpu"], ["serve", "tpu"], ["foo", "bar"]] self.assertEqual(expected_tags, actual_tags) if __name__ == "__main__": test.main()
apache-2.0
infobloxopen/neutron
neutron/tests/unit/api/test_extensions.py
11
33104
# Copyright (c) 2011 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import mock from oslo_config import cfg from oslo_log import log as logging from oslo_serialization import jsonutils import routes import webob import webob.exc as webexc import webtest import neutron from neutron.api import extensions from neutron.api.v2 import attributes from neutron.common import config from neutron.common import exceptions from neutron.db import db_base_plugin_v2 from neutron import manager from neutron.plugins.common import constants from neutron.plugins.ml2 import plugin as ml2_plugin from neutron import quota from neutron.tests import base from neutron.tests.unit.api.v2 import test_base from neutron.tests.unit import extension_stubs as ext_stubs import neutron.tests.unit.extensions from neutron.tests.unit.extensions import extendedattribute as extattr from neutron.tests.unit import testlib_api from neutron import wsgi LOG = logging.getLogger(__name__) _uuid = test_base._uuid _get_path = test_base._get_path extensions_path = ':'.join(neutron.tests.unit.extensions.__path__) class ExtensionsTestApp(wsgi.Router): def __init__(self, options={}): mapper = routes.Mapper() controller = ext_stubs.StubBaseAppController() mapper.resource("dummy_resource", "/dummy_resources", controller=controller) super(ExtensionsTestApp, self).__init__(mapper) class FakePluginWithExtension(db_base_plugin_v2.NeutronDbPluginV2): """A fake plugin used only for 
extension testing in this file.""" supported_extension_aliases = ["FOXNSOX"] def method_to_support_foxnsox_extension(self, context): self._log("method_to_support_foxnsox_extension", context) class PluginInterfaceTest(base.BaseTestCase): def test_issubclass_hook(self): class A(object): def f(self): pass class B(extensions.PluginInterface): @abc.abstractmethod def f(self): pass self.assertTrue(issubclass(A, B)) def test_issubclass_hook_class_without_abstract_methods(self): class A(object): def f(self): pass class B(extensions.PluginInterface): def f(self): pass self.assertFalse(issubclass(A, B)) def test_issubclass_hook_not_all_methods_implemented(self): class A(object): def f(self): pass class B(extensions.PluginInterface): @abc.abstractmethod def f(self): pass @abc.abstractmethod def g(self): pass self.assertFalse(issubclass(A, B)) class ResourceExtensionTest(base.BaseTestCase): class ResourceExtensionController(wsgi.Controller): def index(self, request): return "resource index" def show(self, request, id): return {'data': {'id': id}} def notimplemented_function(self, request, id): return webob.exc.HTTPNotImplemented() def custom_member_action(self, request, id): return {'member_action': 'value'} def custom_collection_action(self, request, **kwargs): return {'collection': 'value'} class DummySvcPlugin(wsgi.Controller): def get_plugin_type(self): return constants.DUMMY def index(self, request, **kwargs): return "resource index" def custom_member_action(self, request, **kwargs): return {'member_action': 'value'} def collection_action(self, request, **kwargs): return {'collection': 'value'} def show(self, request, id): return {'data': {'id': id}} def test_exceptions_notimplemented(self): controller = self.ResourceExtensionController() member = {'notimplemented_function': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, member_actions=member) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) # Ideally we would check for a 501 
code here but webtest doesn't take # anything that is below 200 or above 400 so we can't actually check # it. It throws webtest.AppError instead. try: test_app.get("/tweedles/some_id/notimplemented_function") # Shouldn't be reached self.assertTrue(False) except webtest.AppError as e: self.assertIn('501', e.message) def test_resource_can_be_added_as_extension(self): res_ext = extensions.ResourceExtension( 'tweedles', self.ResourceExtensionController()) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) index_response = test_app.get("/tweedles") self.assertEqual(200, index_response.status_int) self.assertEqual("resource index", index_response.body) show_response = test_app.get("/tweedles/25266") self.assertEqual({'data': {'id': "25266"}}, show_response.json) def test_resource_gets_prefix_of_plugin(self): class DummySvcPlugin(wsgi.Controller): def index(self, request): return "" def get_plugin_type(self): return constants.DUMMY res_ext = extensions.ResourceExtension( 'tweedles', DummySvcPlugin(), path_prefix="/dummy_svc") test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) index_response = test_app.get("/dummy_svc/tweedles") self.assertEqual(200, index_response.status_int) def test_resource_extension_with_custom_member_action(self): controller = self.ResourceExtensionController() member = {'custom_member_action': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, member_actions=member) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.get("/tweedles/some_id/custom_member_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['member_action'], "value") def test_resource_ext_with_custom_member_action_gets_plugin_prefix(self): controller = self.DummySvcPlugin() member = {'custom_member_action': "GET"} collections = {'collection_action': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, 
path_prefix="/dummy_svc", member_actions=member, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.get("/dummy_svc/tweedles/1/custom_member_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['member_action'], "value") response = test_app.get("/dummy_svc/tweedles/collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], "value") def test_plugin_prefix_with_parent_resource(self): controller = self.DummySvcPlugin() parent = dict(member_name="tenant", collection_name="tenants") member = {'custom_member_action': "GET"} collections = {'collection_action': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, parent, path_prefix="/dummy_svc", member_actions=member, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) index_response = test_app.get("/dummy_svc/tenants/1/tweedles") self.assertEqual(200, index_response.status_int) response = test_app.get("/dummy_svc/tenants/1/" "tweedles/1/custom_member_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['member_action'], "value") response = test_app.get("/dummy_svc/tenants/2/" "tweedles/collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], "value") def test_resource_extension_for_get_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.get("/tweedles/custom_collection_action") self.assertEqual(200, response.status_int) LOG.debug(jsonutils.loads(response.body)) 
self.assertEqual(jsonutils.loads(response.body)['collection'], "value") def test_resource_extension_for_put_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "PUT"} res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.put("/tweedles/custom_collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], 'value') def test_resource_extension_for_post_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "POST"} res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.post("/tweedles/custom_collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], 'value') def test_resource_extension_for_delete_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "DELETE"} res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.delete("/tweedles/custom_collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], 'value') def test_resource_ext_for_formatted_req_on_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "GET"} res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = 
test_app.get("/tweedles/custom_collection_action.json") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], "value") def test_resource_ext_for_nested_resource_custom_collection_action(self): controller = self.ResourceExtensionController() collections = {'custom_collection_action': "GET"} parent = dict(collection_name='beetles', member_name='beetle') res_ext = extensions.ResourceExtension('tweedles', controller, collection_actions=collections, parent=parent) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.get("/beetles/beetle_id" "/tweedles/custom_collection_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['collection'], "value") def test_resource_extension_with_custom_member_action_and_attr_map(self): controller = self.ResourceExtensionController() member = {'custom_member_action': "GET"} params = { 'tweedles': { 'id': {'allow_post': False, 'allow_put': False, 'validate': {'type:uuid': None}, 'is_visible': True}, 'name': {'allow_post': True, 'allow_put': True, 'validate': {'type:string': None}, 'default': '', 'is_visible': True}, } } res_ext = extensions.ResourceExtension('tweedles', controller, member_actions=member, attr_map=params) test_app = _setup_extensions_test_app(SimpleExtensionManager(res_ext)) response = test_app.get("/tweedles/some_id/custom_member_action") self.assertEqual(200, response.status_int) self.assertEqual(jsonutils.loads(response.body)['member_action'], "value") def test_returns_404_for_non_existent_extension(self): test_app = _setup_extensions_test_app(SimpleExtensionManager(None)) response = test_app.get("/non_extistant_extension", status='*') self.assertEqual(404, response.status_int) class ActionExtensionTest(base.BaseTestCase): def setUp(self): super(ActionExtensionTest, self).setUp() self.extension_app = _setup_extensions_test_app() def test_extended_action_for_adding_extra_data(self): 
action_name = 'FOXNSOX:add_tweedle' action_params = dict(name='Beetle') req_body = jsonutils.dumps({action_name: action_params}) response = self.extension_app.post('/dummy_resources/1/action', req_body, content_type='application/json') self.assertEqual("Tweedle Beetle Added.", response.body) def test_extended_action_for_deleting_extra_data(self): action_name = 'FOXNSOX:delete_tweedle' action_params = dict(name='Bailey') req_body = jsonutils.dumps({action_name: action_params}) response = self.extension_app.post("/dummy_resources/1/action", req_body, content_type='application/json') self.assertEqual("Tweedle Bailey Deleted.", response.body) def test_returns_404_for_non_existent_action(self): non_existent_action = 'blah_action' action_params = dict(name="test") req_body = jsonutils.dumps({non_existent_action: action_params}) response = self.extension_app.post("/dummy_resources/1/action", req_body, content_type='application/json', status='*') self.assertEqual(404, response.status_int) def test_returns_404_for_non_existent_resource(self): action_name = 'add_tweedle' action_params = dict(name='Beetle') req_body = jsonutils.dumps({action_name: action_params}) response = self.extension_app.post("/asdf/1/action", req_body, content_type='application/json', status='*') self.assertEqual(404, response.status_int) class RequestExtensionTest(base.BaseTestCase): def test_headers_can_be_extended(self): def extend_headers(req, res): assert req.headers['X-NEW-REQUEST-HEADER'] == "sox" res.headers['X-NEW-RESPONSE-HEADER'] = "response_header_data" return res app = self._setup_app_with_request_handler(extend_headers, 'GET') response = app.get("/dummy_resources/1", headers={'X-NEW-REQUEST-HEADER': "sox"}) self.assertEqual(response.headers['X-NEW-RESPONSE-HEADER'], "response_header_data") def test_extend_get_resource_response(self): def extend_response_data(req, res): data = jsonutils.loads(res.body) data['FOXNSOX:extended_key'] = req.GET.get('extended_key') res.body = 
jsonutils.dumps(data) return res app = self._setup_app_with_request_handler(extend_response_data, 'GET') response = app.get("/dummy_resources/1?extended_key=extended_data") self.assertEqual(200, response.status_int) response_data = jsonutils.loads(response.body) self.assertEqual('extended_data', response_data['FOXNSOX:extended_key']) self.assertEqual('knox', response_data['fort']) def test_get_resources(self): app = _setup_extensions_test_app() response = app.get("/dummy_resources/1?chewing=newblue") response_data = jsonutils.loads(response.body) self.assertEqual('newblue', response_data['FOXNSOX:googoose']) self.assertEqual("Pig Bands!", response_data['FOXNSOX:big_bands']) def test_edit_previously_uneditable_field(self): def _update_handler(req, res): data = jsonutils.loads(res.body) data['uneditable'] = req.params['uneditable'] res.body = jsonutils.dumps(data) return res base_app = webtest.TestApp(setup_base_app(self)) response = base_app.put("/dummy_resources/1", {'uneditable': "new_value"}) self.assertEqual(response.json['uneditable'], "original_value") ext_app = self._setup_app_with_request_handler(_update_handler, 'PUT') ext_response = ext_app.put("/dummy_resources/1", {'uneditable': "new_value"}) self.assertEqual(ext_response.json['uneditable'], "new_value") def _setup_app_with_request_handler(self, handler, verb): req_ext = extensions.RequestExtension(verb, '/dummy_resources/:(id)', handler) manager = SimpleExtensionManager(None, None, req_ext) return _setup_extensions_test_app(manager) class ExtensionManagerTest(base.BaseTestCase): def test_invalid_extensions_are_not_registered(self): class InvalidExtension(object): """Invalid extension. 
This Extension doesn't implement extension methods : get_name, get_description, get_namespace and get_updated """ def get_alias(self): return "invalid_extension" ext_mgr = extensions.ExtensionManager('') ext_mgr.add_extension(InvalidExtension()) ext_mgr.add_extension(ext_stubs.StubExtension("valid_extension")) self.assertIn('valid_extension', ext_mgr.extensions) self.assertNotIn('invalid_extension', ext_mgr.extensions) class PluginAwareExtensionManagerTest(base.BaseTestCase): def test_unsupported_extensions_are_not_loaded(self): stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1", "e3"]) plugin_info = {constants.CORE: stub_plugin} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." "check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ext_stubs.StubExtension("e1")) ext_mgr.add_extension(ext_stubs.StubExtension("e2")) ext_mgr.add_extension(ext_stubs.StubExtension("e3")) self.assertIn("e1", ext_mgr.extensions) self.assertNotIn("e2", ext_mgr.extensions) self.assertIn("e3", ext_mgr.extensions) def test_extensions_are_not_loaded_for_plugins_unaware_of_extensions(self): class ExtensionUnawarePlugin(object): """This plugin does not implement supports_extension method. Extensions will not be loaded when this plugin is used. """ pass plugin_info = {constants.CORE: ExtensionUnawarePlugin()} ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ext_stubs.StubExtension("e1")) self.assertNotIn("e1", ext_mgr.extensions) def test_extensions_not_loaded_for_plugin_without_expected_interface(self): class PluginWithoutExpectedIface(object): """Does not implement get_foo method as expected by extension.""" supported_extension_aliases = ["supported_extension"] plugin_info = {constants.CORE: PluginWithoutExpectedIface()} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." 
"check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ext_stubs.ExtensionExpectingPluginInterface( "supported_extension")) self.assertNotIn("e1", ext_mgr.extensions) def test_extensions_are_loaded_for_plugin_with_expected_interface(self): class PluginWithExpectedInterface(object): """Implements get_foo method as expected by extension.""" supported_extension_aliases = ["supported_extension"] def get_foo(self, bar=None): pass plugin_info = {constants.CORE: PluginWithExpectedInterface()} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." "check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ext_stubs.ExtensionExpectingPluginInterface( "supported_extension")) self.assertIn("supported_extension", ext_mgr.extensions) def test_extensions_expecting_neutron_plugin_interface_are_loaded(self): class ExtensionForQuamtumPluginInterface(ext_stubs.StubExtension): """This Extension does not implement get_plugin_interface method. This will work with any plugin implementing NeutronPluginBase """ pass stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"]) plugin_info = {constants.CORE: stub_plugin} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." "check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ExtensionForQuamtumPluginInterface("e1")) self.assertIn("e1", ext_mgr.extensions) def test_extensions_without_need_for__plugin_interface_are_loaded(self): class ExtensionWithNoNeedForPluginInterface(ext_stubs.StubExtension): """This Extension does not need any plugin interface. 
This will work with any plugin implementing NeutronPluginBase """ def get_plugin_interface(self): return None stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"]) plugin_info = {constants.CORE: stub_plugin} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." "check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(ExtensionWithNoNeedForPluginInterface("e1")) self.assertIn("e1", ext_mgr.extensions) def test_extension_loaded_for_non_core_plugin(self): class NonCorePluginExtenstion(ext_stubs.StubExtension): def get_plugin_interface(self): return None stub_plugin = ext_stubs.StubPlugin(supported_extensions=["e1"]) plugin_info = {constants.DUMMY: stub_plugin} with mock.patch("neutron.api.extensions.PluginAwareExtensionManager." "check_if_plugin_extensions_loaded"): ext_mgr = extensions.PluginAwareExtensionManager('', plugin_info) ext_mgr.add_extension(NonCorePluginExtenstion("e1")) self.assertIn("e1", ext_mgr.extensions) def test_unloaded_supported_extensions_raises_exception(self): stub_plugin = ext_stubs.StubPlugin( supported_extensions=["unloaded_extension"]) plugin_info = {constants.CORE: stub_plugin} self.assertRaises(exceptions.ExtensionsNotFound, extensions.PluginAwareExtensionManager, '', plugin_info) class ExtensionControllerTest(testlib_api.WebTestCase): def setUp(self): super(ExtensionControllerTest, self).setUp() self.test_app = _setup_extensions_test_app() def test_index_gets_all_registerd_extensions(self): response = self.test_app.get("/extensions." + self.fmt) res_body = self.deserialize(response) foxnsox = res_body["extensions"][0] self.assertEqual(foxnsox["alias"], "FOXNSOX") self.assertEqual(foxnsox["namespace"], "http://www.fox.in.socks/api/ext/pie/v1.0") def test_extension_can_be_accessed_by_alias(self): response = self.test_app.get("/extensions/FOXNSOX." 
+ self.fmt) foxnsox_extension = self.deserialize(response) foxnsox_extension = foxnsox_extension['extension'] self.assertEqual(foxnsox_extension["alias"], "FOXNSOX") self.assertEqual(foxnsox_extension["namespace"], "http://www.fox.in.socks/api/ext/pie/v1.0") def test_show_returns_not_found_for_non_existent_extension(self): response = self.test_app.get("/extensions/non_existent" + self.fmt, status="*") self.assertEqual(response.status_int, 404) def app_factory(global_conf, **local_conf): conf = global_conf.copy() conf.update(local_conf) return ExtensionsTestApp(conf) def setup_base_app(test): base.BaseTestCase.config_parse() app = config.load_paste_app('extensions_test_app') return app def setup_extensions_middleware(extension_manager=None): extension_manager = (extension_manager or extensions.PluginAwareExtensionManager( extensions_path, {constants.CORE: FakePluginWithExtension()})) base.BaseTestCase.config_parse() app = config.load_paste_app('extensions_test_app') return extensions.ExtensionMiddleware(app, ext_mgr=extension_manager) def _setup_extensions_test_app(extension_manager=None): return webtest.TestApp(setup_extensions_middleware(extension_manager)) class SimpleExtensionManager(object): def __init__(self, resource_ext=None, action_ext=None, request_ext=None): self.resource_ext = resource_ext self.action_ext = action_ext self.request_ext = request_ext def get_resources(self): resource_exts = [] if self.resource_ext: resource_exts.append(self.resource_ext) return resource_exts def get_actions(self): action_exts = [] if self.action_ext: action_exts.append(self.action_ext) return action_exts def get_request_extensions(self): request_extensions = [] if self.request_ext: request_extensions.append(self.request_ext) return request_extensions class ExtensionExtendedAttributeTestPlugin( ml2_plugin.Ml2Plugin): supported_extension_aliases = [ 'ext-obj-test', "extended-ext-attr" ] def __init__(self, configfile=None): super(ExtensionExtendedAttributeTestPlugin, self) 
self.objs = [] self.objh = {} def create_ext_test_resource(self, context, ext_test_resource): obj = ext_test_resource['ext_test_resource'] id = _uuid() obj['id'] = id self.objs.append(obj) self.objh.update({id: obj}) return obj def get_ext_test_resources(self, context, filters=None, fields=None): return self.objs def get_ext_test_resource(self, context, id, fields=None): return self.objh[id] class ExtensionExtendedAttributeTestCase(base.BaseTestCase): def setUp(self): super(ExtensionExtendedAttributeTestCase, self).setUp() plugin = ( "neutron.tests.unit.api.test_extensions." "ExtensionExtendedAttributeTestPlugin" ) # point config file to: neutron/tests/etc/neutron.conf.test self.config_parse() self.setup_coreplugin(plugin) ext_mgr = extensions.PluginAwareExtensionManager( extensions_path, {constants.CORE: ExtensionExtendedAttributeTestPlugin} ) ext_mgr.extend_resources("2.0", {}) extensions.PluginAwareExtensionManager._instance = ext_mgr app = config.load_paste_app('extensions_test_app') self._api = extensions.ExtensionMiddleware(app, ext_mgr=ext_mgr) self._tenant_id = "8c70909f-b081-452d-872b-df48e6c355d1" # Save the global RESOURCE_ATTRIBUTE_MAP self.saved_attr_map = {} for resource, attrs in attributes.RESOURCE_ATTRIBUTE_MAP.iteritems(): self.saved_attr_map[resource] = attrs.copy() # Add the resources to the global attribute map # This is done here as the setup process won't # initialize the main API router which extends # the global attribute map attributes.RESOURCE_ATTRIBUTE_MAP.update( extattr.EXTENDED_ATTRIBUTES_2_0) self.agentscheduler_dbMinxin = manager.NeutronManager.get_plugin() self.addCleanup(self.restore_attribute_map) quota.QUOTAS._driver = None cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver', group='QUOTAS') def restore_attribute_map(self): # Restore the original RESOURCE_ATTRIBUTE_MAP attributes.RESOURCE_ATTRIBUTE_MAP = self.saved_attr_map def _do_request(self, method, path, data=None, params=None, action=None): content_type = 
'application/json' body = None if data is not None: # empty dict is valid body = wsgi.Serializer().serialize(data, content_type) req = testlib_api.create_request( path, body, content_type, method, query_string=params) res = req.get_response(self._api) if res.status_code >= 400: raise webexc.HTTPClientError(detail=res.body, code=res.status_code) if res.status_code != webexc.HTTPNoContent.code: return res.json def _ext_test_resource_create(self, attr=None): data = { "ext_test_resource": { "tenant_id": self._tenant_id, "name": "test", extattr.EXTENDED_ATTRIBUTE: attr } } res = self._do_request('POST', _get_path('ext_test_resources'), data) return res['ext_test_resource'] def test_ext_test_resource_create(self): ext_test_resource = self._ext_test_resource_create() attr = _uuid() ext_test_resource = self._ext_test_resource_create(attr) self.assertEqual(ext_test_resource[extattr.EXTENDED_ATTRIBUTE], attr) def test_ext_test_resource_get(self): attr = _uuid() obj = self._ext_test_resource_create(attr) obj_id = obj['id'] res = self._do_request('GET', _get_path( 'ext_test_resources/{0}'.format(obj_id))) obj2 = res['ext_test_resource'] self.assertEqual(obj2[extattr.EXTENDED_ATTRIBUTE], attr)
apache-2.0
ravibhure/ansible
lib/ansible/utils/module_docs_fragments/dellos9.py
75
2591
# # (c) 2015, Peter Sprygada <[email protected]> # # Copyright (c) 2016 Dell Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. class ModuleDocFragment(object): # Standard files documentation fragment DOCUMENTATION = """ options: provider: description: - A dict object containing connection details. default: null suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. required: true port: description: - Specifies the port to use when building the connection to the remote device. default: 22 username: description: - User to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. password: description: - Password to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. default: null ssh_keyfile: description: - Path to an ssh key used to authenticate the SSH session to the remote device. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. timeout: description: - Specifies idle timeout (in seconds) for the connection. 
Useful if the console freezes before continuing. For example when saving configurations. default: 10 notes: - For more information on using Ansible to manage Dell EMC Network devices see U(https://www.ansible.com/ansible-dell-networking). """
gpl-3.0
adedayo/intellij-community
python/lib/Lib/site-packages/django/contrib/messages/storage/cookie.py
89
5873
from django.conf import settings from django.contrib.messages import constants from django.contrib.messages.storage.base import BaseStorage, Message from django.http import CompatCookie from django.utils import simplejson as json from django.utils.crypto import salted_hmac, constant_time_compare class MessageEncoder(json.JSONEncoder): """ Compactly serializes instances of the ``Message`` class as JSON. """ message_key = '__json_message' def default(self, obj): if isinstance(obj, Message): message = [self.message_key, obj.level, obj.message] if obj.extra_tags: message.append(obj.extra_tags) return message return super(MessageEncoder, self).default(obj) class MessageDecoder(json.JSONDecoder): """ Decodes JSON that includes serialized ``Message`` instances. """ def process_messages(self, obj): if isinstance(obj, list) and obj: if obj[0] == MessageEncoder.message_key: return Message(*obj[1:]) return [self.process_messages(item) for item in obj] if isinstance(obj, dict): return dict([(key, self.process_messages(value)) for key, value in obj.iteritems()]) return obj def decode(self, s, **kwargs): decoded = super(MessageDecoder, self).decode(s, **kwargs) return self.process_messages(decoded) class CookieStorage(BaseStorage): """ Stores messages in a cookie. """ cookie_name = 'messages' # We should be able to store 4K in a cookie, but Internet Explorer # imposes 4K as the *total* limit for a domain. To allow other # cookies, we go for 3/4 of 4K. max_cookie_size = 3072 not_finished = '__messagesnotfinished__' def _get(self, *args, **kwargs): """ Retrieves a list of messages from the messages cookie. If the not_finished sentinel value is found at the end of the message list, remove it and return a result indicating that not all messages were retrieved by this storage. 
""" data = self.request.COOKIES.get(self.cookie_name) messages = self._decode(data) all_retrieved = not (messages and messages[-1] == self.not_finished) if messages and not all_retrieved: # remove the sentinel value messages.pop() return messages, all_retrieved def _update_cookie(self, encoded_data, response): """ Either sets the cookie with the encoded data if there is any data to store, or deletes the cookie. """ if encoded_data: response.set_cookie(self.cookie_name, encoded_data) else: response.delete_cookie(self.cookie_name) def _store(self, messages, response, remove_oldest=True, *args, **kwargs): """ Stores the messages to a cookie, returning a list of any messages which could not be stored. If the encoded data is larger than ``max_cookie_size``, removes messages until the data fits (these are the messages which are returned), and add the not_finished sentinel value to indicate as much. """ unstored_messages = [] encoded_data = self._encode(messages) if self.max_cookie_size: # data is going to be stored eventually by CompatCookie, which # adds it's own overhead, which we must account for. cookie = CompatCookie() # create outside the loop def stored_length(val): return len(cookie.value_encode(val)[1]) while encoded_data and stored_length(encoded_data) > self.max_cookie_size: if remove_oldest: unstored_messages.append(messages.pop(0)) else: unstored_messages.insert(0, messages.pop()) encoded_data = self._encode(messages + [self.not_finished], encode_empty=unstored_messages) self._update_cookie(encoded_data, response) return unstored_messages def _hash(self, value): """ Creates an HMAC/SHA1 hash based on the value and the project setting's SECRET_KEY, modified to make it unique for the present purpose. """ key_salt = 'django.contrib.messages' return salted_hmac(key_salt, value).hexdigest() def _encode(self, messages, encode_empty=False): """ Returns an encoded version of the messages list which can be stored as plain text. 
Since the data will be retrieved from the client-side, the encoded data also contains a hash to ensure that the data was not tampered with. """ if messages or encode_empty: encoder = MessageEncoder(separators=(',', ':')) value = encoder.encode(messages) return '%s$%s' % (self._hash(value), value) def _decode(self, data): """ Safely decodes a encoded text stream back into a list of messages. If the encoded text stream contained an invalid hash or was in an invalid format, ``None`` is returned. """ if not data: return None bits = data.split('$', 1) if len(bits) == 2: hash, value = bits if constant_time_compare(hash, self._hash(value)): try: # If we get here (and the JSON decode works), everything is # good. In any other case, drop back and return None. return json.loads(value, cls=MessageDecoder) except ValueError: pass # Mark the data as used (so it gets removed) since something was wrong # with the data. self.used = True return None
apache-2.0
rahul67/hue
desktop/core/ext-py/requests-2.6.0/requests/packages/urllib3/connection.py
371
8967
import datetime import sys import socket from socket import timeout as SocketTimeout import warnings from .packages import six try: # Python 3 from http.client import HTTPConnection as _HTTPConnection, HTTPException except ImportError: from httplib import HTTPConnection as _HTTPConnection, HTTPException class DummyConnection(object): "Used to detect a failed ConnectionCls import." pass try: # Compiled with SSL? HTTPSConnection = DummyConnection import ssl BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. ssl = None class BaseSSLError(BaseException): pass try: # Python 3: # Not a no-op, we're adding this to the namespace so it can be imported. ConnectionError = ConnectionError except NameError: # Python 2: class ConnectionError(Exception): pass from .exceptions import ( ConnectTimeoutError, SystemTimeWarning, SecurityWarning, ) from .packages.ssl_match_hostname import match_hostname from .util.ssl_ import ( resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, assert_fingerprint, ) from .util import connection port_by_scheme = { 'http': 80, 'https': 443, } RECENT_DATE = datetime.date(2014, 1, 1) class HTTPConnection(_HTTPConnection, object): """ Based on httplib.HTTPConnection but provides an extra constructor backwards-compatibility layer between older and newer Pythons. Additional keyword parameters are used to configure attributes of the connection. Accepted parameters include: - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` - ``source_address``: Set the source address for the current connection. .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x - ``socket_options``: Set specific options on the underlying socket. If not specified, then defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. 
For example, if you wish to enable TCP Keep Alive in addition to the defaults, you might pass:: HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ] Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). """ default_port = port_by_scheme['http'] #: Disable Nagle's algorithm by default. #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] #: Whether this connection verifies the host's certificate. is_verified = False def __init__(self, *args, **kw): if six.PY3: # Python 3 kw.pop('strict', None) # Pre-set source_address in case we have an older Python like 2.6. self.source_address = kw.get('source_address') if sys.version_info < (2, 7): # Python 2.6 # _HTTPConnection on Python 2.6 will balk at this keyword arg, but # not newer versions. We can still use it when creating a # connection though, so we pop it *after* we have saved it as # self.source_address. kw.pop('source_address', None) #: The socket options provided by the user. If no options are #: provided, we use the default options. self.socket_options = kw.pop('socket_options', self.default_socket_options) # Superclass also sets self.source_address in Python 2.7+. _HTTPConnection.__init__(self, *args, **kw) def _new_conn(self): """ Establish a socket connection and set nodelay settings on it. :return: New socket connection. """ extra_kw = {} if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: conn = connection.create_connection( (self.host, self.port), self.timeout, **extra_kw) except SocketTimeout: raise ConnectTimeoutError( self, "Connection to %s timed out. 
(connect timeout=%s)" % (self.host, self.timeout)) return conn def _prepare_conn(self, conn): self.sock = conn # the _tunnel_host attribute was added in python 2.6.3 (via # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do # not have them. if getattr(self, '_tunnel_host', None): # TODO: Fix tunnel so it doesn't depend on self.sock state. self._tunnel() # Mark this connection as not reusable self.auto_open = 0 def connect(self): conn = self._new_conn() self._prepare_conn(conn) class HTTPSConnection(HTTPConnection): default_port = port_by_scheme['https'] def __init__(self, host, port=None, key_file=None, cert_file=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw): HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) self.key_file = key_file self.cert_file = cert_file # Required property for Google AppEngine 1.9.0 which otherwise causes # HTTPS requests to go out as HTTP. (See Issue #356) self._protocol = 'https' def connect(self): conn = self._new_conn() self._prepare_conn(conn) self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file) class VerifiedHTTPSConnection(HTTPSConnection): """ Based on httplib.HTTPSConnection but wraps the socket with SSL certification. 
""" cert_reqs = None ca_certs = None ssl_version = None assert_fingerprint = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None): self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint def connect(self): # Add certificate verification conn = self._new_conn() resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) resolved_ssl_version = resolve_ssl_version(self.ssl_version) hostname = self.host if getattr(self, '_tunnel_host', None): # _tunnel_host was added in Python 2.6.3 # (See: http://hg.python.org/cpython/rev/0f57b30a152f) self.sock = conn # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() # Mark this connection as not reusable self.auto_open = 0 # Override the host with the one we're requesting data from. hostname = self._tunnel_host is_time_off = datetime.date.today() < RECENT_DATE if is_time_off: warnings.warn(( 'System time is way off (before {0}). This will probably ' 'lead to SSL verification errors').format(RECENT_DATE), SystemTimeWarning ) # Wrap socket using verification with the root certs in # trusted_root_certs self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, server_hostname=hostname, ssl_version=resolved_ssl_version) if self.assert_fingerprint: assert_fingerprint(self.sock.getpeercert(binary_form=True), self.assert_fingerprint) elif resolved_cert_reqs != ssl.CERT_NONE \ and self.assert_hostname is not False: cert = self.sock.getpeercert() if not cert.get('subjectAltName', ()): warnings.warn(( 'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. ' 'This feature is being removed by major browsers and deprecated by RFC 2818. 
' '(See https://github.com/shazow/urllib3/issues/497 for details.)'), SecurityWarning ) match_hostname(cert, self.assert_hostname or hostname) self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or self.assert_fingerprint is not None) if ssl: # Make a copy for testing. UnverifiedHTTPSConnection = HTTPSConnection HTTPSConnection = VerifiedHTTPSConnection
apache-2.0
ansible/ansible
lib/ansible/module_utils/facts/hardware/hpux.py
159
8351
# This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import re from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector class HPUXHardware(Hardware): """ HP-UX-specific subclass of Hardware. Defines memory and CPU facts: - memfree_mb - memtotal_mb - swapfree_mb - swaptotal_mb - processor - processor_cores - processor_count - model - firmware """ platform = 'HP-UX' def populate(self, collected_facts=None): hardware_facts = {} cpu_facts = self.get_cpu_facts(collected_facts=collected_facts) memory_facts = self.get_memory_facts() hw_facts = self.get_hw_facts() hardware_facts.update(cpu_facts) hardware_facts.update(memory_facts) hardware_facts.update(hw_facts) return hardware_facts def get_cpu_facts(self, collected_facts=None): cpu_facts = {} collected_facts = collected_facts or {} if collected_facts.get('ansible_architecture') in ['9000/800', '9000/785']: rc, out, err = self.module.run_command("ioscan -FkCprocessor | wc -l", use_unsafe_shell=True) cpu_facts['processor_count'] = int(out.strip()) # Working with machinfo mess elif collected_facts.get('ansible_architecture') == 'ia64': if collected_facts.get('ansible_distribution_version') == "B.11.23": rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep 'Number of CPUs'", use_unsafe_shell=True) 
if out: cpu_facts['processor_count'] = int(out.strip().split('=')[1]) rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep 'processor family'", use_unsafe_shell=True) if out: cpu_facts['processor'] = re.search('.*(Intel.*)', out).groups()[0].strip() rc, out, err = self.module.run_command("ioscan -FkCprocessor | wc -l", use_unsafe_shell=True) cpu_facts['processor_cores'] = int(out.strip()) if collected_facts.get('ansible_distribution_version') == "B.11.31": # if machinfo return cores strings release B.11.31 > 1204 rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep core | wc -l", use_unsafe_shell=True) if out.strip() == '0': rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Intel", use_unsafe_shell=True) cpu_facts['processor_count'] = int(out.strip().split(" ")[0]) # If hyperthreading is active divide cores by 2 rc, out, err = self.module.run_command("/usr/sbin/psrset | grep LCPU", use_unsafe_shell=True) data = re.sub(' +', ' ', out).strip().split(' ') if len(data) == 1: hyperthreading = 'OFF' else: hyperthreading = data[1] rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep logical", use_unsafe_shell=True) data = out.strip().split(" ") if hyperthreading == 'ON': cpu_facts['processor_cores'] = int(data[0]) / 2 else: if len(data) == 1: cpu_facts['processor_cores'] = cpu_facts['processor_count'] else: cpu_facts['processor_cores'] = int(data[0]) rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Intel |cut -d' ' -f4-", use_unsafe_shell=True) cpu_facts['processor'] = out.strip() else: rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | egrep 'socket[s]?$' | tail -1", use_unsafe_shell=True) cpu_facts['processor_count'] = int(out.strip().split(" ")[0]) rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep -e '[0-9] core' | tail -1", use_unsafe_shell=True) cpu_facts['processor_cores'] = int(out.strip().split(" ")[0]) rc, out, err = 
self.module.run_command("/usr/contrib/bin/machinfo | grep Intel", use_unsafe_shell=True) cpu_facts['processor'] = out.strip() return cpu_facts def get_memory_facts(self, collected_facts=None): memory_facts = {} collected_facts = collected_facts or {} pagesize = 4096 rc, out, err = self.module.run_command("/usr/bin/vmstat | tail -1", use_unsafe_shell=True) data = int(re.sub(' +', ' ', out).split(' ')[5].strip()) memory_facts['memfree_mb'] = pagesize * data // 1024 // 1024 if collected_facts.get('ansible_architecture') in ['9000/800', '9000/785']: try: rc, out, err = self.module.run_command("grep Physical /var/adm/syslog/syslog.log") data = re.search('.*Physical: ([0-9]*) Kbytes.*', out).groups()[0].strip() memory_facts['memtotal_mb'] = int(data) // 1024 except AttributeError: # For systems where memory details aren't sent to syslog or the log has rotated, use parsed # adb output. Unfortunately /dev/kmem doesn't have world-read, so this only works as root. if os.access("/dev/kmem", os.R_OK): rc, out, err = self.module.run_command("echo 'phys_mem_pages/D' | adb -k /stand/vmunix /dev/kmem | tail -1 | awk '{print $2}'", use_unsafe_shell=True) if not err: data = out memory_facts['memtotal_mb'] = int(data) / 256 else: rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo | grep Memory", use_unsafe_shell=True) data = re.search(r'Memory[\ :=]*([0-9]*).*MB.*', out).groups()[0].strip() memory_facts['memtotal_mb'] = int(data) rc, out, err = self.module.run_command("/usr/sbin/swapinfo -m -d -f -q") memory_facts['swaptotal_mb'] = int(out.strip()) rc, out, err = self.module.run_command("/usr/sbin/swapinfo -m -d -f | egrep '^dev|^fs'", use_unsafe_shell=True) swap = 0 for line in out.strip().splitlines(): swap += int(re.sub(' +', ' ', line).split(' ')[3].strip()) memory_facts['swapfree_mb'] = swap return memory_facts def get_hw_facts(self, collected_facts=None): hw_facts = {} collected_facts = collected_facts or {} rc, out, err = self.module.run_command("model") 
hw_facts['model'] = out.strip() if collected_facts.get('ansible_architecture') == 'ia64': separator = ':' if collected_facts.get('ansible_distribution_version') == "B.11.23": separator = '=' rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo |grep -i 'Firmware revision' | grep -v BMC", use_unsafe_shell=True) hw_facts['firmware_version'] = out.split(separator)[1].strip() rc, out, err = self.module.run_command("/usr/contrib/bin/machinfo |grep -i 'Machine serial number' ", use_unsafe_shell=True) if rc == 0 and out: hw_facts['product_serial'] = out.split(separator)[1].strip() return hw_facts class HPUXHardwareCollector(HardwareCollector): _fact_class = HPUXHardware _platform = 'HP-UX' required_facts = set(['platform', 'distribution'])
gpl-3.0
carlosdiazsuarez/movierecommender
MovieRecommender/connectors/imdb/imdb_connector.py
1
2413
''' Google Movie Showtimes parser class for Python. This script provides a Python class that can be used to parse Google Movie Showtimes (www.google.com/movies) pages into dictionary objects. @author Vaidik Kapoor @version 0.1 ''' import httplib, urllib, BeautifulSoup, re from copy import deepcopy from BeautifulSoup import BeautifulSoup ''' GoogleMovieShowtimes class This class is used for getting response from www.google.com/movies ''' class IMDB: def __init__(self): return def getIDs(self, name): self.params = {'q': name.encode("latin_1")} params = deepcopy(self.params) for key, val in params.iteritems(): if val == '': self.params.pop(key) params = urllib.urlencode(self.params) conn = httplib.HTTPConnection('www.imdb.com') conn.request("GET", "/find?" + params, "") response = conn.getresponse() self.response_code = response.status self.response = response.getheaders self.response_body = "" while 1: data = response.read() if not data: break self.response_body += data if (self.response_code == 200): self.html = BeautifulSoup(self.response_body) results = self.html.findAll('td', attrs={'class': 'result_text'}) self.ids = [] for td in results: self.ids.append(td.a.attrs[0][1]) return self.ids def getTitleExtra(self, title): conn = httplib.HTTPConnection('www.imdb.com') conn.request("GET", title, "") response = conn.getresponse() self.response_code = response.status self.response = response.getheaders self.response_body = "" while 1: data = response.read() if not data: break self.response_body += data if (self.response_code == 200): self.html = BeautifulSoup(self.response_body) results = self.html.findAll('span', attrs={'class': 'title-extra'}) self.titles = [] for span in results: title = span.contents[0] title = title.replace('\n', ' ') title = title.replace('"', ' ') title = title.strip() self.titles.append(title) return self.titles
gpl-2.0
guewen/OpenUpgrade
addons/crm_project_issue/project_issue.py
380
2373
from openerp.osv import osv, fields class crm_lead_to_project_issue_wizard(osv.TransientModel): """ wizard to convert a Lead into a Project Issue and move the Mail Thread """ _name = "crm.lead2projectissue.wizard" _inherit = 'crm.partner.binding' _columns = { "lead_id": fields.many2one("crm.lead", "Lead", domain=[("type", "=", "lead")]), "project_id": fields.many2one("project.project", "Project", domain=[("use_issues", "=", True)]) } _defaults = { "lead_id": lambda self, cr, uid, context=None: context.get('active_id') } def action_lead_to_project_issue(self, cr, uid, ids, context=None): # get the wizards and models wizards = self.browse(cr, uid, ids, context=context) Lead = self.pool["crm.lead"] Issue = self.pool["project.issue"] for wizard in wizards: # get the lead to transform lead = wizard.lead_id partner = self._find_matching_partner(cr, uid, context=context) if not partner and (lead.partner_name or lead.contact_name): partner_ids = Lead.handle_partner_assignation(cr, uid, [lead.id], context=context) partner = partner_ids[lead.id] # create new project.issue vals = { "name": lead.name, "description": lead.description, "email_from": lead.email_from, "project_id": wizard.project_id.id, "partner_id": partner, "user_id": None } issue_id = Issue.create(cr, uid, vals, context=None) # move the mail thread Lead.message_change_thread(cr, uid, lead.id, issue_id, "project.issue", context=context) # delete the lead Lead.unlink(cr, uid, [lead.id], context=None) # return the action to go to the form view of the new Issue view_id = self.pool.get('ir.ui.view').search(cr, uid, [('model', '=', 'project.issue'), ('name', '=', 'project_issue_form_view')]) return { 'name': 'Issue created', 'view_type': 'form', 'view_mode': 'form', 'view_id': view_id, 'res_model': 'project.issue', 'type': 'ir.actions.act_window', 'res_id': issue_id, 'context': context }
agpl-3.0
gingerswede/ITSecCardGame
src/View/Game.py
1
2031
''' IDE: Eclipse (PyDev) Python version: 2.7 Operating system: Windows 8.1 @author: Emil Carlsson @copyright: 2015 Emil Carlsson @license: This program is distributed under the terms of the GNU General Public License ''' from View import GlobalFunc from View.Board import Board class GameView(object): __root = None __controller = None __boardView = None def __init__(self, root, gameController, *args, **kwargs): self.__root = root self.__controller = gameController def StartNewGame(self, player=None, opponent=None): GlobalFunc.RemoveAllChildren(self.__root) self.__boardView = Board(self.__root, self.__controller, player, opponent) def OutOfMoves(self): self.__boardView.AddInformation("You are out of moves.\nPlease finnish your turn.") def ResetInformation(self): self.__boardView.ResetInformation() def MaxHandSize(self): self.__boardView.AddInformation("Maximum hand size reached.\nPlease play a card if possible.") def MaxVisibleHandSize(self): self.__boardView.AddInformation("Maximum amount of visible cards reached.") def RefreshBoard(self, playerOne, playerTwo): self.__boardView.RefreshBoard(playerOne, playerTwo) def RemoveFrame(self, frame): frame.destroy() def PlayerLost(self): self.__boardView.AddInformation("You lost!\nGame Over!") def PlayerWon(self): self.__boardView.AddInformation("You won!\nGame Over!") def OutOfCards(self): self.__boardView.AddInformation("You are out of cards in your deck.") def CardNotInHand(self): self.__boardView.AddInformation("Card not on your hand.") #TODO: Does not show def WaitingForOpponent(self): self.__boardView.AddInformation("Waiting for opponent...") def AppendMessage(self, message): self.__boardView.AppendInformation(message)
gpl-3.0
jimarnold/gomatic
gomatic/go_cd_configurator.py
1
10148
#!/usr/bin/env python import json import time import xml.etree.ElementTree as ET import argparse import sys import subprocess import requests from decimal import Decimal from gomatic.gocd.pipelines import Pipeline, PipelineGroup from gomatic.gocd.agents import Agent from gomatic.xml_operations import Ensurance, PossiblyMissingElement, move_all_to_end, prettify class GoCdConfigurator(object): def __init__(self, host_rest_client): self.__host_rest_client = host_rest_client self.__set_initial_config_xml() def __set_initial_config_xml(self): self.__initial_config, self._initial_md5 = self.__current_config_response() self.__xml_root = ET.fromstring(self.__initial_config) def __repr__(self): return "GoCdConfigurator(%s)" % self.__host_rest_client def as_python(self, pipeline, with_save=True): result = "#!/usr/bin/env python\nfrom gomatic import *\n\nconfigurator = " + str(self) + "\n" result += "pipeline = configurator" result += pipeline.as_python_commands_applied_to_server() save_part = "" if with_save: save_part = "\n\nconfigurator.save_updated_config(save_config_locally=True, dry_run=True)" return result + save_part @property def current_config(self): return self.__current_config_response()[0] def __current_config_response(self): config_url = "/go/admin/restful/configuration/file/GET/xml" response = self.__host_rest_client.get(config_url) if response.status_code != 200: raise Exception("Failed to get {} status {}\n:{}".format(config_url, response.status_code, response.text)) return response.text, response.headers['x-cruise-config-md5'] def reorder_elements_to_please_go(self): move_all_to_end(self.__xml_root, 'pipelines') move_all_to_end(self.__xml_root, 'templates') move_all_to_end(self.__xml_root, 'environments') move_all_to_end(self.__xml_root, 'agents') for pipeline in self.pipelines: pipeline.reorder_elements_to_please_go() for template in self.templates: template.reorder_elements_to_please_go() @property def config(self): self.reorder_elements_to_please_go() 
return ET.tostring(self.__xml_root, 'utf-8') @property def artifacts_dir(self): return self.__possibly_missing_server_element().attribute('artifactsdir') @artifacts_dir.setter def artifacts_dir(self, artifacts_dir): self.__server_element_ensurance().set('artifactsdir', artifacts_dir) @property def site_url(self): return self.__possibly_missing_server_element().attribute('siteUrl') @site_url.setter def site_url(self, site_url): self.__server_element_ensurance().set('siteUrl', site_url) @property def agent_auto_register_key(self): return self.__possibly_missing_server_element().attribute('agentAutoRegisterKey') @agent_auto_register_key.setter def agent_auto_register_key(self, agent_auto_register_key): self.__server_element_ensurance().set('agentAutoRegisterKey', agent_auto_register_key) @property def purge_start(self): return self.__server_decimal_attribute('purgeStart') @purge_start.setter def purge_start(self, purge_start_decimal): assert isinstance(purge_start_decimal, Decimal) self.__server_element_ensurance().set('purgeStart', str(purge_start_decimal)) @property def purge_upto(self): return self.__server_decimal_attribute('purgeUpto') @purge_upto.setter def purge_upto(self, purge_upto_decimal): assert isinstance(purge_upto_decimal, Decimal) self.__server_element_ensurance().set('purgeUpto', str(purge_upto_decimal)) def __server_decimal_attribute(self, attribute_name): attribute = self.__possibly_missing_server_element().attribute(attribute_name) return Decimal(attribute) if attribute else None def __possibly_missing_server_element(self): return PossiblyMissingElement(self.__xml_root).possibly_missing_child('server') def __server_element_ensurance(self): return Ensurance(self.__xml_root).ensure_child('server') @property def pipeline_groups(self): return [PipelineGroup(e, self) for e in self.__xml_root.findall('pipelines')] def ensure_pipeline_group(self, group_name): pipeline_group_element = Ensurance(self.__xml_root).ensure_child_with_attribute("pipelines", 
"group", group_name) return PipelineGroup(pipeline_group_element.element, self) def ensure_removal_of_pipeline_group(self, group_name): matching = [g for g in self.pipeline_groups if g.name == group_name] for group in matching: self.__xml_root.remove(group.element) return self def remove_all_pipeline_groups(self): for e in self.__xml_root.findall('pipelines'): self.__xml_root.remove(e) return self @property def agents(self): return [Agent(e) for e in PossiblyMissingElement(self.__xml_root).possibly_missing_child('agents').findall('agent')] def ensure_removal_of_agent(self, hostname): matching = [agent for agent in self.agents if agent.hostname == hostname] for agent in matching: Ensurance(self.__xml_root).ensure_child('agents').element.remove(agent._element) return self @property def pipelines(self): result = [] groups = self.pipeline_groups for group in groups: result.extend(group.pipelines) return result @property def templates(self): return [Pipeline(e, 'templates') for e in PossiblyMissingElement(self.__xml_root).possibly_missing_child('templates').findall('pipeline')] def ensure_template(self, template_name): pipeline_element = Ensurance(self.__xml_root).ensure_child('templates').ensure_child_with_attribute('pipeline', 'name', template_name).element return Pipeline(pipeline_element, 'templates') def ensure_replacement_of_template(self, template_name): template = self.ensure_template(template_name) template.make_empty() return template def ensure_removal_of_template(self, template_name): matching = [template for template in self.templates if template.name == template_name] root = Ensurance(self.__xml_root) templates_element = root.ensure_child('templates').element for template in matching: templates_element.remove(template.element) if len(self.templates) == 0: root.element.remove(templates_element) return self @property def git_urls(self): return [pipeline.git_url for pipeline in self.pipelines if pipeline.has_single_git_material] @property def 
has_changes(self): return prettify(self.__initial_config) != prettify(self.config) def save_updated_config(self, save_config_locally=False, dry_run=False): config_before = prettify(self.__initial_config) config_after = prettify(self.config) if save_config_locally: open('config-before.xml', 'w').write(config_before.encode('utf-8')) open('config-after.xml', 'w').write(config_after.encode('utf-8')) def has_kdiff3(): try: return subprocess.call(["kdiff3", "-version"]) == 0 except: return False if dry_run and config_before != config_after and has_kdiff3(): subprocess.call(["kdiff3", "config-before.xml", "config-after.xml"]) data = { 'xmlFile': self.config, 'md5': self._initial_md5 } if not dry_run and config_before != config_after: self.__host_rest_client.post('/go/admin/restful/configuration/file/POST/xml', data) self.__set_initial_config_xml() class HostRestClient(object): def __init__(self, host): self.__host = host def __repr__(self): return 'HostRestClient("%s")' % self.__host def __path(self, path): return ('http://%s' % self.__host) + path def get(self, path): result = requests.get(self.__path(path)) count = 0 while ((result.status_code == 503) or (result.status_code == 504)) and (count < 5): result = requests.get(self.__path(path)) time.sleep(1) count += 1 return result def post(self, path, data): url = self.__path(path) result = requests.post(url, data) if result.status_code != 200: try: result_json = json.loads(result.text.replace("\\'", "'")) message = result_json.get('result', result.text) raise RuntimeError("Could not post config to Go server (%s) [status code=%s]:\n%s" % (url, result.status_code, message)) except ValueError: raise RuntimeError("Could not post config to Go server (%s) [status code=%s] (and result was not json):\n%s" % (url, result.status_code, result)) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Gomatic is an API for configuring GoCD. 
' 'Run python -m gomatic.go_cd_configurator to reverse engineer code to configure an existing pipeline.') parser.add_argument('-s', '--server', help='the go server (e.g. "localhost:8153" or "my.gocd.com")') parser.add_argument('-p', '--pipeline', help='the name of the pipeline to reverse-engineer the config for') args = parser.parse_args() if len(sys.argv) == 1: parser.print_help() sys.exit(1) go_server = GoCdConfigurator(HostRestClient(args.server)) matching_pipelines = [p for p in go_server.pipelines if p.name == args.pipeline] if len(matching_pipelines) != 1: raise RuntimeError("Should have found one matching pipeline but found %s" % matching_pipelines) pipeline = matching_pipelines[0] print(go_server.as_python(pipeline))
mit
hynnet/hiwifi-openwrt-HC5661-HC5761
staging_dir/host/lib/python2.7/test/test_base64.py
113
8195
import unittest from test import test_support import base64 class LegacyBase64TestCase(unittest.TestCase): def test_encodestring(self): eq = self.assertEqual eq(base64.encodestring("www.python.org"), "d3d3LnB5dGhvbi5vcmc=\n") eq(base64.encodestring("a"), "YQ==\n") eq(base64.encodestring("ab"), "YWI=\n") eq(base64.encodestring("abc"), "YWJj\n") eq(base64.encodestring(""), "") eq(base64.encodestring("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n") def test_decodestring(self): eq = self.assertEqual eq(base64.decodestring("d3d3LnB5dGhvbi5vcmc=\n"), "www.python.org") eq(base64.decodestring("YQ==\n"), "a") eq(base64.decodestring("YWI=\n"), "ab") eq(base64.decodestring("YWJj\n"), "abc") eq(base64.decodestring("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n"), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.decodestring(''), '') def test_encode(self): eq = self.assertEqual from cStringIO import StringIO infp = StringIO('abcdefghijklmnopqrstuvwxyz' 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' '0123456789!@#0^&*();:<>,. 
[]{}') outfp = StringIO() base64.encode(infp, outfp) eq(outfp.getvalue(), 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE' 'RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT' 'Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==\n') def test_decode(self): from cStringIO import StringIO infp = StringIO('d3d3LnB5dGhvbi5vcmc=') outfp = StringIO() base64.decode(infp, outfp) self.assertEqual(outfp.getvalue(), 'www.python.org') class BaseXYTestCase(unittest.TestCase): def test_b64encode(self): eq = self.assertEqual # Test default alphabet eq(base64.b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.b64encode('\x00'), 'AA==') eq(base64.b64encode("a"), "YQ==") eq(base64.b64encode("ab"), "YWI=") eq(base64.b64encode("abc"), "YWJj") eq(base64.b64encode(""), "") eq(base64.b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with arbitrary alternative characters eq(base64.b64encode('\xd3V\xbeo\xf7\x1d', altchars='*$'), '01a*b$cd') # Test standard alphabet eq(base64.standard_b64encode("www.python.org"), "d3d3LnB5dGhvbi5vcmc=") eq(base64.standard_b64encode("a"), "YQ==") eq(base64.standard_b64encode("ab"), "YWI=") eq(base64.standard_b64encode("abc"), "YWJj") eq(base64.standard_b64encode(""), "") eq(base64.standard_b64encode("abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}"), "YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ==") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64encode('\xd3V\xbeo\xf7\x1d'), '01a-b_cd') def test_b64decode(self): eq = self.assertEqual eq(base64.b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.b64decode('AA=='), '\x00') eq(base64.b64decode("YQ=="), "a") eq(base64.b64decode("YWI="), "ab") eq(base64.b64decode("YWJj"), "abc") eq(base64.b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0\nNT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. []{}") eq(base64.b64decode(''), '') # Test with arbitrary alternative characters eq(base64.b64decode('01a*b$cd', altchars='*$'), '\xd3V\xbeo\xf7\x1d') # Test standard alphabet eq(base64.standard_b64decode("d3d3LnB5dGhvbi5vcmc="), "www.python.org") eq(base64.standard_b64decode("YQ=="), "a") eq(base64.standard_b64decode("YWI="), "ab") eq(base64.standard_b64decode("YWJj"), "abc") eq(base64.standard_b64decode(""), "") eq(base64.standard_b64decode("YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNE" "RUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0NT" "Y3ODkhQCMwXiYqKCk7Ojw+LC4gW117fQ=="), "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789!@#0^&*();:<>,. 
[]{}") # Test with 'URL safe' alternative characters eq(base64.urlsafe_b64decode('01a-b_cd'), '\xd3V\xbeo\xf7\x1d') def test_b64decode_error(self): self.assertRaises(TypeError, base64.b64decode, 'abc') def test_b32encode(self): eq = self.assertEqual eq(base64.b32encode(''), '') eq(base64.b32encode('\x00'), 'AA======') eq(base64.b32encode('a'), 'ME======') eq(base64.b32encode('ab'), 'MFRA====') eq(base64.b32encode('abc'), 'MFRGG===') eq(base64.b32encode('abcd'), 'MFRGGZA=') eq(base64.b32encode('abcde'), 'MFRGGZDF') def test_b32decode(self): eq = self.assertEqual eq(base64.b32decode(''), '') eq(base64.b32decode('AA======'), '\x00') eq(base64.b32decode('ME======'), 'a') eq(base64.b32decode('MFRA===='), 'ab') eq(base64.b32decode('MFRGG==='), 'abc') eq(base64.b32decode('MFRGGZA='), 'abcd') eq(base64.b32decode('MFRGGZDF'), 'abcde') def test_b32decode_casefold(self): eq = self.assertEqual eq(base64.b32decode('', True), '') eq(base64.b32decode('ME======', True), 'a') eq(base64.b32decode('MFRA====', True), 'ab') eq(base64.b32decode('MFRGG===', True), 'abc') eq(base64.b32decode('MFRGGZA=', True), 'abcd') eq(base64.b32decode('MFRGGZDF', True), 'abcde') # Lower cases eq(base64.b32decode('me======', True), 'a') eq(base64.b32decode('mfra====', True), 'ab') eq(base64.b32decode('mfrgg===', True), 'abc') eq(base64.b32decode('mfrggza=', True), 'abcd') eq(base64.b32decode('mfrggzdf', True), 'abcde') # Expected exceptions self.assertRaises(TypeError, base64.b32decode, 'me======') # Mapping zero and one eq(base64.b32decode('MLO23456'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', map01='L'), 'b\xdd\xad\xf3\xbe') eq(base64.b32decode('M1023456', map01='I'), 'b\x1d\xad\xf3\xbe') def test_b32decode_error(self): self.assertRaises(TypeError, base64.b32decode, 'abc') self.assertRaises(TypeError, base64.b32decode, 'ABCDEF==') def test_b16encode(self): eq = self.assertEqual eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF') eq(base64.b16encode('\x00'), '00') def 
test_b16decode(self): eq = self.assertEqual eq(base64.b16decode('0102ABCDEF'), '\x01\x02\xab\xcd\xef') eq(base64.b16decode('00'), '\x00') # Lower case is not allowed without a flag self.assertRaises(TypeError, base64.b16decode, '0102abcdef') # Case fold eq(base64.b16decode('0102abcdef', True), '\x01\x02\xab\xcd\xef') def test_main(): test_support.run_unittest(__name__) if __name__ == '__main__': test_main()
gpl-2.0
uzh/msregistry
app/main/views.py
1
1040
# Copyright (C) 2016 University of Zurich. All rights reserved. # # This file is part of MSRegistry Backend. # # MSRegistry Backend is free software: you can redistribute it and/or # modify it under the terms of the version 3 of the GNU Affero General # Public License as published by the Free Software Foundation, or any # other later version. # # MSRegistry Backend is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version # 3 of the GNU Affero General Public License for more details. # # You should have received a copy of the version 3 of the GNU Affero # General Public License along with MSRegistry Backend. If not, see # <http://www.gnu.org/licenses/>. __author__ = "Filippo Panessa <[email protected]>" __copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik," " University of Zurich") from . import main @main.route('/', methods=['GET', 'POST']) def index(): return '', 200
agpl-3.0
suyashphadtare/sajil-final-erp
erpnext/erpnext/projects/utils.py
37
1205
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt # For license information, please see license.txt from __future__ import unicode_literals import frappe @frappe.whitelist() def get_time_log_list(doctype, txt, searchfield, start, page_len, filters): return frappe.db.get_values("Time Log", filters, ["name", "activity_type", "owner"]) @frappe.whitelist() def query_task(doctype, txt, searchfield, start, page_len, filters): from frappe.widgets.reportview import build_match_conditions search_string = "%%%s%%" % txt order_by_string = "%s%%" % txt match_conditions = build_match_conditions("Task") match_conditions = ("and" + match_conditions) if match_conditions else "" return frappe.db.sql("""select name, subject from `tabTask` where (`%s` like %s or `subject` like %s) %s order by case when `subject` like %s then 0 else 1 end, case when `%s` like %s then 0 else 1 end, `%s`, subject limit %s, %s""" % (searchfield, "%s", "%s", match_conditions, "%s", searchfield, "%s", searchfield, "%s", "%s"), (search_string, search_string, order_by_string, order_by_string, start, page_len))
agpl-3.0
romain-li/edx-platform
lms/envs/test_static_optimized.py
26
2169
""" Settings used when generating static assets for use in tests. For example, Bok Choy uses two different settings files: 1. test_static_optimized is used when invoking collectstatic 2. bok_choy is used when running CMS and LMS Note: it isn't possible to have a single settings file, because Django doesn't support both generating static assets to a directory and also serving static from the same directory. """ # Start with the common settings from .common import * # pylint: disable=wildcard-import, unused-wildcard-import # Use an in-memory database since this settings file is only used for updating assets DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'ATOMIC_REQUESTS': True, }, 'student_module_history': { 'ENGINE': 'django.db.backends.sqlite3', }, } # Provide a dummy XQUEUE_INTERFACE setting as LMS expects it to exist on start up XQUEUE_INTERFACE = { "url": "https://sandbox-xqueue.edx.org", "django_auth": { "username": "lms", "password": "***REMOVED***" }, "basic_auth": ('anant', 'agarwal'), } ######################### PIPELINE #################################### # Use RequireJS optimized storage STATICFILES_STORAGE = 'openedx.core.lib.django_require.staticstorage.OptimizedCachedRequireJsStorage' # Revert to the default set of finders as we don't want to dynamically pick up files from the pipeline STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', 'openedx.core.lib.xblock_pipeline.finder.XBlockPipelineFinder', ] # Redirect to the test_root folder within the repo TEST_ROOT = REPO_ROOT / "test_root" LOG_DIR = (TEST_ROOT / "log").abspath() # Store the static files under test root so that they don't overwrite existing static assets STATIC_ROOT = (TEST_ROOT / "staticfiles" / "lms").abspath() # Disable uglify when tests are running (used by build.js). # 1. Uglify is by far the slowest part of the build process # 2. 
Having full source code makes debugging tests easier for developers os.environ['REQUIRE_BUILD_PROFILE_OPTIMIZE'] = 'none'
agpl-3.0
thaumos/ansible
lib/ansible/modules/cloud/azure/azure_rm_hdinsightcluster.py
12
20099
#!/usr/bin/python # # Copyright (c) 2019 Zim Kalinowski, (@zikalino) # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: azure_rm_hdinsightcluster version_added: "2.8" short_description: Manage Azure HDInsight Cluster instance. description: - Create, update and delete instance of Azure HDInsight Cluster. options: resource_group: description: - The name of the resource group. required: True name: description: - The name of the cluster. required: True location: description: - Resource location. If not set, location from the resource group will be used as default. cluster_version: description: - The version of the cluster. For example I(3.6) os_type: description: - The type of operating system. choices: - 'linux' tier: description: - The cluster tier. choices: - 'standard' - 'premium' cluster_definition: description: - The cluster definition. suboptions: kind: description: - The type of cluster. choices: - hadoop - spark - hbase - storm gateway_rest_username: description: - Gateway REST user name. gateway_rest_password: description: - Gateway REST password. compute_profile_roles: description: - The list of roles in the cluster. type: list suboptions: name: description: - The name of the role. choices: - 'headnode' - 'workernode' - 'zookepernode' min_instance_count: description: - The minimum instance count of the cluster. target_instance_count: description: - The instance count of the cluster. vm_size: description: - The size of the VM linux_profile: description: - The Linux OS profile. suboptions: username: description: - User name password: description: - Password storage_accounts: description: - The list of storage accounts in the cluster. type: list suboptions: name: description: - Blob storage endpoint. 
is_default: description: - Whether or not the storage account is the default storage account. container: description: - The container in the storage account. key: description: - The storage account access key. state: description: - Assert the state of the cluster. - Use C(present) to create or update a cluster and C(absent) to delete it. default: present choices: - absent - present extends_documentation_fragment: - azure - azure_tags author: - "Zim Kalinowski (@zikalino)" ''' EXAMPLES = ''' - name: Create instance of HDInsight Cluster azure_rm_hdinsightcluster: resource_group: myResourceGroup name: myCluster location: eastus2 cluster_version: 3.6 os_type: linux tier: standard cluster_definition: kind: spark gateway_rest_username: http-user gateway_rest_password: MuABCPassword!!@123 storage_accounts: - name: myStorageAccount.blob.core.windows.net is_default: yes container: myContainer key: GExmaxH4lDNdHA9nwAsCt8t4AOQas2y9vXQP1kKALTram7Q3/5xLVIab3+nYG1x63Xyak9/VXxQyNBHA9pDWw== compute_profile_roles: - name: headnode target_instance_count: 2 hardware_profile: vm_size: Standard_D3 linux_profile: username: sshuser password: MuABCPassword!!@123 - name: workernode target_instance_count: 2 vm_size: Standard_D3 linux_profile: username: sshuser password: MuABCPassword!!@123 ''' RETURN = ''' id: description: - Fully qualified resource id of the cluster. 
returned: always type: str sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.HDInsight/clusters/myCluster ''' import time from ansible.module_utils.azure_rm_common import AzureRMModuleBase try: from msrestazure.azure_exceptions import CloudError from msrest.polling import LROPoller from msrestazure.azure_operation import AzureOperationPoller from azure.mgmt.hdinsight import HDInsightManagementClient from msrest.serialization import Model except ImportError: # This is handled in azure_rm_common pass class Actions: NoAction, Create, Update, Delete = range(4) class AzureRMClusters(AzureRMModuleBase): """Configuration class for an Azure RM Cluster resource""" def __init__(self): self.module_arg_spec = dict( resource_group=dict( type='str', required=True ), name=dict( type='str', required=True ), location=dict( type='str' ), cluster_version=dict( type='str' ), os_type=dict( type='str', choices=['linux'] ), tier=dict( type='str', choices=['standard', 'premium'] ), cluster_definition=dict( type='dict' ), compute_profile_roles=dict( type='list' ), storage_accounts=dict( type='list' ), state=dict( type='str', default='present', choices=['present', 'absent'] ) ) self.resource_group = None self.name = None self.parameters = dict() self.results = dict(changed=False) self.mgmt_client = None self.state = None self.to_do = Actions.NoAction self.tags_changed = False self.new_instance_count = None super(AzureRMClusters, self).__init__(derived_arg_spec=self.module_arg_spec, supports_check_mode=True, supports_tags=True) def exec_module(self, **kwargs): """Main module execution method""" for key in list(self.module_arg_spec.keys()) + ['tags']: if hasattr(self, key): setattr(self, key, kwargs[key]) elif kwargs[key] is not None: self.parameters[key] = kwargs[key] dict_expand(self.parameters, ['cluster_version'], 'properties') dict_camelize(self.parameters, ['os_type'], True) dict_expand(self.parameters, ['os_type'], 
'properties') dict_camelize(self.parameters, ['tier'], True) dict_expand(self.parameters, ['tier'], 'properties') dict_rename(self.parameters, ['cluster_definition', 'gateway_rest_username'], 'restAuthCredential.username') dict_rename(self.parameters, ['cluster_definition', 'gateway_rest_password'], 'restAuthCredential.password') dict_expand(self.parameters, ['cluster_definition', 'restAuthCredential.username'], 'gateway') dict_expand(self.parameters, ['cluster_definition', 'restAuthCredential.password'], 'gateway') dict_expand(self.parameters, ['cluster_definition', 'gateway'], 'configurations') dict_expand(self.parameters, ['cluster_definition'], 'properties') dict_expand(self.parameters, ['compute_profile_roles', 'vm_size'], 'hardware_profile') dict_rename(self.parameters, ['compute_profile_roles', 'linux_profile'], 'linux_operating_system_profile') dict_expand(self.parameters, ['compute_profile_roles', 'linux_operating_system_profile'], 'os_profile') dict_rename(self.parameters, ['compute_profile_roles'], 'roles') dict_expand(self.parameters, ['roles'], 'compute_profile') dict_expand(self.parameters, ['compute_profile'], 'properties') dict_rename(self.parameters, ['storage_accounts'], 'storageaccounts') dict_expand(self.parameters, ['storageaccounts'], 'storage_profile') dict_expand(self.parameters, ['storage_profile'], 'properties') response = None self.mgmt_client = self.get_mgmt_svc_client(HDInsightManagementClient, base_url=self._cloud_environment.endpoints.resource_manager) resource_group = self.get_resource_group(self.resource_group) if "location" not in self.parameters: self.parameters["location"] = resource_group.location old_response = self.get_cluster() if not old_response: self.log("Cluster instance doesn't exist") if self.state == 'absent': self.log("Old instance didn't exist") else: self.to_do = Actions.Create else: self.log("Cluster instance already exists") if self.state == 'absent': self.to_do = Actions.Delete elif self.state == 'present': 
compare_result = {} if (not default_compare(self.parameters, old_response, '', compare_result)): if compare_result.pop('/properties/compute_profile/roles/*/target_instance_count', False): # check if it's workernode new_count = 0 old_count = 0 for role in self.parameters['properties']['compute_profile']['roles']: if role['name'] == 'workernode': new_count = role['target_instance_count'] for role in old_response['properties']['compute_profile']['roles']: if role['name'] == 'workernode': old_count = role['target_instance_count'] if old_count != new_count: self.new_instance_count = new_count self.to_do = Actions.Update if compare_result.pop('/tags', False): self.to_do = Actions.Update self.tags_changed = True if compare_result: for k in compare_result.keys(): self.module.warn("property '" + k + "' cannot be updated (" + compare_result[k] + ")") self.module.warn("only tags and target_instance_count can be updated") if (self.to_do == Actions.Create) or (self.to_do == Actions.Update): self.log("Need to Create / Update the Cluster instance") self.results['changed'] = True if self.check_mode: return self.results response = self.create_update_cluster() self.log("Creation / Update done") elif self.to_do == Actions.Delete: self.log("Cluster instance deleted") self.results['changed'] = True if self.check_mode: return self.results self.delete_cluster() else: self.log("Cluster instance unchanged") self.results['changed'] = False response = old_response if self.state == 'present': self.results.update(self.format_item(response)) return self.results def create_update_cluster(self): ''' Creates or updates Cluster with the specified configuration. 
:return: deserialized Cluster instance state dictionary ''' self.log("Creating / Updating the Cluster instance {0}".format(self.name)) try: if self.to_do == Actions.Create: response = self.mgmt_client.clusters.create(resource_group_name=self.resource_group, cluster_name=self.name, parameters=self.parameters) if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller): response = self.get_poller_result(response) else: if self.tags_changed: response = self.mgmt_client.clusters.update(resource_group_name=self.resource_group, cluster_name=self.name, tags=self.parameters.get('tags')) if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller): response = self.get_poller_result(response) if self.new_instance_count: response = self.mgmt_client.clusters.resize(resource_group_name=self.resource_group, cluster_name=self.name, target_instance_count=self.new_instance_count) if isinstance(response, LROPoller) or isinstance(response, AzureOperationPoller): response = self.get_poller_result(response) except CloudError as exc: self.fail("Error creating or updating Cluster instance: {0}".format(str(exc))) return response.as_dict() if response else {} def delete_cluster(self): ''' Deletes specified Cluster instance in the specified subscription and resource group. :return: True ''' self.log("Deleting the Cluster instance {0}".format(self.name)) try: response = self.mgmt_client.clusters.delete(resource_group_name=self.resource_group, cluster_name=self.name) except CloudError as e: self.fail("Error deleting the Cluster instance: {0}".format(str(e))) return True def get_cluster(self): ''' Gets the properties of the specified Cluster. 
:return: deserialized Cluster instance state dictionary ''' self.log("Checking if the Cluster instance {0} is present".format(self.name)) found = False try: response = self.mgmt_client.clusters.get(resource_group_name=self.resource_group, cluster_name=self.name) found = True self.log("Response : {0}".format(response)) self.log("Cluster instance : {0} found".format(response.name)) except Exception as e: self.log('Did not find the Cluster instance.') if found is True: return response.as_dict() return False def format_item(self, d): d = { 'id': d.get('id', None) } return d def default_compare(new, old, path, result): if new is None: match = True elif isinstance(new, dict): match = True if not isinstance(old, dict): result[path] = 'old dict is null' match = False else: for k in new.keys(): if not default_compare(new.get(k), old.get(k, None), path + '/' + k, result): match = False elif isinstance(new, list): if not isinstance(old, list) or len(new) != len(old): result[path] = 'length is different or null' match = False elif len(old) == 0: match = True else: match = True if isinstance(old[0], dict): key = None if 'id' in old[0] and 'id' in new[0]: key = 'id' elif 'name' in old[0] and 'name' in new[0]: key = 'name' else: key = list(old[0])[0] new = sorted(new, key=lambda x: x.get(key, '')) old = sorted(old, key=lambda x: x.get(key, '')) else: new = sorted(new) old = sorted(old) for i in range(len(new)): if not default_compare(new[i], old[i], path + '/*', result): match = False return match else: if path.endswith('password'): match = True else: if path == '/location' or path.endswith('location_name'): new = new.replace(' ', '').lower() old = new.replace(' ', '').lower() if new == old: match = True else: result[path] = str(new) + ' != ' + str(old) match = False return match def dict_camelize(d, path, camelize_first): if isinstance(d, list): for i in range(len(d)): dict_camelize(d[i], path, camelize_first) elif isinstance(d, dict): if len(path) == 1: old_value = 
d.get(path[0], None) if old_value is not None: d[path[0]] = _snake_to_camel(old_value, camelize_first) else: sd = d.get(path[0], None) if sd is not None: dict_camelize(sd, path[1:], camelize_first) def dict_upper(d, path): if isinstance(d, list): for i in range(len(d)): dict_upper(d[i], path) elif isinstance(d, dict): if len(path) == 1: old_value = d.get(path[0], None) if old_value is not None: d[path[0]] = old_value.upper() else: sd = d.get(path[0], None) if sd is not None: dict_upper(sd, path[1:]) def dict_rename(d, path, new_name): if isinstance(d, list): for i in range(len(d)): dict_rename(d[i], path, new_name) elif isinstance(d, dict): if len(path) == 1: old_value = d.pop(path[0], None) if old_value is not None: d[new_name] = old_value else: sd = d.get(path[0], None) if sd is not None: dict_rename(sd, path[1:], new_name) def dict_expand(d, path, outer_dict_name): if isinstance(d, list): for i in range(len(d)): dict_expand(d[i], path, outer_dict_name) elif isinstance(d, dict): if len(path) == 1: old_value = d.pop(path[0], None) if old_value is not None: d[outer_dict_name] = d.get(outer_dict_name, {}) d[outer_dict_name][path[0]] = old_value else: sd = d.get(path[0], None) if sd is not None: dict_expand(sd, path[1:], outer_dict_name) def _snake_to_camel(snake, capitalize_first=False): if capitalize_first: return ''.join(x.capitalize() or '_' for x in snake.split('_')) else: return snake.split('_')[0] + ''.join(x.capitalize() or '_' for x in snake.split('_')[1:]) def main(): """Main execution""" AzureRMClusters() if __name__ == '__main__': main()
gpl-3.0
edxzw/edx-platform
cms/djangoapps/contentstore/views/tests/test_programs.py
19
6364
"""Tests covering the Programs listing on the Studio home.""" import json from django.conf import settings from django.core.urlresolvers import reverse import httpretty import mock from oauth2_provider.tests.factories import ClientFactory from provider.constants import CONFIDENTIAL from openedx.core.djangoapps.programs.models import ProgramsApiConfig from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin, ProgramsDataMixin from student.tests.factories import UserFactory from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase class TestProgramListing(ProgramsApiConfigMixin, ProgramsDataMixin, SharedModuleStoreTestCase): """Verify Program listing behavior.""" def setUp(self): super(TestProgramListing, self).setUp() ClientFactory(name=ProgramsApiConfig.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL) self.staff = UserFactory(is_staff=True) self.client.login(username=self.staff.username, password='test') self.studio_home = reverse('home') @httpretty.activate def test_programs_config_disabled(self): """Verify that the programs tab and creation button aren't rendered when config is disabled.""" self.create_config(enable_studio_tab=False) self.mock_programs_api() response = self.client.get(self.studio_home) self.assertNotIn("You haven't created any programs yet.", response.content) for program_name in self.PROGRAM_NAMES: self.assertNotIn(program_name, response.content) @httpretty.activate def test_programs_requires_staff(self): """ Verify that the programs tab and creation button aren't rendered unless the user has global staff permissions. 
""" student = UserFactory(is_staff=False) self.client.login(username=student.username, password='test') self.create_config() self.mock_programs_api() response = self.client.get(self.studio_home) self.assertNotIn("You haven't created any programs yet.", response.content) @httpretty.activate def test_programs_displayed(self): """Verify that the programs tab and creation button can be rendered when config is enabled.""" self.create_config() # When no data is provided, expect creation prompt. self.mock_programs_api(data={'results': []}) response = self.client.get(self.studio_home) self.assertIn("You haven't created any programs yet.", response.content) # When data is provided, expect a program listing. self.mock_programs_api() response = self.client.get(self.studio_home) for program_name in self.PROGRAM_NAMES: self.assertIn(program_name, response.content) class TestProgramAuthoringView(ProgramsApiConfigMixin, SharedModuleStoreTestCase): """Verify the behavior of the program authoring app's host view.""" def setUp(self): super(TestProgramAuthoringView, self).setUp() self.staff = UserFactory(is_staff=True) self.programs_path = reverse('programs') def _assert_status(self, status_code): """Verify the status code returned by the Program authoring view.""" response = self.client.get(self.programs_path) self.assertEquals(response.status_code, status_code) return response def test_authoring_login_required(self): """Verify that accessing the view requires the user to be authenticated.""" response = self.client.get(self.programs_path) self.assertRedirects( response, '{login_url}?next={programs}'.format( login_url=settings.LOGIN_URL, programs=self.programs_path ) ) def test_authoring_header(self): """Verify that the header contains the expected text.""" self.client.login(username=self.staff.username, password='test') self.create_config() response = self._assert_status(200) self.assertIn("Program Administration", response.content) def test_authoring_access(self): """ Verify that a 
404 is returned if Programs authoring is disabled, or the user does not have global staff permissions. """ self.client.login(username=self.staff.username, password='test') self._assert_status(404) # Enable Programs authoring interface self.create_config() student = UserFactory(is_staff=False) self.client.login(username=student.username, password='test') self._assert_status(404) class TestProgramsIdTokenView(ProgramsApiConfigMixin, SharedModuleStoreTestCase): """Tests for the programs id_token endpoint.""" def setUp(self): super(TestProgramsIdTokenView, self).setUp() self.user = UserFactory() self.client.login(username=self.user.username, password='test') self.path = reverse('programs_id_token') def test_config_disabled(self): """Ensure the endpoint returns 404 when Programs authoring is disabled.""" self.create_config(enable_studio_tab=False) response = self.client.get(self.path) self.assertEqual(response.status_code, 404) def test_not_logged_in(self): """Ensure the endpoint denies access to unauthenticated users.""" self.create_config() self.client.logout() response = self.client.get(self.path) self.assertEqual(response.status_code, 302) self.assertIn(settings.LOGIN_URL, response['Location']) @mock.patch('cms.djangoapps.contentstore.views.program.get_id_token', return_value='test-id-token') def test_config_enabled(self, mock_get_id_token): """ Ensure the endpoint responds with a valid JSON payload when authoring is enabled. """ self.create_config() response = self.client.get(self.path) self.assertEqual(response.status_code, 200) payload = json.loads(response.content) self.assertEqual(payload, {"id_token": "test-id-token"}) # this comparison is a little long-handed because we need to compare user instances directly user, client_name = mock_get_id_token.call_args[0] self.assertEqual(user, self.user) self.assertEqual(client_name, "programs")
agpl-3.0
2mny/mylar
lib/js2py/prototypes/jsarray.py
27
14886
import six if six.PY3: xrange = range import functools def to_arr(this): """Returns Python array from Js array""" return [this.get(str(e)) for e in xrange(len(this))] ARR_STACK = set({}) class ArrayPrototype: def toString(): # this function is wrong but I will leave it here fore debugging purposes. func = this.get('join') if not func.is_callable(): @this.Js def func(): return '[object %s]'%this.Class return func.call(this, ()) def toLocaleString(): array = this.to_object() arr_len = array.get('length').to_uint32() # separator is simply a comma ',' if not arr_len: return '' res = [] for i in xrange(arr_len): element = array[str(i)] if element.is_undefined() or element.is_null(): res.append('') else: cand = element.to_object() str_func = element.get('toLocaleString') if not str_func.is_callable(): raise this.MakeError('TypeError', 'toLocaleString method of item at index %d is not callable'%i) res.append(element.callprop('toLocaleString').value) return ','.join(res) def concat(): array = this.to_object() A = this.Js([]) items = [array] items.extend(to_arr(arguments)) n = 0 for E in items: if E.Class=='Array': k = 0 e_len = len(E) while k<e_len: if E.has_property(str(k)): A.put(str(n), E.get(str(k))) n+=1 k+=1 else: A.put(str(n), E) n+=1 return A def join(separator): ARR_STACK.add(this) array = this.to_object() arr_len = array.get('length').to_uint32() separator = ',' if separator.is_undefined() else separator.to_string().value elems = [] for e in xrange(arr_len): elem = array.get(str(e)) if elem in ARR_STACK: s = '' else: s = elem.to_string().value elems.append(s if not (elem.is_undefined() or elem.is_null()) else '') res = separator.join(elems) ARR_STACK.remove(this) return res def pop(): #todo check array = this.to_object() arr_len = array.get('length').to_uint32() if not arr_len: array.put('length', this.Js(arr_len)) return None ind = str(arr_len-1) element = array.get(ind) array.delete(ind) array.put('length', this.Js(arr_len-1)) return element def push(item): # 
todo check array = this.to_object() arr_len = array.get('length').to_uint32() to_put = arguments.to_list() i = arr_len for i, e in enumerate(to_put, arr_len): array.put(str(i), e) if to_put: i+=1 array.put('length', this.Js(i)) return i def reverse(): array = this.to_object() # my own algorithm vals = to_arr(array) has_props = [array.has_property(str(e)) for e in xrange(len(array))] vals.reverse() has_props.reverse() for i, val in enumerate(vals): if has_props[i]: array.put(str(i), val) else: array.delete(str(i)) return array def shift(): #todo check array = this.to_object() arr_len = array.get('length').to_uint32() if not arr_len: array.put('length', this.Js(0)) return None first = array.get('0') for k in xrange(1, arr_len): from_s, to_s = str(k), str(k-1) if array.has_property(from_s): array.put(to_s, array.get(from_s)) else: array.delete(to) array.delete(str(arr_len-1)) array.put('length', this.Js(str(arr_len-1))) return first def slice(start, end): # todo check array = this.to_object() arr_len = array.get('length').to_uint32() relative_start = start.to_int() k = max((arr_len + relative_start), 0) if relative_start<0 else min(relative_start, arr_len) relative_end = arr_len if end.is_undefined() else end.to_int() final = max((arr_len + relative_end), 0) if relative_end<0 else min(relative_end, arr_len) res = [] n = 0 while k<final: pk = str(k) if array.has_property(pk): res.append(array.get(pk)) k += 1 n += 1 return res def sort(cmpfn): if not this.Class in {'Array', 'Arguments'}: return this.to_object() # do nothing arr = [] for i in xrange(len(this)): arr.append(this.get(six.text_type(i))) if not arr: return this if not cmpfn.is_callable(): cmpfn = None cmp = lambda a,b: sort_compare(a, b, cmpfn) if six.PY3: key = functools.cmp_to_key(cmp) arr.sort(key=key) else: arr.sort(cmp=cmp) for i in xrange(len(arr)): this.put(six.text_type(i), arr[i]) return this def splice(start, deleteCount): # 1-8 array = this.to_object() arr_len = array.get('length').to_uint32() 
relative_start = start.to_int() actual_start = max((arr_len + relative_start),0) if relative_start<0 else min(relative_start, arr_len) actual_delete_count = min(max(deleteCount.to_int(),0 ), arr_len - actual_start) k = 0 A = this.Js([]) # 9 while k<actual_delete_count: if array.has_property(str(actual_start+k)): A.put(str(k), array.get(str(actual_start+k))) k += 1 # 10-11 items = to_arr(arguments)[2:] items_len = len(items) # 12 if items_len<actual_delete_count: k = actual_start while k < (arr_len-actual_delete_count): fr = str(k+actual_delete_count) to = str(k+items_len) if array.has_property(fr): array.put(to, array.get(fr)) else: array.delete(to) k += 1 k = arr_len while k > (arr_len - actual_delete_count + items_len): array.delete(str(k-1)) k -= 1 # 13 elif items_len>actual_delete_count: k = arr_len - actual_delete_count while k>actual_start: fr = str(k + actual_delete_count - 1) to = str(k + items_len - 1) if array.has_property(fr): array.put(to, array.get(fr)) else: array.delete(to) k -= 1 # 14-17 k = actual_start while items: E = items.pop(0) array.put(str(k), E) k += 1 array.put('length', this.Js(arr_len - actual_delete_count + items_len)) return A def unshift(): array = this.to_object() arr_len = array.get('length').to_uint32() argCount = len(arguments) k = arr_len while k > 0: fr = str(k - 1) to = str(k + argCount - 1) if array.has_property(fr): array.put(to, array.get(fr)) else: array.delete(to) k -= 1 j = 0 items = to_arr(arguments) while items: E = items.pop(0) array.put(str(j), E) j += 1 array.put('length', this.Js(arr_len + argCount)) return arr_len + argCount def indexOf(searchElement): array = this.to_object() arr_len = array.get('length').to_uint32() if arr_len == 0: return -1 if len(arguments)>1: n = arguments[1].to_int() else: n = 0 if n >= arr_len: return -1 if n >= 0: k = n else: k = arr_len - abs(n) if k < 0: k = 0 while k < arr_len: if array.has_property(str(k)): elementK = array.get(str(k)) if 
searchElement.strict_equality_comparison(elementK): return k k += 1 return -1 def lastIndexOf(searchElement): array = this.to_object() arr_len = array.get('length').to_uint32() if arr_len == 0: return -1 if len(arguments)>1: n = arguments[1].to_int() else: n = arr_len - 1 if n >= 0: k = min(n, arr_len-1) else: k = arr_len - abs(n) while k >= 0: if array.has_property(str(k)): elementK = array.get(str(k)) if searchElement.strict_equality_comparison(elementK): return k k -= 1 return -1 def every(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') T = arguments[1] k = 0 while k<arr_len: if array.has_property(str(k)): kValue = array.get(str(k)) if not callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value: return False k += 1 return True def some(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') T = arguments[1] k = 0 while k<arr_len: if array.has_property(str(k)): kValue = array.get(str(k)) if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value: return True k += 1 return False def forEach(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') T = arguments[1] k = 0 while k<arr_len: if array.has_property(str(k)): kValue = array.get(str(k)) callbackfn.call(T, (kValue, this.Js(k), array)) k+=1 def map(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') T = arguments[1] A = this.Js([]) k = 0 while k<arr_len: Pk = str(k) if array.has_property(Pk): kValue = array.get(Pk) mappedValue = callbackfn.call(T, (kValue, this.Js(k), array)) 
A.define_own_property(Pk, {'value': mappedValue, 'writable': True, 'enumerable': True, 'configurable': True}) k += 1 return A def filter(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') T = arguments[1] res = [] k = 0 while k<arr_len: if array.has_property(str(k)): kValue = array.get(str(k)) if callbackfn.call(T, (kValue, this.Js(k), array)).to_boolean().value: res.append(kValue) k += 1 return res # converted to js array automatically def reduce(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') if not arr_len and len(arguments)<2: raise this.MakeError('TypeError', 'Reduce of empty array with no initial value') k = 0 if len(arguments)>1: # initial value present accumulator = arguments[1] else: kPresent = False while not kPresent and k<arr_len: kPresent = array.has_property(str(k)) if kPresent: accumulator = array.get(str(k)) k += 1 if not kPresent: raise this.MakeError('TypeError', 'Reduce of empty array with no initial value') while k<arr_len: if array.has_property(str(k)): kValue = array.get(str(k)) accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array)) k += 1 return accumulator def reduceRight(callbackfn): array = this.to_object() arr_len = array.get('length').to_uint32() if not callbackfn.is_callable(): raise this.MakeError('TypeError', 'callbackfn must be a function') if not arr_len and len(arguments)<2: raise this.MakeError('TypeError', 'Reduce of empty array with no initial value') k = arr_len - 1 if len(arguments)>1: # initial value present accumulator = arguments[1] else: kPresent = False while not kPresent and k>=0: kPresent = array.has_property(str(k)) if kPresent: accumulator = array.get(str(k)) k -= 1 if not kPresent: raise this.MakeError('TypeError', 'Reduce of 
empty array with no initial value') while k>=0: if array.has_property(str(k)): kValue = array.get(str(k)) accumulator = callbackfn.call(this.undefined, (accumulator, kValue, this.Js(k), array)) k -= 1 return accumulator def sort_compare(a, b, comp): if a is None: if b is None: return 0 return 1 if b is None: if a is None: return 0 return -1 if a.is_undefined(): if b.is_undefined(): return 0 return 1 if b.is_undefined(): if a.is_undefined(): return 0 return -1 if comp is not None: res = comp.call(a.undefined, (a, b)) return res.to_int() x, y = a.to_string(), b.to_string() if x<y: return -1 elif x>y: return 1 return 0
gpl-3.0
JackWoot/E2E-Messenger
Server/passlib/apache.py
21
39058
"""passlib.apache - apache password support""" # XXX: relocate this to passlib.ext.apache? #============================================================================= # imports #============================================================================= from __future__ import with_statement # core from hashlib import md5 import logging; log = logging.getLogger(__name__) import os import sys from warnings import warn # site # pkg from passlib.context import CryptContext from passlib.exc import ExpectedStringError from passlib.hash import htdigest from passlib.utils import consteq, render_bytes, to_bytes, deprecated_method, is_ascii_codec from passlib.utils.compat import b, bytes, join_bytes, str_to_bascii, u, \ unicode, BytesIO, iteritems, imap, PY3 # local __all__ = [ 'HtpasswdFile', 'HtdigestFile', ] #============================================================================= # constants & support #============================================================================= _UNSET = object() _BCOLON = b(":") # byte values that aren't allowed in fields. _INVALID_FIELD_CHARS = b(":\n\r\t\x00") #============================================================================= # backport of OrderedDict for PY2.5 #============================================================================= try: from collections import OrderedDict except ImportError: # Python 2.5 class OrderedDict(dict): """hacked OrderedDict replacement. NOTE: this doesn't provide a full OrderedDict implementation, just the minimum needed by the Htpasswd internals. """ def __init__(self): self._keys = [] def __iter__(self): return iter(self._keys) def __setitem__(self, key, value): if key not in self: self._keys.append(key) super(OrderedDict, self).__setitem__(key, value) def __delitem__(self, key): super(OrderedDict, self).__delitem__(key) self._keys.remove(key) def iteritems(self): return ((key, self[key]) for key in self) # these aren't used or implemented, so disabling them for safety. 
update = pop = popitem = clear = keys = iterkeys = None #============================================================================= # common helpers #============================================================================= class _CommonFile(object): """common framework for HtpasswdFile & HtdigestFile""" #=================================================================== # instance attrs #=================================================================== # charset encoding used by file (defaults to utf-8) encoding = None # whether users() and other public methods should return unicode or bytes? # (defaults to False under PY2, True under PY3) return_unicode = None # if bound to local file, these will be set. _path = None # local file path _mtime = None # mtime when last loaded, or 0 # if true, automatically save to local file after changes are made. autosave = False # ordered dict mapping key -> value for all records in database. # (e.g. user => hash for Htpasswd) _records = None #=================================================================== # alt constuctors #=================================================================== @classmethod def from_string(cls, data, **kwds): """create new object from raw string. :type data: unicode or bytes :arg data: database to load, as single string. :param \*\*kwds: all other keywords are the same as in the class constructor """ if 'path' in kwds: raise TypeError("'path' not accepted by from_string()") self = cls(**kwds) self.load_string(data) return self @classmethod def from_path(cls, path, **kwds): """create new object from file, without binding object to file. 
:type path: str :arg path: local filepath to load from :param \*\*kwds: all other keywords are the same as in the class constructor """ self = cls(**kwds) self.load(path) return self #=================================================================== # init #=================================================================== def __init__(self, path=None, new=False, autoload=True, autosave=False, encoding="utf-8", return_unicode=PY3, ): # set encoding if not encoding: warn("``encoding=None`` is deprecated as of Passlib 1.6, " "and will cause a ValueError in Passlib 1.8, " "use ``return_unicode=False`` instead.", DeprecationWarning, stacklevel=2) encoding = "utf-8" return_unicode = False elif not is_ascii_codec(encoding): # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator, # so only ascii-compatible encodings are allowed. raise ValueError("encoding must be 7-bit ascii compatible") self.encoding = encoding # set other attrs self.return_unicode = return_unicode self.autosave = autosave self._path = path self._mtime = 0 # init db if not autoload: warn("``autoload=False`` is deprecated as of Passlib 1.6, " "and will be removed in Passlib 1.8, use ``new=True`` instead", DeprecationWarning, stacklevel=2) new = True if path and not new: self.load() else: self._records = OrderedDict() def __repr__(self): tail = '' if self.autosave: tail += ' autosave=True' if self._path: tail += ' path=%r' % self._path if self.encoding != "utf-8": tail += ' encoding=%r' % self.encoding return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail) # NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set. 
def _get_path(self): return self._path def _set_path(self, value): if value != self._path: self._mtime = 0 self._path = value path = property(_get_path, _set_path) @property def mtime(self): "modify time when last loaded (if bound to a local file)" return self._mtime #=================================================================== # loading #=================================================================== def load_if_changed(self): """Reload from ``self.path`` only if file has changed since last load""" if not self._path: raise RuntimeError("%r is not bound to a local file" % self) if self._mtime and self._mtime == os.path.getmtime(self._path): return False self.load() return True def load(self, path=None, force=True): """Load state from local file. If no path is specified, attempts to load from ``self.path``. :type path: str :arg path: local file to load from :type force: bool :param force: if ``force=False``, only load from ``self.path`` if file has changed since last load. .. deprecated:: 1.6 This keyword will be removed in Passlib 1.8; Applications should use :meth:`load_if_changed` instead. """ if path is not None: with open(path, "rb") as fh: self._mtime = 0 self._load_lines(fh) elif not force: warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6," "and will be removed in Passlib 1.8; " "use %(name)s.load_if_changed() instead." 
% dict(name=self.__class__.__name__), DeprecationWarning, stacklevel=2) return self.load_if_changed() elif self._path: with open(self._path, "rb") as fh: self._mtime = os.path.getmtime(self._path) self._load_lines(fh) else: raise RuntimeError("%s().path is not set, an explicit path is required" % self.__class__.__name__) return True def load_string(self, data): "Load state from unicode or bytes string, replacing current state" data = to_bytes(data, self.encoding, "data") self._mtime = 0 self._load_lines(BytesIO(data)) def _load_lines(self, lines): "load from sequence of lists" # XXX: found reference that "#" comment lines may be supported by # htpasswd, should verify this, and figure out how to handle them. # if true, this would also affect what can be stored in user field. # XXX: if multiple entries for a key, should we use the first one # or the last one? going w/ first entry for now. # XXX: how should this behave if parsing fails? currently # it will contain everything that was loaded up to error. # could clear / restore old state instead. parse = self._parse_record records = self._records = OrderedDict() for idx, line in enumerate(lines): key, value = parse(line, idx+1) if key not in records: records[key] = value def _parse_record(cls, record, lineno): # pragma: no cover - abstract method "parse line of file into (key, value) pair" raise NotImplementedError("should be implemented in subclass") #=================================================================== # saving #=================================================================== def _autosave(self): "subclass helper to call save() after any changes" if self.autosave and self._path: self.save() def save(self, path=None): """Save current state to file. If no path is specified, attempts to save to ``self.path``. 
""" if path is not None: with open(path, "wb") as fh: fh.writelines(self._iter_lines()) elif self._path: self.save(self._path) self._mtime = os.path.getmtime(self._path) else: raise RuntimeError("%s().path is not set, cannot autosave" % self.__class__.__name__) def to_string(self): "Export current state as a string of bytes" return join_bytes(self._iter_lines()) def _iter_lines(self): "iterator yielding lines of database" return (self._render_record(key,value) for key,value in iteritems(self._records)) def _render_record(cls, key, value): # pragma: no cover - abstract method "given key/value pair, encode as line of file" raise NotImplementedError("should be implemented in subclass") #=================================================================== # field encoding #=================================================================== def _encode_user(self, user): "user-specific wrapper for _encode_field()" return self._encode_field(user, "user") def _encode_realm(self, realm): # pragma: no cover - abstract method "realm-specific wrapper for _encode_field()" return self._encode_field(realm, "realm") def _encode_field(self, value, param="field"): """convert field to internal representation. internal representation is always bytes. byte strings are left as-is, unicode strings encoding using file's default encoding (or ``utf-8`` if no encoding has been specified). :raises UnicodeEncodeError: if unicode value cannot be encoded using default encoding. :raises ValueError: if resulting byte string contains a forbidden character, or is too long (>255 bytes). 
:returns: encoded identifer as bytes """ if isinstance(value, unicode): value = value.encode(self.encoding) elif not isinstance(value, bytes): raise ExpectedStringError(value, param) if len(value) > 255: raise ValueError("%s must be at most 255 characters: %r" % (param, value)) if any(c in _INVALID_FIELD_CHARS for c in value): raise ValueError("%s contains invalid characters: %r" % (param, value,)) return value def _decode_field(self, value): """decode field from internal representation to format returns by users() method, etc. :raises UnicodeDecodeError: if unicode value cannot be decoded using default encoding. (usually indicates wrong encoding set for file). :returns: field as unicode or bytes, as appropriate. """ assert isinstance(value, bytes), "expected value to be bytes" if self.return_unicode: return value.decode(self.encoding) else: return value # FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE, # and that longer ones are truncated. this may be side-effect of those # platforms supporting the 'plaintext' scheme. these classes don't currently # check for this. #=================================================================== # eoc #=================================================================== #============================================================================= # htpasswd editing #============================================================================= # FIXME: apr_md5_crypt technically the default only for windows, netware and tpf. # TODO: find out if htpasswd's "crypt" mode is a crypt() *call* or just des_crypt implementation. # if the former, we can support anything supported by passlib.hosts.host_context, # allowing more secure hashes than apr_md5_crypt to be used. # could perhaps add this behavior as an option to the constructor. # c.f. 
http://httpd.apache.org/docs/2.2/programs/htpasswd.html htpasswd_context = CryptContext([ "apr_md5_crypt", # man page notes supported everywhere, default on Windows, Netware, TPF "des_crypt", # man page notes server does NOT support this on Windows, Netware, TPF "ldap_sha1", # man page notes only for transitioning <-> ldap "plaintext" # man page notes server ONLY supports this on Windows, Netware, TPF ]) class HtpasswdFile(_CommonFile): """class for reading & writing Htpasswd files. The class constructor accepts the following arguments: :type path: filepath :param path: Specifies path to htpasswd file, use to implicitly load from and save to. This class has two modes of operation: 1. It can be "bound" to a local file by passing a ``path`` to the class constructor. In this case it will load the contents of the file when created, and the :meth:`load` and :meth:`save` methods will automatically load from and save to that file if they are called without arguments. 2. Alternately, it can exist as an independant object, in which case :meth:`load` and :meth:`save` will require an explicit path to be provided whenever they are called. As well, ``autosave`` behavior will not be available. This feature is new in Passlib 1.6, and is the default if no ``path`` value is provided to the constructor. This is also exposed as a readonly instance attribute. :type new: bool :param new: Normally, if *path* is specified, :class:`HtpasswdFile` will immediately load the contents of the file. However, when creating a new htpasswd file, applications can set ``new=True`` so that the existing file (if any) will not be loaded. .. versionadded:: 1.6 This feature was previously enabled by setting ``autoload=False``. That alias has been deprecated, and will be removed in Passlib 1.8 :type autosave: bool :param autosave: Normally, any changes made to an :class:`HtpasswdFile` instance will not be saved until :meth:`save` is explicitly called. 
However, if ``autosave=True`` is specified, any changes made will be saved to disk immediately (assuming *path* has been set). This is also exposed as a writeable instance attribute. :type encoding: str :param encoding: Optionally specify character encoding used to read/write file and hash passwords. Defaults to ``utf-8``, though ``latin-1`` is the only other commonly encountered encoding. This is also exposed as a readonly instance attribute. :type default_scheme: str :param default_scheme: Optionally specify default scheme to use when encoding new passwords. Must be one of ``"apr_md5_crypt"``, ``"des_crypt"``, ``"ldap_sha1"``, ``"plaintext"``. It defaults to ``"apr_md5_crypt"``. .. versionadded:: 1.6 This keyword was previously named ``default``. That alias has been deprecated, and will be removed in Passlib 1.8. :type context: :class:`~passlib.context.CryptContext` :param context: :class:`!CryptContext` instance used to encrypt and verify the hashes found in the htpasswd file. The default value is a pre-built context which supports all of the hashes officially allowed in an htpasswd file. This is also exposed as a readonly instance attribute. .. warning:: This option may be used to add support for non-standard hash formats to an htpasswd file. However, the resulting file will probably not be usuable by another application, and particularly not by Apache. :param autoload: Set to ``False`` to prevent the constructor from automatically loaded the file from disk. .. deprecated:: 1.6 This has been replaced by the *new* keyword. Instead of setting ``autoload=False``, you should use ``new=True``. Support for this keyword will be removed in Passlib 1.8. :param default: Change the default algorithm used to encrypt new passwords. .. deprecated:: 1.6 This has been renamed to *default_scheme* for clarity. Support for this alias will be removed in Passlib 1.8. Loading & Saving ================ .. automethod:: load .. automethod:: load_if_changed .. 
automethod:: load_string .. automethod:: save .. automethod:: to_string Inspection ================ .. automethod:: users .. automethod:: check_password .. automethod:: get_hash Modification ================ .. automethod:: set_password .. automethod:: delete Alternate Constructors ====================== .. automethod:: from_string Attributes ========== .. attribute:: path Path to local file that will be used as the default for all :meth:`load` and :meth:`save` operations. May be written to, initialized by the *path* constructor keyword. .. attribute:: autosave Writeable flag indicating whether changes will be automatically written to *path*. Errors ====== :raises ValueError: All of the methods in this class will raise a :exc:`ValueError` if any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``), or is longer than 255 characters. """ #=================================================================== # instance attrs #=================================================================== # NOTE: _records map stores <user> for the key, and <hash> for the value, # both in bytes which use self.encoding #=================================================================== # init & serialization #=================================================================== def __init__(self, path=None, default_scheme=None, context=htpasswd_context, **kwds): if 'default' in kwds: warn("``default`` is deprecated as of Passlib 1.6, " "and will be removed in Passlib 1.8, it has been renamed " "to ``default_scheem``.", DeprecationWarning, stacklevel=2) default_scheme = kwds.pop("default") if default_scheme: context = context.copy(default=default_scheme) self.context = context super(HtpasswdFile, self).__init__(path, **kwds) def _parse_record(self, record, lineno): # NOTE: should return (user, hash) tuple result = record.rstrip().split(_BCOLON) if len(result) != 2: raise ValueError("malformed htpasswd file (error reading line %d)" % lineno) return result def 
_render_record(self, user, hash): return render_bytes("%s:%s\n", user, hash) #=================================================================== # public methods #=================================================================== def users(self): "Return list of all users in database" return [self._decode_field(user) for user in self._records] ##def has_user(self, user): ## "check whether entry is present for user" ## return self._encode_user(user) in self._records ##def rename(self, old, new): ## """rename user account""" ## old = self._encode_user(old) ## new = self._encode_user(new) ## hash = self._records.pop(old) ## self._records[new] = hash ## self._autosave() def set_password(self, user, password): """Set password for user; adds user if needed. :returns: * ``True`` if existing user was updated. * ``False`` if user account was added. .. versionchanged:: 1.6 This method was previously called ``update``, it was renamed to prevent ambiguity with the dictionary method. The old alias is deprecated, and will be removed in Passlib 1.8. """ user = self._encode_user(user) hash = self.context.encrypt(password) if PY3: hash = hash.encode(self.encoding) existing = (user in self._records) self._records[user] = hash self._autosave() return existing @deprecated_method(deprecated="1.6", removed="1.8", replacement="set_password") def update(self, user, password): "set password for user" return self.set_password(user, password) def get_hash(self, user): """Return hash stored for user, or ``None`` if user not found. .. versionchanged:: 1.6 This method was previously named ``find``, it was renamed for clarity. The old name is deprecated, and will be removed in Passlib 1.8. """ try: return self._records[self._encode_user(user)] except KeyError: return None @deprecated_method(deprecated="1.6", removed="1.8", replacement="get_hash") def find(self, user): "return hash for user" return self.get_hash(user) # XXX: rename to something more explicit, like delete_user()? 
def delete(self, user): """Delete user's entry. :returns: * ``True`` if user deleted. * ``False`` if user not found. """ try: del self._records[self._encode_user(user)] except KeyError: return False self._autosave() return True def check_password(self, user, password): """Verify password for specified user. :returns: * ``None`` if user not found. * ``False`` if user found, but password does not match. * ``True`` if user found and password matches. .. versionchanged:: 1.6 This method was previously called ``verify``, it was renamed to prevent ambiguity with the :class:`!CryptContext` method. The old alias is deprecated, and will be removed in Passlib 1.8. """ user = self._encode_user(user) hash = self._records.get(user) if hash is None: return None if isinstance(password, unicode): # NOTE: encoding password to match file, making the assumption # that server will use same encoding to hash the password. password = password.encode(self.encoding) ok, new_hash = self.context.verify_and_update(password, hash) if ok and new_hash is not None: # rehash user's password if old hash was deprecated self._records[user] = new_hash self._autosave() return ok @deprecated_method(deprecated="1.6", removed="1.8", replacement="check_password") def verify(self, user, password): "verify password for user" return self.check_password(user, password) #=================================================================== # eoc #=================================================================== #============================================================================= # htdigest editing #============================================================================= class HtdigestFile(_CommonFile): """class for reading & writing Htdigest files. The class constructor accepts the following arguments: :type path: filepath :param path: Specifies path to htdigest file, use to implicitly load from and save to. This class has two modes of operation: 1. 
           It can be "bound" to a local file by passing a ``path`` to the class
           constructor. In this case it will load the contents of the file when
           created, and the :meth:`load` and :meth:`save` methods will automatically
           load from and save to that file if they are called without arguments.

        2. Alternately, it can exist as an independent object, in which case
           :meth:`load` and :meth:`save` will require an explicit path to be
           provided whenever they are called. As well, ``autosave`` behavior
           will not be available.

           This feature is new in Passlib 1.6, and is the default if no
           ``path`` value is provided to the constructor.

        This is also exposed as a readonly instance attribute.

    :type default_realm: str
    :param default_realm:

        If ``default_realm`` is set, all the :class:`HtdigestFile`
        methods that require a realm will use this value if one is not
        provided explicitly. If unset, they will raise an error stating
        that an explicit realm is required.

        This is also exposed as a writeable instance attribute.

        .. versionadded:: 1.6

    :type new: bool
    :param new:

        Normally, if *path* is specified, :class:`HtdigestFile` will
        immediately load the contents of the file. However, when creating
        a new htpasswd file, applications can set ``new=True`` so that
        the existing file (if any) will not be loaded.

        .. versionadded:: 1.6
            This feature was previously enabled by setting ``autoload=False``.
            That alias has been deprecated, and will be removed in Passlib 1.8

    :type autosave: bool
    :param autosave:

        Normally, any changes made to an :class:`HtdigestFile` instance
        will not be saved until :meth:`save` is explicitly called. However,
        if ``autosave=True`` is specified, any changes made will be
        saved to disk immediately (assuming *path* has been set).

        This is also exposed as a writeable instance attribute.

    :type encoding: str
    :param encoding:

        Optionally specify character encoding used to read/write file
        and hash passwords. Defaults to ``utf-8``, though ``latin-1``
        is the only other commonly encountered encoding.

        This is also exposed as a readonly instance attribute.

    :param autoload:
        Set to ``False`` to prevent the constructor from automatically
        loaded the file from disk.

        .. deprecated:: 1.6
            This has been replaced by the *new* keyword.
            Instead of setting ``autoload=False``, you should use
            ``new=True``. Support for this keyword will be removed
            in Passlib 1.8.

    Loading & Saving
    ================
    .. automethod:: load
    .. automethod:: load_if_changed
    .. automethod:: load_string
    .. automethod:: save
    .. automethod:: to_string

    Inspection
    ==========
    .. automethod:: realms
    .. automethod:: users
    .. automethod:: check_password(user[, realm], password)
    .. automethod:: get_hash

    Modification
    ============
    .. automethod:: set_password(user[, realm], password)
    .. automethod:: delete
    .. automethod:: delete_realm

    Alternate Constructors
    ======================
    .. automethod:: from_string

    Attributes
    ==========
    .. attribute:: default_realm

        The default realm that will be used if one is not provided
        to methods that require it. By default this is ``None``,
        in which case an explicit realm must be provided for every
        method call. Can be written to.

    .. attribute:: path

        Path to local file that will be used as the default
        for all :meth:`load` and :meth:`save` operations.
        May be written to, initialized by the *path* constructor keyword.

    .. attribute:: autosave

        Writeable flag indicating whether changes will be automatically
        written to *path*.

    Errors
    ======
    :raises ValueError:
        All of the methods in this class will raise a :exc:`ValueError` if
        any user name or realm contains a forbidden character
        (one of ``:\\r\\n\\t\\x00``), or is longer than 255 characters.
    """
    #===================================================================
    # instance attrs
    #===================================================================

    # NOTE: _records map stores (<user>,<realm>) for the key,
    #       and <hash> as the value, all as <self.encoding> bytes.

    # NOTE: unlike htpasswd, this class doesn't use a CryptContext,
    #       as only one hash format is supported: htdigest.

    # optionally specify default realm that will be used if none
    # is provided to a method call. otherwise realm is always required.
    default_realm = None

    #===================================================================
    # init & serialization
    #===================================================================
    def __init__(self, path=None, default_realm=None, **kwds):
        self.default_realm = default_realm
        super(HtdigestFile, self).__init__(path, **kwds)

    def _parse_record(self, record, lineno):
        # parses "user:realm:hash" lines; key is the (user, realm) pair.
        result = record.rstrip().split(_BCOLON)
        if len(result) != 3:
            raise ValueError("malformed htdigest file (error reading line %d)"
                             % lineno)
        user, realm, hash = result
        return (user, realm), hash

    def _render_record(self, key, hash):
        user, realm = key
        return render_bytes("%s:%s:%s\n", user, realm, hash)

    def _encode_realm(self, realm):
        # override default _encode_realm to fill in default realm field
        if realm is None:
            realm = self.default_realm
        if realm is None:
            raise TypeError("you must specify a realm explicitly, "
                            "or set the default_realm attribute")
        return self._encode_field(realm, "realm")

    #===================================================================
    # public methods
    #===================================================================

    def realms(self):
        """Return list of all realms in database"""
        realms = set(key[1] for key in self._records)
        return [self._decode_field(realm) for realm in realms]

    def users(self, realm=None):
        """Return list of all users in specified realm.

        * uses ``self.default_realm`` if no realm explicitly provided.
        * returns empty list if realm not found.
        """
        realm = self._encode_realm(realm)
        return [self._decode_field(key[0]) for key in self._records
                if key[1] == realm]

    ##def has_user(self, user, realm=None):
    ##    "check if user+realm combination exists"
    ##    user = self._encode_user(user)
    ##    realm = self._encode_realm(realm)
    ##    return (user,realm) in self._records

    ##def rename_realm(self, old, new):
    ##    """rename all accounts in realm"""
    ##    old = self._encode_realm(old)
    ##    new = self._encode_realm(new)
    ##    keys = [key for key in self._records if key[1] == old]
    ##    for key in keys:
    ##        hash = self._records.pop(key)
    ##        self._records[key[0],new] = hash
    ##    self._autosave()
    ##    return len(keys)

    ##def rename(self, old, new, realm=None):
    ##    """rename user account"""
    ##    old = self._encode_user(old)
    ##    new = self._encode_user(new)
    ##    realm = self._encode_realm(realm)
    ##    hash = self._records.pop((old,realm))
    ##    self._records[new,realm] = hash
    ##    self._autosave()

    def set_password(self, user, realm=None, password=_UNSET):
        """Set password for user; adds user & realm if needed.

        If ``self.default_realm`` has been set, this may be called
        with the syntax ``set_password(user, password)``,
        otherwise it must be called with all three arguments:
        ``set_password(user, realm, password)``.

        :returns:
            * ``True`` if existing user was updated
            * ``False`` if user account added.
        """
        if password is _UNSET:
            # called w/ two args - (user, password), use default realm
            realm, password = None, realm
        user = self._encode_user(user)
        realm = self._encode_realm(realm)
        key = (user, realm)
        existing = (key in self._records)
        hash = htdigest.encrypt(password, user, realm, encoding=self.encoding)
        if PY3:
            # stored records are bytes; htdigest.encrypt returns str on PY3
            hash = hash.encode(self.encoding)
        self._records[key] = hash
        self._autosave()
        return existing

    @deprecated_method(deprecated="1.6", removed="1.8", replacement="set_password")
    def update(self, user, realm, password):
        "set password for user"
        return self.set_password(user, realm, password)

    # XXX: rename to something more explicit, like get_hash()?
    def get_hash(self, user, realm=None):
        """Return :class:`~passlib.hash.htdigest` hash stored for user.

        * uses ``self.default_realm`` if no realm explicitly provided.
        * returns ``None`` if user or realm not found.

        .. versionchanged:: 1.6
            This method was previously named ``find``, it was renamed
            for clarity. The old name is deprecated, and will be removed
            in Passlib 1.8.
        """
        key = (self._encode_user(user), self._encode_realm(realm))
        hash = self._records.get(key)
        if hash is None:
            return None
        if PY3:
            # records are stored as bytes; public API returns str on PY3
            hash = hash.decode(self.encoding)
        return hash

    @deprecated_method(deprecated="1.6", removed="1.8", replacement="get_hash")
    def find(self, user, realm):
        "return hash for user"
        return self.get_hash(user, realm)

    # XXX: rename to something more explicit, like delete_user()?
    def delete(self, user, realm=None):
        """Delete user's entry for specified realm.

        if realm is not specified, uses ``self.default_realm``.

        :returns:
            * ``True`` if user deleted,
            * ``False`` if user not found in realm.
        """
        key = (self._encode_user(user), self._encode_realm(realm))
        try:
            del self._records[key]
        except KeyError:
            return False
        self._autosave()
        return True

    def delete_realm(self, realm):
        """Delete all users for specified realm.

        if realm is not specified, uses ``self.default_realm``.

        :returns: number of users deleted (0 if realm not found)
        """
        realm = self._encode_realm(realm)
        records = self._records
        keys = [key for key in records if key[1] == realm]
        for key in keys:
            del records[key]
        self._autosave()
        return len(keys)

    def check_password(self, user, realm=None, password=_UNSET):
        """Verify password for specified user + realm.

        If ``self.default_realm`` has been set, this may be called
        with the syntax ``check_password(user, password)``,
        otherwise it must be called with all three arguments:
        ``check_password(user, realm, password)``.

        :returns:
            * ``None`` if user or realm not found.
            * ``False`` if user found, but password does not match.
            * ``True`` if user found and password matches.

        .. versionchanged:: 1.6
            This method was previously called ``verify``, it was renamed
            to prevent ambiguity with the :class:`!CryptContext` method.
            The old alias is deprecated, and will be removed in Passlib 1.8.
        """
        if password is _UNSET:
            # called w/ two args - (user, password), use default realm
            realm, password = None, realm
        user = self._encode_user(user)
        realm = self._encode_realm(realm)
        hash = self._records.get((user,realm))
        if hash is None:
            return None
        return htdigest.verify(password, hash, user, realm,
                               encoding=self.encoding)

    @deprecated_method(deprecated="1.6", removed="1.8", replacement="check_password")
    def verify(self, user, realm, password):
        "verify password for user"
        return self.check_password(user, realm, password)

    #===================================================================
    # eoc
    #===================================================================

#=============================================================================
# eof
#=============================================================================
gpl-2.0
guorendong/iridium-browser-ubuntu
tools/json_schema_compiler/js_externs_generator.py
12
11188
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Generator that produces an externs file for the Closure Compiler.
Note: This is a work in progress, and generated externs may require tweaking.

See https://developers.google.com/closure/compiler/docs/api-tutorial3#externs
"""

from code import Code
from model import *
from schema_util import *

import os
from datetime import datetime
import re

LICENSE = ("""// Copyright %s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
""" % datetime.now().year)

class JsExternsGenerator(object):
  def Generate(self, namespace):
    return _Generator(namespace).Generate()

class _Generator(object):
  def __init__(self, namespace):
    self._namespace = namespace

  def Generate(self):
    """Generates a Code object with the schema for the entire namespace.
    """
    c = Code()
    (c.Append(LICENSE)
      .Append()
      .Append('/** @fileoverview Externs generated from namespace: %s */' %
              self._namespace.name)
      .Append())

    c.Cblock(self._GenerateNamespaceObject())

    for js_type in self._namespace.types.values():
      c.Cblock(self._GenerateType(js_type))

    for function in self._namespace.functions.values():
      c.Cblock(self._GenerateFunction(function))

    for event in self._namespace.events.values():
      c.Cblock(self._GenerateEvent(event))

    return c

  def _GenerateType(self, js_type):
    """Given a Type object, returns the Code for this type's definition.
    """
    c = Code()
    if js_type.property_type is PropertyType.ENUM:
      c.Concat(self._GenerateEnumJsDoc(js_type))
    else:
      c.Concat(self._GenerateTypeJsDoc(js_type))

    return c

  def _GenerateEnumJsDoc(self, js_type):
    """Given an Enum Type object, returns the Code for the enum's definition.
    """
    c = Code()
    (c.Sblock(line='/**', line_prefix=' * ')
      .Append('@enum {string}')
      .Append(self._GenerateSeeLink('type', js_type.simple_name))
      .Eblock(' */'))
    c.Append('chrome.%s.%s = {' % (self._namespace.name, js_type.name))

    def get_property_name(e):
      # Enum properties are normified to be in ALL_CAPS_STYLE.
      # Assume enum '1ring-rulesThemAll'.
      # Transform to '1ring-rules_Them_All'.
      e = re.sub(r'([a-z])([A-Z])', r'\1_\2', e)
      # Transform to '1ring_rules_Them_All'.
      e = re.sub(r'\W', '_', e)
      # Transform to '_1ring_rules_Them_All'.
      e = re.sub(r'^(\d)', r'_\1', e)
      # Transform to '_1RING_RULES_THEM_ALL'.
      return e.upper()

    c.Append('\n'.join(
        ["  %s: '%s'," % (get_property_name(v.name), v.name)
            for v in js_type.enum_values]))
    c.Append('};')

    return c

  def _IsTypeConstructor(self, js_type):
    """Returns true if the given type should be a @constructor. If this returns
       false, the type is a typedef.
    """
    return any(prop.type_.property_type is PropertyType.FUNCTION
               for prop in js_type.properties.values())

  def _GenerateTypeJsDoc(self, js_type):
    """Generates the documentation for a type as a Code.

    Returns an empty code object if the object has no documentation.
    """
    c = Code()
    c.Sblock(line='/**', line_prefix=' * ')

    if js_type.description:
      for line in js_type.description.splitlines():
        c.Append(line)

    is_constructor = self._IsTypeConstructor(js_type)
    if is_constructor:
      c.Comment('@constructor', comment_prefix=' * ', wrap_indent=4)
    else:
      c.Concat(self._GenerateTypedef(js_type.properties))

    c.Append(self._GenerateSeeLink('type', js_type.simple_name))
    c.Eblock(' */')

    var = 'var ' + js_type.simple_name
    if is_constructor:
      var += ' = function() {}'
    var += ';'
    c.Append(var)

    return c

  def _GenerateTypedef(self, properties):
    """Given an OrderedDict of properties, returns a Code containing a @typedef.
    """
    if not properties: return Code()

    c = Code()
    c.Append('@typedef {')
    c.Concat(self._GenerateObjectDefinition(properties), new_line=False)
    c.Append('}', new_line=False)
    return c

  def _GenerateObjectDefinition(self, properties):
    """Given an OrderedDict of properties, returns a Code containing the
       description of an object.
    """
    if not properties: return Code()

    c = Code()
    c.Sblock('{')
    first = True
    for field, prop in properties.items():
      # Avoid trailing comma.
      # TODO(devlin): This will be unneeded, if/when
      # https://github.com/google/closure-compiler/issues/796 is fixed.
      if not first:
        c.Append(',', new_line=False)
      first = False
      js_type = self._TypeToJsType(prop.type_)
      if prop.optional:
        js_type = (Code().
                       Append('(').
                       Concat(js_type, new_line=False).
                       Append('|undefined)', new_line=False))
      c.Append('%s: ' % field, strip_right=False)
      c.Concat(js_type, new_line=False)

    c.Eblock('}')

    return c

  def _GenerateFunctionJsDoc(self, function):
    """Generates the documentation for a function as a Code.

    Returns an empty code object if the object has no documentation.
    """
    c = Code()
    c.Sblock(line='/**', line_prefix=' * ')

    if function.description:
      c.Comment(function.description, comment_prefix='')

    def append_field(c, tag, js_type, name, optional, description):
      """Appends a field (param or return) to the jsdoc Code."""
      c.Append('@%s {' % tag)
      c.Concat(js_type, new_line=False)
      if optional:
        c.Append('=', new_line=False)
      c.Append('} %s' % name, new_line=False)
      if description:
        c.Comment(' %s' % description, comment_prefix='',
                  wrap_indent=4, new_line=False)

    for param in function.params:
      append_field(c, 'param', self._TypeToJsType(param.type_), param.name,
                   param.optional, param.description)

    if function.callback:
      append_field(c, 'param', self._FunctionToJsFunction(function.callback),
                   function.callback.name, function.callback.optional,
                   function.callback.description)

    if function.returns:
      append_field(c, 'return', self._TypeToJsType(function.returns),
                   '', False, function.returns.description)

    if function.deprecated:
      c.Append('@deprecated %s' % function.deprecated)

    c.Append(self._GenerateSeeLink('method', function.name))

    c.Eblock(' */')
    return c

  def _FunctionToJsFunction(self, function):
    """Converts a model.Function to a JS type (i.e., function([params])...)"""
    c = Code()
    c.Append('function(')
    for i, param in enumerate(function.params):
      c.Concat(self._TypeToJsType(param.type_), new_line=False)
      # FIX: was ``i is not len(function.params) - 1`` -- identity comparison
      # on ints only happens to work via CPython's small-int cache; use !=.
      if i != len(function.params) - 1:
        c.Append(', ', new_line=False, strip_right=False)
    c.Append('):', new_line=False)

    if function.returns:
      c.Concat(self._TypeToJsType(function.returns), new_line=False)
    else:
      c.Append('void', new_line=False)

    return c

  def _TypeToJsType(self, js_type):
    """Converts a model.Type to a JS type (number, Array, etc.)"""
    if js_type.property_type in (PropertyType.INTEGER, PropertyType.DOUBLE):
      return Code().Append('number')
    if js_type.property_type is PropertyType.OBJECT:
      if js_type.properties:
        return self._GenerateObjectDefinition(js_type.properties)
      return Code().Append('Object')
    if js_type.property_type is PropertyType.ARRAY:
      return (Code().Append('!Array<').
              Concat(self._TypeToJsType(js_type.item_type), new_line=False).
              Append('>', new_line=False))
    if js_type.property_type is PropertyType.REF:
      ref_type = js_type.ref_type
      # Enums are defined as chrome.fooAPI.MyEnum, but types are defined simply
      # as MyType.
      if self._namespace.types[ref_type].property_type is PropertyType.ENUM:
        ref_type = '!chrome.%s.%s' % (self._namespace.name, ref_type)
      return Code().Append(ref_type)
    if js_type.property_type is PropertyType.CHOICES:
      c = Code()
      c.Append('(')
      for i, choice in enumerate(js_type.choices):
        c.Concat(self._TypeToJsType(choice), new_line=False)
        # FIX: was ``i is not len(js_type.choices) - 1`` (int identity
        # comparison); use value inequality.
        if i != len(js_type.choices) - 1:
          c.Append('|', new_line=False)
      c.Append(')', new_line=False)
      return c
    if js_type.property_type is PropertyType.FUNCTION:
      return self._FunctionToJsFunction(js_type.function)
    if js_type.property_type is PropertyType.ANY:
      return Code().Append('*')
    if js_type.property_type.is_fundamental:
      return Code().Append(js_type.property_type.name)
    return Code().Append('?') # TODO(tbreisacher): Make this more specific.

  def _GenerateFunction(self, function):
    """Generates the code representing a function, including its documentation.
       For example:

       /**
        * @param {string} title The new title.
        */
       chrome.window.setTitle = function(title) {};
    """
    c = Code()
    params = self._GenerateFunctionParams(function)
    (c.Concat(self._GenerateFunctionJsDoc(function))
      .Append('chrome.%s.%s = function(%s) {};' % (self._namespace.name,
                                                   function.name,
                                                   params))
    )
    return c

  def _GenerateEvent(self, event):
    """Generates the code representing an event.
       For example:

       /** @type {!ChromeEvent} */
       chrome.bookmarks.onChildrenReordered;
    """
    c = Code()
    c.Sblock(line='/**', line_prefix=' * ')
    if event.description:
      c.Comment(event.description, comment_prefix='')
    c.Append('@type {!ChromeEvent}')
    c.Append(self._GenerateSeeLink('event', event.name))
    c.Eblock(' */')
    c.Append('chrome.%s.%s;' % (self._namespace.name, event.name))
    return c

  def _GenerateNamespaceObject(self):
    """Generates the code creating namespace object.
       For example:

       /**
        * @const
        */
       chrome.bookmarks = {};
    """
    c = Code()
    (c.Append("""/**
 * @const
 */""")
      .Append('chrome.%s = {};' % self._namespace.name))
    return c

  def _GenerateFunctionParams(self, function):
    params = function.params[:]
    if function.callback:
      params.append(function.callback)
    return ', '.join(param.name for param in params)

  def _GenerateSeeLink(self, object_type, object_name):
    """Generates a @see link for a given API 'object' (type, method, or event).
    """
    # NOTE(devlin): This is kind of a hack. Some APIs will be hosted on
    # developer.chrome.com/apps/ instead of /extensions/, and some APIs have
    # '.'s in them (like app.window), which should resolve to 'app_window'.
    # Luckily, the doc server has excellent url resolution, and knows exactly
    # what we mean. This saves us from needing any complicated logic here.
    return ('@see https://developer.chrome.com/extensions/%s#%s-%s' %
            (self._namespace.name, object_type, object_name))
bsd-3-clause
qk4l/Flexget
flexget/tests/test_input_sites.py
5
1359
from __future__ import unicode_literals, division, absolute_import from builtins import * # noqa pylint: disable=unused-import, redefined-builtin import pytest @pytest.mark.online class TestInputSites(object): config = (""" templates: global: headers: User-Agent: "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 """ + """(KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36" tasks: test_sceper: sceper: http://sceper.ws/category/movies/movies-dvd-rip test_apple_trailers: apple_trailers: quality: 480p genres: ['Action and Adventure'] test_apple_trailers_simple: apple_trailers: 720p """) @pytest.mark.skip(reason='Missing a usable urlrewriter for uploadgig?') def test_sceper(self, execute_task): task = execute_task('test_sceper') assert task.entries, 'no entries created / site may be down' def test_apple_trailers(self, execute_task, use_vcr): task = execute_task('test_apple_trailers') assert task.entries, 'no entries created / site may be down' def test_apple_trailers_simple(self, execute_task): task = execute_task('test_apple_trailers_simple') assert task.entries, 'no entries created / site may be down'
mit
ChanChiChoi/scikit-learn
sklearn/cluster/tests/test_birch.py
342
5603
""" Tests for the birch clustering algorithm. """ from scipy import sparse import numpy as np from sklearn.cluster.tests.common import generate_clustered_data from sklearn.cluster.birch import Birch from sklearn.cluster.hierarchical import AgglomerativeClustering from sklearn.datasets import make_blobs from sklearn.linear_model import ElasticNet from sklearn.metrics import pairwise_distances_argmin, v_measure_score from sklearn.utils.testing import assert_greater_equal from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_warns def test_n_samples_leaves_roots(): # Sanity check for the number of samples in leaves and roots X, y = make_blobs(n_samples=10) brc = Birch() brc.fit(X) n_samples_root = sum([sc.n_samples_ for sc in brc.root_.subclusters_]) n_samples_leaves = sum([sc.n_samples_ for leaf in brc._get_leaves() for sc in leaf.subclusters_]) assert_equal(n_samples_leaves, X.shape[0]) assert_equal(n_samples_root, X.shape[0]) def test_partial_fit(): # Test that fit is equivalent to calling partial_fit multiple times X, y = make_blobs(n_samples=100) brc = Birch(n_clusters=3) brc.fit(X) brc_partial = Birch(n_clusters=None) brc_partial.partial_fit(X[:50]) brc_partial.partial_fit(X[50:]) assert_array_equal(brc_partial.subcluster_centers_, brc.subcluster_centers_) # Test that same global labels are obtained after calling partial_fit # with None brc_partial.set_params(n_clusters=3) brc_partial.partial_fit(None) assert_array_equal(brc_partial.subcluster_labels_, brc.subcluster_labels_) def test_birch_predict(): # Test the predict method predicts the nearest centroid. 
rng = np.random.RandomState(0) X = generate_clustered_data(n_clusters=3, n_features=3, n_samples_per_cluster=10) # n_samples * n_samples_per_cluster shuffle_indices = np.arange(30) rng.shuffle(shuffle_indices) X_shuffle = X[shuffle_indices, :] brc = Birch(n_clusters=4, threshold=1.) brc.fit(X_shuffle) centroids = brc.subcluster_centers_ assert_array_equal(brc.labels_, brc.predict(X_shuffle)) nearest_centroid = pairwise_distances_argmin(X_shuffle, centroids) assert_almost_equal(v_measure_score(nearest_centroid, brc.labels_), 1.0) def test_n_clusters(): # Test that n_clusters param works properly X, y = make_blobs(n_samples=100, centers=10) brc1 = Birch(n_clusters=10) brc1.fit(X) assert_greater(len(brc1.subcluster_centers_), 10) assert_equal(len(np.unique(brc1.labels_)), 10) # Test that n_clusters = Agglomerative Clustering gives # the same results. gc = AgglomerativeClustering(n_clusters=10) brc2 = Birch(n_clusters=gc) brc2.fit(X) assert_array_equal(brc1.subcluster_labels_, brc2.subcluster_labels_) assert_array_equal(brc1.labels_, brc2.labels_) # Test that the wrong global clustering step raises an Error. clf = ElasticNet() brc3 = Birch(n_clusters=clf) assert_raises(ValueError, brc3.fit, X) # Test that a small number of clusters raises a warning. brc4 = Birch(threshold=10000.) 
assert_warns(UserWarning, brc4.fit, X) def test_sparse_X(): # Test that sparse and dense data give same results X, y = make_blobs(n_samples=100, centers=10) brc = Birch(n_clusters=10) brc.fit(X) csr = sparse.csr_matrix(X) brc_sparse = Birch(n_clusters=10) brc_sparse.fit(csr) assert_array_equal(brc.labels_, brc_sparse.labels_) assert_array_equal(brc.subcluster_centers_, brc_sparse.subcluster_centers_) def check_branching_factor(node, branching_factor): subclusters = node.subclusters_ assert_greater_equal(branching_factor, len(subclusters)) for cluster in subclusters: if cluster.child_: check_branching_factor(cluster.child_, branching_factor) def test_branching_factor(): # Test that nodes have at max branching_factor number of subclusters X, y = make_blobs() branching_factor = 9 # Purposefully set a low threshold to maximize the subclusters. brc = Birch(n_clusters=None, branching_factor=branching_factor, threshold=0.01) brc.fit(X) check_branching_factor(brc.root_, branching_factor) brc = Birch(n_clusters=3, branching_factor=branching_factor, threshold=0.01) brc.fit(X) check_branching_factor(brc.root_, branching_factor) # Raises error when branching_factor is set to one. brc = Birch(n_clusters=None, branching_factor=1, threshold=0.01) assert_raises(ValueError, brc.fit, X) def check_threshold(birch_instance, threshold): """Use the leaf linked list for traversal""" current_leaf = birch_instance.dummy_leaf_.next_leaf_ while current_leaf: subclusters = current_leaf.subclusters_ for sc in subclusters: assert_greater_equal(threshold, sc.radius) current_leaf = current_leaf.next_leaf_ def test_threshold(): # Test that the leaf subclusters have a threshold lesser than radius X, y = make_blobs(n_samples=80, centers=4) brc = Birch(threshold=0.5, n_clusters=None) brc.fit(X) check_threshold(brc, 0.5) brc = Birch(threshold=5.0, n_clusters=None) brc.fit(X) check_threshold(brc, 5.)
bsd-3-clause
jmiserez/pox
pox/forwarding/l2_ofcommand_learning.py
2
5028
# Copyright 2011 Kyriakos Zarifis # Copyright 2008 (C) Nicira, Inc. # # This file is part of POX. # # POX is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # POX is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with POX. If not, see <http://www.gnu.org/licenses/>. """ This is an L2 learning switch derived originally from NOX's pyswitch example. It is now a demonstration of the ofcommand library for constructing OpenFlow messages. """ from time import time # TODO: mac_to_str and mact_to_int aren't currently defined in packet_utils... #from pox.lib.packet.packet_utils import mac_to_str, mac_to_int from pox.lib.packet.ethernet import ethernet from pox.lib.packet.tcp import tcp from pox.lib.packet.udp import udp from pox.lib.packet.vlan import vlan from pox.lib.packet.ipv4 import ipv4 from pox.lib.packet.icmp import icmp from pox.lib.packet.ethernet import ethernet from pox.core import core from pox.lib.revent import * from pox.lib.addresses import EthAddr log = core.getLogger() import pox.openflow.ofcommand as ofcommand class dumb_l2_switch (EventMixin): def __init__ (self): log.info("Starting") self.listenTo(core) self.st = {} def _handle_GoingUpEvent (self, event): self.listenTo(core.openflow) def _handle_PacketIn (self, event): """Packet entry method. 
Drop LLDP packets (or we get confused) and attempt learning and forwarding """ con = event.connection dpid = event.connection.dpid inport = event.port packet = event.parse() buffer_id = event.ofp.buffer_id if not packet.parsed: log.warning("%i %i ignoring unparsed packet", dpid, inport) return if not con in self.st: log.info('registering new switch %s', str(dpid)) self.st[con] = {} # don't forward lldp packets if packet.type == ethernet.LLDP_TYPE: return # learn MAC on incoming port self.do_l2_learning(con, inport, packet) # forward packet self.forward_l2_packet(con, inport, packet, packet.arr, buffer_id) def do_l2_learning(self, con, inport, packet): """Given a packet, learn the source and peg to a switch/inport """ # learn MAC on incoming port srcaddr = EthAddr(packet.src) #if ord(srcaddr[0]) & 1: # return if self.st[con].has_key(srcaddr.toStr()): # change to raw? # we had already heard from this switch dst = self.st[con][srcaddr.toStr()] # raw? if dst[0] != inport: # but from a different port log.info('MAC has moved from %s to %s', str(dst), str(inport)) else: return else: log.info('learned MAC %s on Switch %s, Port %d', srcaddr.toStr(), con.dpid,inport) # learn or update timestamp of entry self.st[con][srcaddr.toStr()] = (inport, time(), packet) # raw? # Replace any old entry for (switch,mac). #mac = mac_to_int(packet.src) def forward_l2_packet(self, con, inport, packet, buf, bufid): """If we've learned the destination MAC set up a flow and send only out of its inport. Else, flood. """ dstaddr = EthAddr(packet.dst) #if not ord(dstaddr[0]) & 1 and # what did this do? if self.st[con].has_key(dstaddr.toStr()): # raw? prt = self.st[con][dstaddr.toStr()] # raw? 
if prt[0] == inport: log.warning('**warning** learned port = inport') ofcommand.floodPacket(con, inport, packet, buf, bufid) else: # We know the outport, set up a flow log.info('installing flow for %s', str(packet)) match = ofcommand.extractMatch(packet) actions = [ofcommand.Output(prt[0])] ofcommand.addFlowEntry(con, inport, match, actions, bufid) # Separate bufid, make addFlowEntry() only ADD the entry # send/wait for Barrier # sendBufferedPacket(bufid) else: # haven't learned destination MAC. Flood ofcommand.floodPacket(con, inport, packet, buf, bufid) ''' add arp cache timeout? # Timeout for cached MAC entries CACHE_TIMEOUT = 5 def timer_callback(): """Responsible for timing out cache entries. Called every 1 second. """ global st curtime = time() for con in st.keys(): for entry in st[con].keys(): if (curtime - st[con][entry][1]) > CACHE_TIMEOUT: con.msg('timing out entry '+mac_to_str(entry)+" -> "+str(st[con][entry][0])+' on switch ' + str(con)) st[con].pop(entry) '''
gpl-3.0
iledarn/addons-yelizariev
mail_wall_widgets/models.py
16
12363
from openerp.osv import osv,fields as old_fields from openerp import api, models, fields, tools from openerp.tools.safe_eval import safe_eval try: from openerp.addons.email_template.email_template import mako_template_env except ImportError: try: from openerp.addons.mail.mail_template import mako_template_env except ImportError: pass import copy from openerp.tools.translate import _ from datetime import date, datetime, timedelta from openerp.tools import DEFAULT_SERVER_DATE_FORMAT from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT class mail_wall_widgets_widget(models.Model): _name = 'mail.wall.widgets.widget' _order = "sequence, id" _columns = { 'name': old_fields.char('Name', required=True, translate=True), 'type': old_fields.selection(string='Type', selection=[ ('list', 'List'), ('funnel', 'Funnel'), ('slice', 'Slice'), #('', ''), #('', ''), #('', ''), #('', ''), ], help=''' Slice - use "domain" for total and "won_domain" for target '''), 'description': old_fields.text('Description', translate=True), 'group_ids': old_fields.many2many('res.groups', relation='mail_wall_widgets_widget_group', column1='widget_id', column2='group_id', string='Groups', help="User groups to show widget"), 'model_id': old_fields.many2one('ir.model', string='Model', help='The model object for the field to evaluate'), 'domain': old_fields.char("Filter Domain", help="Domain for filtering records. General rule, not user depending, e.g. [('state', '=', 'done')]. The expression can contain reference to 'user' which is a browse record of the current user if not in batch mode.", required=True), 'limit': old_fields.integer('Limit', help='Limit count of records to show'), 'order': old_fields.char('Order', help='Order of records to show'), 'value_field_id': old_fields.many2one('ir.model.fields', string='Value field', help='The field containing the value of record'), 'stage_field_id': old_fields.many2one('ir.model.fields', string='Stage field', help='Field to split records in funnel. 
It can be selection type or many2one (the later should have "sequence" field)'), #'stage_field_domain': old_fields.many2one('ir.model.fields', # string='Stage field domain', # help='(for many2one stage_field_id) Domain to find stage objects'), 'won_domain': old_fields.char('Won domain', help='Domain to find won objects'), 'field_date_id': old_fields.many2one('ir.model.fields', string='Date Field', help='The date to use for the time period evaluated'), 'start_date': old_fields.date('Start Date'), 'end_date': old_fields.date('End Date'), # no start and end = always active 'content': old_fields.char('Line template', help='Mako template to show content'), 'value_field_monetary': old_fields.boolean('Value is monetary'), 'cache': old_fields.boolean('Cache'), 'active': old_fields.boolean('Active'), 'sequence': old_fields.integer('Sequence', help='Sequence number for ordering'), } precision = fields.Float('Precision', help='round(Value/precision) * precision. E.g. 12345,333333 will be rounded to 12345,33 for precision=0.01, and to 12000 for precision=1000', default=0.01) agenda = fields.Boolean('Agenda', help='Split records by date: overdue, today, tomorrow, later') _defaults = { 'active': True, 'cache': False, 'limit': None, 'order': None, } @api.one def get_data(self, user): domain = safe_eval(self.domain, {'user': user}) won_domain = safe_eval(self.won_domain or '[]', {'user': user}) field_date_name = self.field_date_id and self.field_date_id.name if self.start_date and field_date_name: domain.append((field_date_name, '>=', self.start_date)) if self.end_date and field_date_name: domain.append((field_date_name, '<=', self.end_date)) res = { 'name': self.name, 'type': self.type, 'model': self.model_id.model, 'domain': str(domain), 'precision': self.precision, } obj = self.env[self.model_id.model] if self.type == 'list': total_count = obj.search_count(domain) groups = [{'test': lambda r: True}] if self.agenda: today = date.today() tomorrow = today + timedelta(days=1) def 
r2date(r): d = getattr(r, field_date_name) if d: d = datetime.strptime(d, self.field_date_id.ttype=='date' and DEFAULT_SERVER_DATE_FORMAT or DEFAULT_SERVER_DATETIME_FORMAT) d = d.date() else: d = date.today() return d groups = [ { 'label': _('Overdue'), 'class': 'overdue', 'test': lambda r: r2date(r) < today, 'mandatory': False, }, { 'label': _('Today'), 'class': 'today', 'test': lambda r: r2date(r) == today, 'mandatory': True, }, { 'label': _('Tomorrow'), 'class': 'tomorrow', 'test': lambda r: r2date(r) == tomorrow, 'mandatory': False, }, { 'label': _('Later'), 'class': 'later', 'test': lambda r: r2date(r) > tomorrow, 'mandatory': False, }, ] for g in groups: g['lines'] = [] res.update({ 'more': self.limit and self.limit < total_count, 'total_count': total_count, 'agenda': self.agenda, 'groups': groups, }) for r in obj.search(domain, limit=self.limit, order=self.order): mako = mako_template_env.from_string(tools.ustr(self.content)) content = mako.render({'record':r}) r_json = { 'id': r.id, #'fields': dict( (f,getattr(r,f)) for f in fields), 'display_mode': 'progress', 'state': 'inprogress', 'completeness': 0, 'name': content, 'description': '', } if self.value_field_id: r_json['current'] = getattr(r, self.value_field_id.name) if self.value_field_monetary: r_json['monetary'] = 1 for g in groups: if g['test'](r): g['lines'].append(r_json) break for g in groups: del g['test'] elif self.type == 'funnel': stage_ids = [] # [key] for group in obj.read_group(domain, [], [self.stage_field_id.name]): key = group[self.stage_field_id.name] if isinstance(key, (list, tuple)): key = key[0] stage_ids.append(key) stages = [] # [{'name':Name, 'id': key}] if self.stage_field_id.ttype == 'selection': d = dict (self.stage_field_id.selection) stages = [ {'id':id, 'name':d[id]} for id in stage_ids ] else: # many2one stage_model = self.stage_field_id.relation for r in self.env[stage_model].browse(stage_ids): stages.append({'id': r.id, 'name':r.name_get()[0][1]}) value_field_name = 
self.value_field_id.name for stage in stages: d = copy.copy(domain) d.append( (self.stage_field_id.name, '=', stage['id']) ) result = obj.read_group(d, [value_field_name], []) stage['closed_value'] = result and result[0][value_field_name] or 0.0 stage['domain'] = str(d) # won value d = domain + won_domain result = obj.read_group(domain, [value_field_name], []) won = {'name': _('Won'), 'id':'__won__', 'closed_value': result and result[0][value_field_name] or 0.0 } stages.append(won) cur = 0 for stage in reversed(stages): cur += stage['closed_value'] stage['abs_value'] = cur total_value = stages[0]['abs_value'] precision = self.precision for s in stages: s['rel_value'] = round(100*s['abs_value']/total_value/precision)*precision if total_value else 100 # dummy fields s['display_mode'] = 'progress' s['monetary'] = 1 res['stages'] = stages res['won'] = won res['conversion_rate'] = stages[-1]['rel_value'] elif self.type == 'slice': value_field_name = self.value_field_id.name for f,d in [('total', domain), ('won', won_domain)]: result = obj.read_group(d, [value_field_name], []) res[f] = result and result[0][value_field_name] or 0.0 res['domain'] = str(domain) res['won_domain'] = str(won_domain) precision = self.precision total_value = res['total'] res['slice'] = round(100*res['won']/res['total']/precision)*precision if res['total'] else 100 # dummy fields res['display_mode'] = 'progress' res['monetary'] = self.value_field_monetary return res class mail_wall_widgets_cache(models.Model): _name = 'mail.wall.widgets.cache' cache = fields.Text('Cached data') res_id = fields.Integer('Resource ID') res_model = fields.Integer('Resource Model') user_id = fields.Many2one('res.users') class res_users(models.Model): _inherit = 'res.users' @api.v7 def get_serialised_mail_wall_widgets_summary(self, cr, uid, excluded_categories=None, context=None): return self._get_serialised_mail_wall_widgets_summary(cr, uid, uid, excluded_categories=excluded_categories, context=context)[0] @api.one 
def _get_serialised_mail_wall_widgets_summary(self, excluded_categories=None): """ [ { 'id': ..., 'model': ..., 'currency': <res.currency id>, 'data': (depend on model) }, ] """ user = self.env.user res = [] model = 'mail.wall.widgets.widget' domain = [('group_ids', 'in', user.groups_id.ids), ('active', '=', True)] for widget in self.env[model].search(domain, order='sequence'): if widget.cache: #TODO continue res.append({ 'model': model, 'id': widget.id, 'currency': user.company_id.currency_id.id, 'data': widget.get_data(user)[0], }) return res #def get_challenge_suggestions(self, cr, uid, context=None): # """Return the list of challenges suggested to the user""" # challenge_info = [] # challenge_obj = self.pool.get('mail_wall_widgets.challenge') # challenge_ids = challenge_obj.search(cr, uid, [('invited_user_ids', 'in', uid), ('state', '=', 'inprogress')], context=context) # for challenge in challenge_obj.browse(cr, uid, challenge_ids, context=context): # values = { # 'id': challenge.id, # 'name': challenge.name, # 'description': challenge.description, # } # challenge_info.append(values) # return challenge_info
lgpl-3.0
tickbh/tdengine_cocos2dx_demo
tdengine_ddz/third_part/jsoncpp/test/pyjsontestrunner.py
257
2137
# Simple implementation of a json test runner to run the test against json-py. import sys import os.path import json import types if len(sys.argv) != 2: print "Usage: %s input-json-file", sys.argv[0] sys.exit(3) input_path = sys.argv[1] base_path = os.path.splitext(input_path)[0] actual_path = base_path + '.actual' rewrite_path = base_path + '.rewrite' rewrite_actual_path = base_path + '.actual-rewrite' def valueTreeToString( fout, value, path = '.' ): ty = type(value) if ty is types.DictType: fout.write( '%s={}\n' % path ) suffix = path[-1] != '.' and '.' or '' names = value.keys() names.sort() for name in names: valueTreeToString( fout, value[name], path + suffix + name ) elif ty is types.ListType: fout.write( '%s=[]\n' % path ) for index, childValue in zip( xrange(0,len(value)), value ): valueTreeToString( fout, childValue, path + '[%d]' % index ) elif ty is types.StringType: fout.write( '%s="%s"\n' % (path,value) ) elif ty is types.IntType: fout.write( '%s=%d\n' % (path,value) ) elif ty is types.FloatType: fout.write( '%s=%.16g\n' % (path,value) ) elif value is True: fout.write( '%s=true\n' % path ) elif value is False: fout.write( '%s=false\n' % path ) elif value is None: fout.write( '%s=null\n' % path ) else: assert False and "Unexpected value type" def parseAndSaveValueTree( input, actual_path ): root = json.loads( input ) fout = file( actual_path, 'wt' ) valueTreeToString( fout, root ) fout.close() return root def rewriteValueTree( value, rewrite_path ): rewrite = json.dumps( value ) #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? file( rewrite_path, 'wt').write( rewrite + '\n' ) return rewrite input = file( input_path, 'rt' ).read() root = parseAndSaveValueTree( input, actual_path ) rewrite = rewriteValueTree( json.write( root ), rewrite_path ) rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) sys.exit( 0 )
apache-2.0
rickhurst/Django-non-rel-blog
django/utils/unittest/suite.py
353
9293
"""TestSuite""" import sys import unittest from django.utils.unittest import case, util __unittest = True class BaseTestSuite(unittest.TestSuite): """A simple test suite that doesn't provide class or module shared fixtures. """ def __init__(self, tests=()): self._tests = [] self.addTests(tests) def __repr__(self): return "<%s tests=%s>" % (util.strclass(self.__class__), list(self)) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return list(self) == list(other) def __ne__(self, other): return not self == other # Can't guarantee hash invariant, so flag as unhashable __hash__ = None def __iter__(self): return iter(self._tests) def countTestCases(self): cases = 0 for test in self: cases += test.countTestCases() return cases def addTest(self, test): # sanity checks if not hasattr(test, '__call__'): raise TypeError("%r is not callable" % (repr(test),)) if isinstance(test, type) and issubclass(test, (case.TestCase, TestSuite)): raise TypeError("TestCases and TestSuites must be instantiated " "before passing them to addTest()") self._tests.append(test) def addTests(self, tests): if isinstance(tests, basestring): raise TypeError("tests must be an iterable of tests, not a string") for test in tests: self.addTest(test) def run(self, result): for test in self: if result.shouldStop: break test(result) return result def __call__(self, *args, **kwds): return self.run(*args, **kwds) def debug(self): """Run the tests without collecting errors in a TestResult""" for test in self: test.debug() class TestSuite(BaseTestSuite): """A test suite is a composite test consisting of a number of TestCases. For use, create an instance of TestSuite, then add test case instances. When all tests have been added, the suite can be passed to a test runner, such as TextTestRunner. It will run the individual test cases in the order in which they were added, aggregating the results. When subclassing, do not forget to call the base class constructor. 
""" def run(self, result): self._wrapped_run(result) self._tearDownPreviousClass(None, result) self._handleModuleTearDown(result) return result def debug(self): """Run the tests without collecting errors in a TestResult""" debug = _DebugResult() self._wrapped_run(debug, True) self._tearDownPreviousClass(None, debug) self._handleModuleTearDown(debug) ################################ # private methods def _wrapped_run(self, result, debug=False): for test in self: if result.shouldStop: break if _isnotsuite(test): self._tearDownPreviousClass(test, result) self._handleModuleFixture(test, result) self._handleClassSetUp(test, result) result._previousTestClass = test.__class__ if (getattr(test.__class__, '_classSetupFailed', False) or getattr(result, '_moduleSetUpFailed', False)): continue if hasattr(test, '_wrapped_run'): test._wrapped_run(result, debug) elif not debug: test(result) else: test.debug() def _handleClassSetUp(self, test, result): previousClass = getattr(result, '_previousTestClass', None) currentClass = test.__class__ if currentClass == previousClass: return if result._moduleSetUpFailed: return if getattr(currentClass, "__unittest_skip__", False): return try: currentClass._classSetupFailed = False except TypeError: # test may actually be a function # so its class will be a builtin-type pass setUpClass = getattr(currentClass, 'setUpClass', None) if setUpClass is not None: try: setUpClass() except Exception, e: if isinstance(result, _DebugResult): raise currentClass._classSetupFailed = True className = util.strclass(currentClass) errorName = 'setUpClass (%s)' % className self._addClassOrModuleLevelException(result, e, errorName) def _get_previous_module(self, result): previousModule = None previousClass = getattr(result, '_previousTestClass', None) if previousClass is not None: previousModule = previousClass.__module__ return previousModule def _handleModuleFixture(self, test, result): previousModule = self._get_previous_module(result) currentModule = 
test.__class__.__module__ if currentModule == previousModule: return self._handleModuleTearDown(result) result._moduleSetUpFailed = False try: module = sys.modules[currentModule] except KeyError: return setUpModule = getattr(module, 'setUpModule', None) if setUpModule is not None: try: setUpModule() except Exception, e: if isinstance(result, _DebugResult): raise result._moduleSetUpFailed = True errorName = 'setUpModule (%s)' % currentModule self._addClassOrModuleLevelException(result, e, errorName) def _addClassOrModuleLevelException(self, result, exception, errorName): error = _ErrorHolder(errorName) addSkip = getattr(result, 'addSkip', None) if addSkip is not None and isinstance(exception, case.SkipTest): addSkip(error, str(exception)) else: result.addError(error, sys.exc_info()) def _handleModuleTearDown(self, result): previousModule = self._get_previous_module(result) if previousModule is None: return if result._moduleSetUpFailed: return try: module = sys.modules[previousModule] except KeyError: return tearDownModule = getattr(module, 'tearDownModule', None) if tearDownModule is not None: try: tearDownModule() except Exception, e: if isinstance(result, _DebugResult): raise errorName = 'tearDownModule (%s)' % previousModule self._addClassOrModuleLevelException(result, e, errorName) def _tearDownPreviousClass(self, test, result): previousClass = getattr(result, '_previousTestClass', None) currentClass = test.__class__ if currentClass == previousClass: return if getattr(previousClass, '_classSetupFailed', False): return if getattr(result, '_moduleSetUpFailed', False): return if getattr(previousClass, "__unittest_skip__", False): return tearDownClass = getattr(previousClass, 'tearDownClass', None) if tearDownClass is not None: try: tearDownClass() except Exception, e: if isinstance(result, _DebugResult): raise className = util.strclass(previousClass) errorName = 'tearDownClass (%s)' % className self._addClassOrModuleLevelException(result, e, errorName) class 
_ErrorHolder(object): """ Placeholder for a TestCase inside a result. As far as a TestResult is concerned, this looks exactly like a unit test. Used to insert arbitrary errors into a test suite run. """ # Inspired by the ErrorHolder from Twisted: # http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py # attribute used by TestResult._exc_info_to_string failureException = None def __init__(self, description): self.description = description def id(self): return self.description def shortDescription(self): return None def __repr__(self): return "<ErrorHolder description=%r>" % (self.description,) def __str__(self): return self.id() def run(self, result): # could call result.addError(...) - but this test-like object # shouldn't be run anyway pass def __call__(self, result): return self.run(result) def countTestCases(self): return 0 def _isnotsuite(test): "A crude way to tell apart testcases and suites with duck-typing" try: iter(test) except TypeError: return True return False class _DebugResult(object): "Used by the TestSuite to hold previous class when running in debug." _previousTestClass = None _moduleSetUpFailed = False shouldStop = False
bsd-3-clause
yuanagain/seniorthesis
venv/lib/python2.7/site-packages/pip/req/req_uninstall.py
510
6897
"""Track and remove the files belonging to an installed distribution.

NOTE(review): this chunk was stored with all newlines collapsed; the code
below is the same token stream re-formatted to conventional layout, with
comments added.
"""
from __future__ import absolute_import

import logging
import os
import tempfile

from pip.compat import uses_pycache, WINDOWS, cache_from_source
from pip.exceptions import UninstallationError
from pip.utils import rmtree, ask, is_local, renames, normalize_path
from pip.utils.logging import indent_log


logger = logging.getLogger(__name__)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a requirement."""

    def __init__(self, dist):
        # dist: the pkg_resources distribution being uninstalled.
        self.paths = set()          # paths we will remove
        self._refuse = set()        # paths outside the prefix we refuse to touch
        self.pth = {}               # pth file path -> UninstallPthEntries
        self.dist = dist
        # save_dir holds stashed copies of removed files until commit();
        # it is None until remove() actually runs.
        self.save_dir = None
        self._moved_paths = []

    def _permitted(self, path):
        """ Return True if the given path is one we are permitted to remove/modify, False otherwise. """
        return is_local(path)

    def add(self, path):
        """Queue *path* for removal if it exists and is inside our prefix."""
        head, tail = os.path.split(path)
        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))
        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)
        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        """Queue removal of *entry* from the given .pth file."""
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def compact(self, paths):
        """Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path."""
        short_paths = set()
        # Sorting by length guarantees a parent is seen before its children.
        for path in sorted(paths, key=len):
            if not any([
                    (path.startswith(shortpath) and
                     # index check ensures we match on a path-separator
                     # boundary, not a mere string prefix
                     path[len(shortpath.rstrip(os.path.sep))] == os.path.sep)
                    for shortpath in short_paths]):
                short_paths.add(path)
        return short_paths

    def _stash(self, path):
        # Map an absolute path to its stash location under save_dir
        # (drive letter stripped on Windows via splitdrive).
        return os.path.join(
            self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep))

    def remove(self, auto_confirm=False):
        """Remove paths in ``self.paths`` with confirmation (unless ``auto_confirm`` is True)."""
        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return
        logger.info(
            'Uninstalling %s-%s:',
            self.dist.project_name, self.dist.version
        )
        with indent_log():
            paths = sorted(self.compact(self.paths))
            if auto_confirm:
                response = 'y'
            else:
                for path in paths:
                    logger.info(path)
                response = ask('Proceed (y/n)? ', ('y', 'n'))
            if self._refuse:
                logger.info('Not removing or modifying (outside of prefix):')
                for path in self.compact(self._refuse):
                    logger.info(path)
            if response == 'y':
                # Files are moved (not deleted) into a temp dir so that
                # rollback() can restore them until commit() is called.
                self.save_dir = tempfile.mkdtemp(suffix='-uninstall',
                                                 prefix='pip-')
                for path in paths:
                    new_path = self._stash(path)
                    logger.debug('Removing file or directory %s', path)
                    self._moved_paths.append(path)
                    renames(path, new_path)
                for pth in self.pth.values():
                    pth.remove()
                logger.info(
                    'Successfully uninstalled %s-%s',
                    self.dist.project_name, self.dist.version
                )

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if self.save_dir is None:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            # NOTE(review): returns False on failure but implicitly None on
            # success -- callers should treat any falsy value carefully.
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        for path in self._moved_paths:
            tmp_path = self._stash(path)
            logger.debug('Replacing %s', path)
            renames(tmp_path, path)
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        if self.save_dir is not None:
            rmtree(self.save_dir)
            self.save_dir = None
            self._moved_paths = []


class UninstallPthEntries(object):
    """Remove (and restore on rollback) entries from a single .pth file."""

    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        # Original file contents, captured by remove() for rollback().
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        """Strip our entries from the .pth file, preserving other lines."""
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry was not present; nothing to remove.
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        """Restore the .pth file contents saved by remove()."""
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made',
                self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
mit
Autonomi/limn
Printrun/pronsole.py
15
1106
#!/usr/bin/env python # This file is part of the Printrun suite. # # Printrun is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Printrun is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Printrun. If not, see <http://www.gnu.org/licenses/>. import sys import traceback import logging from printrun.pronsole import pronsole if __name__ == "__main__": interp = pronsole() interp.parse_cmdline(sys.argv[1:]) try: interp.cmdloop() except SystemExit: interp.p.disconnect() except: logging.error(_("Caught an exception, exiting:") + "\n" + traceback.format_exc()) interp.p.disconnect()
mit
rvalyi/OpenUpgrade
addons/hr_timesheet_invoice/report/hr_timesheet_invoice_report.py
40
9518
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# Read-only reporting models backed by SQL views over account_analytic_line.
# Each class sets _auto = False and (re)creates its view in init().

from openerp.osv import fields,osv
from openerp.tools.sql import drop_view_if_exists


class report_timesheet_line(osv.osv):
    # One row per (date, user, product, account, general account, invoice)
    # group of analytic lines.
    _name = "report.timesheet.line"
    _description = "Timesheet Line"
    _auto = False
    _columns = {
        'name': fields.char('Year',size=64,required=False, readonly=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'date': fields.date('Date', readonly=True),
        'day': fields.char('Day', size=128, readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'cost': fields.float('Cost', readonly=True),
        'product_id': fields.many2one('product.product', 'Product',readonly=True),
        'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'general_account_id': fields.many2one('account.account', 'General Account', readonly=True),
        'invoice_id': fields.many2one('account.invoice', 'Invoiced', readonly=True),
        'month': fields.selection([('01','January'), ('02','February'), ('03','March'),
            ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'),
            ('09','September'), ('10','October'), ('11','November'), ('12','December')],
            'Month',readonly=True),
    }
    _order = 'name desc,user_id desc'

    def init(self, cr):
        # Recreate the backing view; drop_view_if_exists makes this idempotent.
        drop_view_if_exists(cr, 'report_timesheet_line')
        cr.execute("""
            create or replace view report_timesheet_line as (
                select
                    min(l.id) as id,
                    l.date as date,
                    to_char(l.date,'YYYY') as name,
                    to_char(l.date,'MM') as month,
                    l.user_id,
                    to_char(l.date, 'YYYY-MM-DD') as day,
                    l.invoice_id,
                    l.product_id,
                    l.account_id,
                    l.general_account_id,
                    sum(l.unit_amount) as quantity,
                    sum(l.amount) as cost
                from
                    account_analytic_line l
                where
                    l.user_id is not null
                group by
                    l.date, l.user_id, l.product_id, l.account_id,
                    l.general_account_id, l.invoice_id
            )
        """)


class report_timesheet_user(osv.osv):
    # Time and cost aggregated per user per month.
    _name = "report_timesheet.user"
    _description = "Timesheet per day"
    _auto = False
    _columns = {
        'name': fields.char('Year',size=64,required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'cost': fields.float('Cost', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'),
            ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'),
            ('09','September'), ('10','October'), ('11','November'), ('12','December')],
            'Month',readonly=True),
    }
    _order = 'name desc,user_id desc'

    def init(self, cr):
        drop_view_if_exists(cr, 'report_timesheet_user')
        cr.execute("""
            create or replace view report_timesheet_user as (
                select
                    min(l.id) as id,
                    to_char(l.date,'YYYY') as name,
                    to_char(l.date,'MM') as month,
                    l.user_id,
                    sum(l.unit_amount) as quantity,
                    sum(l.amount) as cost
                from
                    account_analytic_line l
                where
                    user_id is not null
                group by
                    l.date, to_char(l.date,'YYYY'), to_char(l.date,'MM'), l.user_id
            )
        """)


class report_timesheet_account(osv.osv):
    # Time aggregated per analytic account per month.
    # NOTE(review): this view groups on create_date while the sibling views
    # group on the line's date field -- presumably intentional; confirm.
    _name = "report_timesheet.account"
    _description = "Timesheet per account"
    _auto = False
    _columns = {
        'name': fields.char('Year',size=64,required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'),
            ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'),
            ('09','September'), ('10','October'), ('11','November'), ('12','December')],
            'Month',readonly=True),
    }
    _order = 'name desc,account_id desc,user_id desc'

    def init(self, cr):
        drop_view_if_exists(cr, 'report_timesheet_account')
        cr.execute("""
            create or replace view report_timesheet_account as (
                select
                    min(id) as id,
                    to_char(create_date, 'YYYY') as name,
                    to_char(create_date,'MM') as month,
                    user_id,
                    account_id,
                    sum(unit_amount) as quantity
                from
                    account_analytic_line
                group by
                    to_char(create_date, 'YYYY'), to_char(create_date, 'MM'),
                    user_id, account_id
            )
        """)


class report_timesheet_account_date(osv.osv):
    # Time aggregated per analytic account per month, keyed on line date.
    _name = "report_timesheet.account.date"
    _description = "Daily timesheet per account"
    _auto = False
    _columns = {
        'name': fields.char('Year',size=64,required=False, readonly=True),
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'account_id':fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
        'quantity': fields.float('Time', readonly=True),
        'month':fields.selection([('01','January'), ('02','February'), ('03','March'),
            ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'),
            ('09','September'), ('10','October'), ('11','November'), ('12','December')],
            'Month',readonly=True),
    }
    _order = 'name desc,account_id desc,user_id desc'

    def init(self, cr):
        drop_view_if_exists(cr, 'report_timesheet_account_date')
        cr.execute("""
            create or replace view report_timesheet_account_date as (
                select
                    min(id) as id,
                    to_char(date,'YYYY') as name,
                    to_char(date,'MM') as month,
                    user_id,
                    account_id,
                    sum(unit_amount) as quantity
                from
                    account_analytic_line
                group by
                    to_char(date,'YYYY'), to_char(date,'MM'), user_id, account_id
            )
        """)
class report_timesheet_invoice(osv.osv): _name = "report_timesheet.invoice" _description = "Costs to invoice" _auto = False _columns = { 'user_id':fields.many2one('res.users', 'User', readonly=True), 'account_id':fields.many2one('account.analytic.account', 'Project', readonly=True), 'manager_id':fields.many2one('res.users', 'Manager', readonly=True), 'quantity': fields.float('Time', readonly=True), 'amount_invoice': fields.float('To invoice', readonly=True) } _rec_name = 'user_id' _order = 'user_id desc' def init(self, cr): drop_view_if_exists(cr, 'report_timesheet_invoice') cr.execute(""" create or replace view report_timesheet_invoice as ( select min(l.id) as id, l.user_id as user_id, l.account_id as account_id, a.user_id as manager_id, sum(l.unit_amount) as quantity, sum(l.unit_amount * t.list_price) as amount_invoice from account_analytic_line l left join hr_timesheet_invoice_factor f on (l.to_invoice=f.id) left join account_analytic_account a on (l.account_id=a.id) left join product_product p on (l.to_invoice=f.id) left join product_template t on (l.to_invoice=f.id) where l.to_invoice is not null and l.invoice_id is null group by l.user_id, l.account_id, a.user_id ) """) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
mozillazg/redis-py-doc
tests/conftest.py
2
6783
# pytest configuration for the redis-py test suite: shared fixtures,
# version-based skip decorators, and mocked cluster responses.
import pytest
import random
import redis
from distutils.version import StrictVersion
from redis.connection import parse_url
from unittest.mock import Mock
from urllib.parse import urlparse


# redis 6 release candidates report a version number of 5.9.x. Use this
# constant for skip_if decorators as a placeholder until 6.0.0 is officially
# released
REDIS_6_VERSION = '5.9.0'

# Populated once per session by pytest_sessionstart.
REDIS_INFO = {}
default_redis_url = "redis://localhost:6379/9"


def pytest_addoption(parser):
    # Allow pointing the suite at a non-default server.
    parser.addoption('--redis-url', default=default_redis_url,
                     action="store",
                     help="Redis connection string,"
                          " defaults to `%(default)s`")


def _get_info(redis_url):
    # One-shot INFO query; disconnects immediately afterwards.
    client = redis.Redis.from_url(redis_url)
    info = client.info()
    client.connection_pool.disconnect()
    return info


def pytest_sessionstart(session):
    # Cache server version/arch so skip decorators can consult them
    # without opening new connections.
    redis_url = session.config.getoption("--redis-url")
    info = _get_info(redis_url)
    version = info["redis_version"]
    arch_bits = info["arch_bits"]
    REDIS_INFO["version"] = version
    REDIS_INFO["arch_bits"] = arch_bits


def skip_if_server_version_lt(min_version):
    """Skip the test when the server is older than *min_version*."""
    redis_version = REDIS_INFO["version"]
    check = StrictVersion(redis_version) < StrictVersion(min_version)
    return pytest.mark.skipif(
        check,
        reason="Redis version required >= {}".format(min_version))


def skip_if_server_version_gte(min_version):
    """Skip the test when the server is at least *min_version*."""
    redis_version = REDIS_INFO["version"]
    check = StrictVersion(redis_version) >= StrictVersion(min_version)
    return pytest.mark.skipif(
        check,
        reason="Redis version required < {}".format(min_version))


def skip_unless_arch_bits(arch_bits):
    """Skip unless the server was built for the given word size."""
    return pytest.mark.skipif(REDIS_INFO["arch_bits"] != arch_bits,
                              reason="server is not {}-bit".format(arch_bits))


def _get_client(cls, request, single_connection_client=True, flushdb=True,
                **kwargs):
    """ Helper for fixtures or tests that need a Redis client

    Uses the "--redis-url" command line argument for connection info. Unlike
    ConnectionPool.from_url, keyword arguments to this function override
    values specified in the URL.
    """
    redis_url = request.config.getoption("--redis-url")
    url_options = parse_url(redis_url)
    url_options.update(kwargs)
    pool = redis.ConnectionPool(**url_options)
    client = cls(connection_pool=pool)
    if single_connection_client:
        client = client.client()
    if request:
        def teardown():
            if flushdb:
                try:
                    client.flushdb()
                except redis.ConnectionError:
                    # handle cases where a test disconnected a client
                    # just manually retry the flushdb
                    client.flushdb()
            client.close()
            client.connection_pool.disconnect()
        request.addfinalizer(teardown)
    return client


@pytest.fixture()
def r(request):
    # Primary client fixture; DB is flushed on teardown via _get_client.
    with _get_client(redis.Redis, request) as client:
        yield client


@pytest.fixture()
def r2(request):
    "A second client for tests that need multiple"
    with _get_client(redis.Redis, request) as client:
        yield client


def _gen_cluster_mock_resp(r, response):
    # Replace the client's connection with a mock that always returns
    # *response*, so cluster commands can be tested without a cluster.
    connection = Mock()
    connection.read_response.return_value = response
    r.connection = connection
    return r


@pytest.fixture()
def mock_cluster_resp_ok(request, **kwargs):
    r = _get_client(redis.Redis, request, **kwargs)
    return _gen_cluster_mock_resp(r, 'OK')


@pytest.fixture()
def mock_cluster_resp_int(request, **kwargs):
    r = _get_client(redis.Redis, request, **kwargs)
    return _gen_cluster_mock_resp(r, '2')


@pytest.fixture()
def mock_cluster_resp_info(request, **kwargs):
    r = _get_client(redis.Redis, request, **kwargs)
    # Canned CLUSTER INFO payload.
    response = ('cluster_state:ok\r\ncluster_slots_assigned:16384\r\n'
                'cluster_slots_ok:16384\r\ncluster_slots_pfail:0\r\n'
                'cluster_slots_fail:0\r\ncluster_known_nodes:7\r\n'
                'cluster_size:3\r\ncluster_current_epoch:7\r\n'
                'cluster_my_epoch:2\r\ncluster_stats_messages_sent:170262\r\n'
                'cluster_stats_messages_received:105653\r\n')
    return _gen_cluster_mock_resp(r, response)


@pytest.fixture()
def mock_cluster_resp_nodes(request, **kwargs):
    r = _get_client(redis.Redis, request, **kwargs)
    # Canned CLUSTER NODES payload (7 nodes, one failed).
    response = ('c8253bae761cb1ecb2b61857d85dfe455a0fec8b 172.17.0.7:7006 '
                'slave aa90da731f673a99617dfe930306549a09f83a6b 0 '
                '1447836263059 5 connected\n'
                '9bd595fe4821a0e8d6b99d70faa660638a7612b3 172.17.0.7:7008 '
                'master - 0 1447836264065 0 connected\n'
                'aa90da731f673a99617dfe930306549a09f83a6b 172.17.0.7:7003 '
                'myself,master - 0 0 2 connected 5461-10922\n'
                '1df047e5a594f945d82fc140be97a1452bcbf93e 172.17.0.7:7007 '
                'slave 19efe5a631f3296fdf21a5441680f893e8cc96ec 0 '
                '1447836262556 3 connected\n'
                '4ad9a12e63e8f0207025eeba2354bcf4c85e5b22 172.17.0.7:7005 '
                'master - 0 1447836262555 7 connected 0-5460\n'
                '19efe5a631f3296fdf21a5441680f893e8cc96ec 172.17.0.7:7004 '
                'master - 0 1447836263562 3 connected 10923-16383\n'
                'fbb23ed8cfa23f17eaf27ff7d0c410492a1093d6 172.17.0.7:7002 '
                'master,fail - 1447829446956 1447829444948 1 disconnected\n'
                )
    return _gen_cluster_mock_resp(r, response)


@pytest.fixture()
def mock_cluster_resp_slaves(request, **kwargs):
    r = _get_client(redis.Redis, request, **kwargs)
    response = ("['1df047e5a594f945d82fc140be97a1452bcbf93e 172.17.0.7:7007 "
                "slave 19efe5a631f3296fdf21a5441680f893e8cc96ec 0 "
                "1447836789290 3 connected']")
    return _gen_cluster_mock_resp(r, response)


@pytest.fixture(scope="session")
def master_host(request):
    # Hostname portion of the --redis-url option, shared session-wide.
    url = request.config.getoption("--redis-url")
    parts = urlparse(url)
    yield parts.hostname


def wait_for_command(client, monitor, command):
    """Consume MONITOR output until *command* appears; None if our own
    sentinel key shows up first."""
    # issue a command with a key name that's local to this process.
    # if we find a command with our key before the command we're waiting
    # for, something went wrong
    redis_version = REDIS_INFO["version"]
    if StrictVersion(redis_version) >= StrictVersion('5.0.0'):
        id_str = str(client.client_id())
    else:
        id_str = '%08x' % random.randrange(2**32)
    key = '__REDIS-PY-%s__' % id_str
    client.get(key)
    while True:
        monitor_response = monitor.next_command()
        if command in monitor_response['command']:
            return monitor_response
        if key in monitor_response['command']:
            return None
mit
fhartwig/adhocracy3.mercator
src/adhocracy_mercator/adhocracy_mercator/catalog/adhocracy.py
2
3435
""" Adhocracy catalog extensions.""" from substanced.catalog import Keyword from adhocracy_core.catalog.adhocracy import AdhocracyCatalogIndexes from adhocracy_core.interfaces import IResource from adhocracy_core.utils import get_sheet_field from adhocracy_mercator.sheets.mercator import IMercatorSubResources from adhocracy_mercator.sheets.mercator import IFinance from adhocracy_mercator.sheets.mercator import ILocation class MercatorCatalogIndexes(AdhocracyCatalogIndexes): """Mercator indexes for the adhocracy catalog.""" mercator_location = Keyword() mercator_requested_funding = Keyword() mercator_budget = Keyword() LOCATION_INDEX_KEYWORDS = ['specific', 'online', 'linked_to_ruhr'] def index_location(resource, default) -> list: """Return search index keywords based on the "location_is_..." fields.""" location = get_sheet_field(resource, IMercatorSubResources, 'location') # TODO: Why is location '' in the first pass of that function # during MercatorProposal create? if location is None or location == '': return default locations = [] for keyword in LOCATION_INDEX_KEYWORDS: if get_sheet_field(location, ILocation, 'location_is_' + keyword): locations.append(keyword) return locations if locations else default BUDGET_INDEX_LIMIT_KEYWORDS = [5000, 10000, 20000, 50000] def index_requested_funding(resource: IResource, default) -> str: """Return search index keyword based on the "requested_funding" field.""" # TODO: Why is finance '' in the first pass of that function # during MercatorProposal create? # This sounds like a bug, the default value for References is None, finance = get_sheet_field(resource, IMercatorSubResources, 'finance') if finance is None or finance == '': return default funding = get_sheet_field(finance, IFinance, 'requested_funding') for limit in BUDGET_INDEX_LIMIT_KEYWORDS: if funding <= limit: return [str(limit)] return default def index_budget(resource: IResource, default) -> str: """ Return search index keyword based on the "budget" field. 
The returned values are the same values as per the "requested_funding" field, or "above_50000" if the total budget value is more than 50,000 euro. """ finance = get_sheet_field(resource, IMercatorSubResources, 'finance') if finance is None or finance == '': return default funding = get_sheet_field(finance, IFinance, 'budget') for limit in BUDGET_INDEX_LIMIT_KEYWORDS: if funding <= limit: return [str(limit)] return ['above_50000'] def includeme(config): """Register catalog utilities and index functions.""" config.add_catalog_factory('adhocracy', MercatorCatalogIndexes) config.add_indexview(index_location, catalog_name='adhocracy', index_name='mercator_location', context=IMercatorSubResources) config.add_indexview(index_requested_funding, catalog_name='adhocracy', index_name='mercator_requested_funding', context=IMercatorSubResources) config.add_indexview(index_budget, catalog_name='adhocracy', index_name='mercator_budget', context=IMercatorSubResources)
agpl-3.0
liamgh/liamgreenhughes-sl4a-tf101
python/src/Demo/sockets/rpythond.py
47
1214
#! /usr/bin/env python # Remote python server. # Execute Python commands remotely and send output back. # WARNING: This version has a gaping security hole -- it accepts requests # from any host on the Internet! import sys from socket import * import StringIO import traceback PORT = 4127 BUFSIZE = 1024 def main(): if len(sys.argv) > 1: port = int(eval(sys.argv[1])) else: port = PORT s = socket(AF_INET, SOCK_STREAM) s.bind(('', port)) s.listen(1) while 1: conn, (remotehost, remoteport) = s.accept() print 'connected by', remotehost, remoteport request = '' while 1: data = conn.recv(BUFSIZE) if not data: break request = request + data reply = execute(request) conn.send(reply) conn.close() def execute(request): stdout = sys.stdout stderr = sys.stderr sys.stdout = sys.stderr = fakefile = StringIO.StringIO() try: try: exec request in {}, {} except: print traceback.print_exc(100) finally: sys.stderr = stderr sys.stdout = stdout return fakefile.getvalue() main()
apache-2.0
ddy88958620/lib
Python/scrapy/getinthemix/dv247.py
2
2212
import re from scrapy.spider import BaseSpider from scrapy.selector import HtmlXPathSelector from scrapy.http import Request, FormRequest, HtmlResponse from scrapy.utils.response import get_base_url from scrapy.utils.url import urljoin_rfc from productloader import load_product from scrapy.http import FormRequest class DV247(BaseSpider): name = 'dv247.com' allowed_domains = ['dv247.com', 'www.dv247.com'] start_urls = ('http://www.dv247.com',) def parse_product(self, response): URL_BASE = 'http://www.dv247.com' hxs = HtmlXPathSelector(response) products = hxs.select('//div[@class="listItem clearfix"]') for p in products: res = {} name = ' '.join(p.select('.//a//text()').extract()) url = p.select('.//a/@href')[0].extract() url = urljoin_rfc(URL_BASE, url) price = p.select('.//li[@class="price"]/text()').re('\xa3(.*)')[0] res['url'] = url res['description'] = name res['price'] = price yield load_product(res, response) def parse(self, response): if not isinstance(response, HtmlResponse): return URL_BASE = 'http://www.dv247.com' #categories hxs = HtmlXPathSelector(response) category_urls = hxs.select('//nav[@id="megamenu"]/ul/li/a/@href | \ //nav[@id="megamenu"]//li[@class="accessories threeCol"]//a/@href').extract() #the following category had to be added manually because the link is broken. category_urls.append('/computer-music-software/') for url in category_urls: if url == '#': continue url = urljoin_rfc(URL_BASE, url) yield Request(url) #next page next_pages = hxs.select('//div[@class="listPaging"]') if next_pages: next_pages = next_pages[0].select('.//a[not(@class="selectedpage")]/@href').extract() for page in next_pages: url = urljoin_rfc(URL_BASE, page) yield Request(url) # products for p in self.parse_product(response): yield p
apache-2.0
CityGrid/arsenal
server/arsenalweb/views/login.py
1
4340
'''Arsenal login page.'''
#  Copyright 2015 CityGrid Media, LLC
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
import logging

from pyramid.view import view_config, forbidden_view_config
from pyramid.httpexceptions import HTTPOk
from pyramid.httpexceptions import HTTPFound
from pyramid.httpexceptions import HTTPUnauthorized
from pyramid.httpexceptions import HTTPForbidden
from pyramid.security import remember
from pyramid.session import signed_serialize
from pyramid_ldap import get_ldap_connector
from arsenalweb.views import (
    db_authenticate,
    get_authenticated_user,
    pam_authenticate,
    site_layout,
    )

LOG = logging.getLogger(__name__)


@view_config(route_name='login', renderer='arsenalweb:templates/login.pt')
@forbidden_view_config(renderer='arsenalweb:templates/login.pt')
def login(request):
    '''Process requests for the /login route.

    Serves both the login form (GET) and form submission (POST), and is
    also the forbidden view.  Tries local DB auth first, then LDAP and
    PAM when those backends are enabled in the registry settings.
    '''
    page_title = 'Login'
    LOG.debug('Processing login request...')
    auth_user = get_authenticated_user(request)

    # referer_host is the host part of the Referer header, used below to
    # decide whether we can safely bounce back to the referring page.
    if request.referer:
        referer_host = request.referer.split('/')[2]
    else:
        referer_host = None

    # Need to send the client a 401 so it can send a user/pass to auth.
    # Without this the client just gets the login page with a 200 and
    # thinks the command was successful.
    if request.path_info.split('/')[1][:3] == 'api' and not request.authenticated_userid:
        LOG.debug('request came from the api, sending request to re-auth')
        return HTTPUnauthorized()

    # Pick where to send the user after a successful login: same-host
    # referer (unless it was the logout page), the page they tried to
    # reach, or the default /nodes listing.
    if request.referer and referer_host == request.host \
    and request.referer.split('/')[3][:6] != 'logout':
        return_url = request.referer
    elif request.path != '/login':
        return_url = request.url
    else:
        return_url = '/nodes'

    login_name = ''
    password = ''
    error = ''

    if 'form.submitted' in request.POST:
        login_name = request.POST['login']
        password = request.POST['password']
        LOG.debug('Attempting to authenticate login: {0}'.format(login_name))

        # Try local first, ldap/pam second (if configured)
        LOG.debug('Authenticating against local DB...')
        data = db_authenticate(login_name, password)

        if data is None and request.registry.settings['arsenal.use_ldap']:
            LOG.debug('Authenticating against LDAP...')
            connector = get_ldap_connector(request)
            data = connector.authenticate(login_name, password)

        if data is None and request.registry.settings['arsenal.use_pam']:
            LOG.debug('Authenticating against PAM...')
            data = pam_authenticate(login_name, password)

        if data is not None:
            # data[0] is the canonical user name from the auth backend.
            user_name = data[0]
            encrypted = signed_serialize(login_name,
                                         request.registry.settings['arsenal.cookie_token'])
            headers = remember(request, user_name)
            # Extra 'un' cookie carries the signed login name for one week.
            headers.append(('Set-Cookie', 'un=' + str(encrypted) + '; Max-Age=604800; Path=/'))
            if 'api.client' in request.POST:
                return HTTPOk(headers=headers)
            else:
                return HTTPFound(request.POST['return_url'], headers=headers)
        else:
            error = 'Invalid credentials'
            request.response.status = 403

    # Reaching this with an authenticated user means either a redundant
    # /login visit or a forbidden page (this is also the forbidden view).
    if request.authenticated_userid:
        if request.path == '/login':
            error = 'You are already logged in'
            page_title = 'Already Logged In'
        else:
            error = 'You do not have permission to access this page'
            page_title = 'Access Denied'
            request.response.status = 403

    # NOTE(review): the submitted password is echoed back into the template
    # context here -- confirm the template never renders it.
    return {
        'au': auth_user,
        'error': error,
        'layout': site_layout('max'),
        'login': login_name,
        'page_title': page_title,
        'password': password,
        'return_url': return_url,
    }
apache-2.0
gribozavr/swift
utils/swift_build_support/tests/test_host.py
48
2433
# test_host.py - Unit tests for swift_build_support.cmake -*-- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors

import platform
import unittest

import swift_build_support.host as sbs_host


class HostTestCase(unittest.TestCase):
    """Smoke tests for the host-introspection helpers in sbs_host."""

    # (system, machine) pairs on which the helpers return real values;
    # everywhere else they are expected to return None.
    _SUPPORTED_HOSTS = (('Darwin', 'x86_64'),)

    def _host_is_supported(self):
        """True when the current host is one the helpers support."""
        current = (platform.system(), platform.machine())
        return current in self._SUPPORTED_HOSTS

    def test_system_memory(self):
        # We make sure that we get an integer back. If we get an integer
        # back, we know that we at least were able to get some sort of
        # information from the system and it could be parsed as an integer.
        # This is just a smoke test.
        reported = sbs_host.system_memory()
        if self._host_is_supported():
            self.assertIsInstance(reported, int)
        else:
            self.assertIsNone(reported)

    def test_lto_link_job_counts(self):
        # Make sure that:
        #
        # 1. we get back a dictionary with two keys in it, the first called
        # llvm, the other called swift.
        #
        # 2. The values associated with these keys is either None (if we do
        # not support the platform) or is an int that is reasonable (i.e. <
        # 100). The number 100 is just a heuristic number that is appropriate
        # currently since LTO uses so much memory. If and when that changes,
        # this number should change.
        sane_thread_ceiling = 100

        counts = sbs_host.max_lto_link_job_counts()
        self.assertIsInstance(counts, dict)
        self.assertEqual(len(counts), 2)

        if not self._host_is_supported():
            self.assertIsNone(counts['llvm'])
            self.assertIsNone(counts['swift'])
            return

        for tool in ('llvm', 'swift'):
            self.assertIsNotNone(counts[tool])
        for tool in ('llvm', 'swift'):
            self.assertIsInstance(counts[tool], int)
        for tool in ('llvm', 'swift'):
            self.assertLess(counts[tool], sane_thread_ceiling)
apache-2.0
knorrium/selenium
py/selenium/webdriver/firefox/extension_connection.py
66
2846
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import logging
import time

from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common import utils
from selenium.webdriver.remote.command import Command
from selenium.webdriver.remote.remote_connection import RemoteConnection
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary

LOGGER = logging.getLogger(__name__)

PORT = 0
# HOST = None
_URL = ""


class ExtensionConnection(RemoteConnection):
    """HTTP connection to the legacy Firefox WebDriver browser extension.

    Launches the given Firefox binary with the profile's port configured,
    then speaks the remote protocol to http://HOST:PORT/hub.
    """

    def __init__(self, host, firefox_profile, firefox_binary=None, timeout=30):
        self.profile = firefox_profile
        self.binary = firefox_binary
        # NOTE: HOST/PORT/_URL here are locals shadowing the module-level
        # names; the connection URL is preserved on the instance so other
        # methods can reach it (the module-level _URL is never updated).
        HOST = host
        if self.binary is None:
            self.binary = FirefoxBinary()
        if HOST is None:
            HOST = "127.0.0.1"
        PORT = utils.free_port()
        self.profile.port = PORT
        self.profile.update_preferences()
        self.profile.add_extension()
        self.binary.launch_browser(self.profile)
        _URL = "http://%s:%d/hub" % (HOST, PORT)
        self._extension_url = _URL
        RemoteConnection.__init__(
            self, _URL, keep_alive=True)

    def quit(self, sessionId=None):
        """Send QUIT, then wait for the extension port to stop accepting.

        FIX: with is_connectable() previously returning None this loop never
        ran; it now genuinely waits until the browser has shut down.
        """
        self.execute(Command.QUIT, {'sessionId': sessionId})
        while self.is_connectable():
            LOGGER.info("waiting to quit")
            time.sleep(1)

    def connect(self):
        """Connects to the extension and retrieves the session id."""
        return self.execute(Command.NEW_SESSION,
                            {'desiredCapabilities': DesiredCapabilities.FIREFOX})

    # FIX: the two methods below were decorated @classmethod while declaring
    # `self` and reading instance state (self.profile, the connection URL),
    # so any invocation failed (`self` received the class object).  They are
    # instance methods.
    def connect_and_quit(self):
        """Connects to a running browser and quits immediately."""
        # FIX: use the URL saved by __init__ instead of the module-level
        # _URL, which was always "".
        self._request('%s/extensions/firefox/quit' % self._extension_url)

    def is_connectable(self):
        """Tries to connect to the extension without retrieving a context.

        FIX: the result of utils.is_connectable was previously discarded;
        return it so callers (e.g. quit) can use it.
        """
        return utils.is_connectable(self.profile.port)


class ExtensionConnectionError(Exception):
    """An internal error occurred int the extension.

    Might be caused by bad input or bugs in webdriver
    """
    pass
apache-2.0
arichar6/veusz
veusz/widgets/nonorthpoint.py
1
10913
#    Copyright (C) 2010 Jeremy S. Sanders
#    Email: Jeremy Sanders <[email protected]>
#
#    This program is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License along
#    with this program; if not, write to the Free Software Foundation, Inc.,
#    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################

"""Non orthogonal point plotting."""

from __future__ import division
import numpy as N

from ..compat import czip
from .. import qtall as qt4
from .. import document
from .. import datasets
from .. import setting
from .. import utils
from . import pickable
from .nonorthgraph import NonOrthGraph, FillBrush
from .widget import Widget
from .point import MarkerFillBrush


def _(text, disambiguation=None, context='NonOrthPoint'):
    """Translate text."""
    return qt4.QCoreApplication.translate(context, text, disambiguation)


class NonOrthPoint(Widget):
    '''Widget for plotting points in a non-orthogonal plot.'''

    typename = 'nonorthpoint'
    allowusercreation = True
    description = _('Plot points on a graph with non-orthogonal axes')

    @classmethod
    def addSettings(klass, s):
        '''Settings for widget.'''
        Widget.addSettings(s)

        # datasets plotted against the two non-orthogonal coordinates
        s.add( setting.DatasetExtended(
                'data1', 'x',
                descr=_('Dataset containing 1st dataset, list of values '
                        'or expression'),
                usertext=_('Dataset 1')) )
        s.add( setting.DatasetExtended(
                'data2', 'y',
                descr=_('Dataset containing 2nd dataset, list of values '
                        'or expression'),
                usertext=_('Dataset 2')) )
        s.add( setting.DatasetOrStr(
                'labels', '',
                descr=_('Dataset or string to label points'),
                usertext=_('Labels')) )
        s.add( setting.DatasetExtended(
                'scalePoints', '',
                descr = _('Scale size of plotted markers by this dataset, '
                          ' list of values or expression'),
                usertext=_('Scale markers')) )

        s.add( setting.MarkerColor('Color') )

        # formatting settings (inserted at position 0 so they appear first)
        s.add( setting.Color('color',
                             'auto',
                             descr = _('Master color'),
                             usertext = _('Color'),
                             formatting=True), 0 )
        s.add( setting.DistancePt('markerSize',
                                  '3pt',
                                  descr = _('Size of marker to plot'),
                                  usertext=_('Marker size'),
                                  formatting=True), 0 )
        s.add( setting.Marker('marker',
                              'circle',
                              descr = _('Type of marker to plot'),
                              usertext=_('Marker'),
                              formatting=True), 0 )
        s.add( setting.Line('PlotLine',
                            descr = _('Plot line settings'),
                            usertext = _('Plot line')),
               pixmap = 'settings_plotline' )
        # plot line follows the master color by default
        s.PlotLine.get('color').newDefault( setting.Reference('../color') )
        s.add( setting.MarkerLine('MarkerLine',
                                  descr = _('Line around the marker settings'),
                                  usertext = _('Marker border')),
               pixmap = 'settings_plotmarkerline' )
        s.add( MarkerFillBrush('MarkerFill',
                               descr = _('Marker fill settings'),
                               usertext = _('Marker fill')),
               pixmap = 'settings_plotmarkerfill' )
        s.add( FillBrush('Fill1',
                         descr = _('Fill settings (1)'),
                         usertext = _('Area fill 1')),
               pixmap = 'settings_plotfillbelow' )
        s.add( FillBrush('Fill2',
                         descr = _('Fill settings (2)'),
                         usertext = _('Area fill 2')),
               pixmap = 'settings_plotfillbelow' )
        s.add( setting.PointLabel('Label',
                                  descr = _('Label settings'),
                                  usertext=_('Label')),
               pixmap = 'settings_axislabel' )

    @classmethod
    def allowedParentTypes(klass):
        # can only be placed inside a non-orthogonal graph
        return (NonOrthGraph,)

    @property
    def userdescription(self):
        return _("data1='%s', data2='%s'") % (
            self.settings.data1, self.settings.data2)

    def updateDataRanges(self, inrange):
        '''Extend inrange to range of data.'''
        # inrange is [min1, max1, min2, max2]; widen it with NaN-safe extrema
        d1 = self.settings.get('data1').getData(self.document)
        if d1:
            inrange[0] = min( N.nanmin(d1.data), inrange[0] )
            inrange[1] = max( N.nanmax(d1.data), inrange[1] )
        d2 = self.settings.get('data2').getData(self.document)
        if d2:
            inrange[2] = min( N.nanmin(d2.data), inrange[2] )
            inrange[3] = max( N.nanmax(d2.data), inrange[3] )

    def pickPoint(self, x0, y0, bounds, distance = 'radial'):
        '''Delegate point picking to a DiscretePickable over data1/data2.'''
        p = pickable.DiscretePickable(
            self, 'data1', 'data2',
            lambda v1, v2: self.parent.graphToPlotCoords(v1, v2))
        return p.pickPoint(x0, y0, bounds, distance)

    def pickIndex(self, oldindex, direction, bounds):
        '''Delegate index-based picking to a DiscretePickable.'''
        p = pickable.DiscretePickable(
            self, 'data1', 'data2',
            lambda v1, v2: self.parent.graphToPlotCoords(v1, v2))
        return p.pickIndex(oldindex, direction, bounds)

    def drawLabels(self, painter, xplotter, yplotter, textvals, markersize):
        """Draw labels for the points.

        This is copied from the xy (point) widget class, so it
        probably should be somehow be shared.

        FIXME: sane automatic placement of labels
        """
        s = self.settings
        lab = s.get('Label')

        # work out offset and alignment
        deltax = markersize*1.5*{'left':-1, 'centre':0, 'right':1}[lab.posnHorz]
        deltay = markersize*1.5*{'top':-1, 'centre':0, 'bottom':1}[lab.posnVert]
        alignhorz = {'left':1, 'centre':0, 'right':-1}[lab.posnHorz]
        alignvert = {'top':-1, 'centre':0, 'bottom':1}[lab.posnVert]

        # make font and len
        textpen = lab.makeQPen(painter)
        painter.setPen(textpen)
        font = lab.makeQFont(painter)
        angle = lab.angle

        # iterate over each point and plot each label
        for x, y, t in czip(xplotter+deltax, yplotter+deltay,
                            textvals):
            utils.Renderer( painter, font, x, y, t,
                            alignhorz, alignvert, angle,
                            doc=self.document).render()

    def getColorbarParameters(self):
        """Return parameters for colorbar."""
        s = self.settings
        c = s.Color
        return (c.min, c.max, c.scaling, s.MarkerFill.colorMap, 0,
                s.MarkerFill.colorMapInvert)

    def autoColor(self, painter, dataindex=0):
        """Automatic color for plotting."""
        return painter.docColorAuto(
            painter.helper.autoColorIndex((self, dataindex)))

    def draw(self, parentposn, phelper, outerbounds=None):
        '''Plot the data on a plotter.'''
        posn = self.computeBounds(parentposn, phelper)

        s = self.settings
        d = self.document

        # exit if hidden
        if s.hide:
            return

        d1 = s.get('data1').getData(d)
        d2 = s.get('data2').getData(d)
        dscale = s.get('scalePoints').getData(d)
        colorpoints = s.Color.get('points').getData(d)
        text = s.get('labels').getData(d, checknull=True)
        if not d1 or not d2:
            return

        x1, y1, x2, y2 = posn
        cliprect = qt4.QRectF( qt4.QPointF(x1, y1), qt4.QPointF(x2, y2) )
        painter = phelper.painter(self, posn)
        with painter:
            self.parent.setClip(painter, posn)

            # split parts separated by NaNs
            for v1, v2, scalings, cvals, textitems in \
                    datasets.generateValidDatasetParts(
                        [d1, d2, dscale, colorpoints, text]):
                # convert data (chopping down length)
                v1d, v2d = v1.data, v2.data
                minlen = min(v1d.shape[0], v2d.shape[0])
                v1d, v2d = v1d[:minlen], v2d[:minlen]
                px, py = self.parent.graphToPlotCoords(v1d, v2d)

                # do fill1 (if any)
                if not s.Fill1.hide:
                    self.parent.drawFillPts(painter, s.Fill1, cliprect,
                                            px, py)
                # do fill2
                if not s.Fill2.hide:
                    self.parent.drawFillPts(painter, s.Fill2, cliprect,
                                            px, py)

                # plot line
                if not s.PlotLine.hide:
                    painter.setBrush( qt4.QBrush() )
                    painter.setPen(s.PlotLine.makeQPen(painter))
                    pts = qt4.QPolygonF()
                    utils.addNumpyToPolygonF(pts, px, py)
                    utils.plotClippedPolyline(painter, cliprect, pts)

                # plot markers
                markersize = s.get('markerSize').convert(painter)
                if not s.MarkerLine.hide or not s.MarkerFill.hide:
                    pscale = colorvals = cmap = None
                    if scalings:
                        pscale = scalings.data

                    # color point individually
                    if cvals and not s.MarkerFill.hide:
                        colorvals = utils.applyScaling(
                            cvals.data, s.Color.scaling,
                            s.Color.min, s.Color.max)
                        cmap = self.document.evaluate.getColormap(
                            s.MarkerFill.colorMap, s.MarkerFill.colorMapInvert)

                    painter.setBrush(s.MarkerFill.makeQBrushWHide(painter))
                    painter.setPen(s.MarkerLine.makeQPenWHide(painter))

                    utils.plotMarkers(painter, px, py, s.marker, markersize,
                                      scaling=pscale, clip=cliprect,
                                      cmap=cmap, colorvals=colorvals,
                                      scaleline=s.MarkerLine.scaleLine)

                # finally plot any labels
                if textitems and not s.Label.hide:
                    self.drawLabels(painter, px, py, textitems, markersize)

# allow the factory to instantiate plotter
document.thefactory.register( NonOrthPoint )
gpl-2.0
Sumith1896/sympy
sympy/polys/heuristicgcd.py
86
3818
"""Heuristic polynomial GCD algorithm (HEUGCD). """

from __future__ import print_function, division

from sympy.core.compatibility import range
from .polyerrors import HeuristicGCDFailed

# Maximum number of evaluation points tried before giving up.
HEU_GCD_MAX = 6

def heugcd(f, g):
    """
    Heuristic polynomial GCD in ``Z[X]``.

    Given univariate polynomials ``f`` and ``g`` in ``Z[X]``, returns
    their GCD and cofactors, i.e. polynomials ``h``, ``cff`` and ``cfg``
    such that::

          h = gcd(f, g), cff = quo(f, h) and cfg = quo(g, h)

    The algorithm is purely heuristic which means it may fail to compute
    the GCD. This will be signaled by raising an exception. In this case
    you will need to switch to another GCD method.

    The algorithm computes the polynomial GCD by evaluating polynomials
    ``f`` and ``g`` at certain points and computing (fast) integer GCD
    of those evaluations. The polynomial GCD is recovered from the integer
    image by interpolation. The evaluation process reduces f and g variable
    by variable into a large integer. The final step is to verify if the
    interpolated polynomial is the correct GCD. This gives cofactors of
    the input polynomials as a side effect.

    Examples
    ========

    >>> from sympy.polys.heuristicgcd import heugcd
    >>> from sympy.polys import ring, ZZ

    >>> R, x,y, = ring("x,y", ZZ)

    >>> f = x**2 + 2*x*y + y**2
    >>> g = x**2 + x*y

    >>> h, cff, cfg = heugcd(f, g)
    >>> h, cff, cfg
    (x + y, x + y, x)

    >>> cff*h == f
    True
    >>> cfg*h == g
    True

    References
    ==========

    1. [Liao95]_

    """
    assert f.ring == g.ring and f.ring.domain.is_ZZ

    ring = f.ring
    x0 = ring.gens[0]
    domain = ring.domain

    # factor out the integer content common to f and g; reapplied at the end
    gcd, f, g = f.extract_ground(g)

    f_norm = f.max_norm()
    g_norm = g.max_norm()

    # bound on the evaluation point, derived from the coefficient norms
    B = domain(2*min(f_norm, g_norm) + 29)

    x = max(min(B, 99*domain.sqrt(B)),
            2*min(f_norm // abs(f.LC),
                  g_norm // abs(g.LC)) + 2)

    for i in range(0, HEU_GCD_MAX):
        # evaluate at the current point; recurse for multivariate inputs
        ff = f.evaluate(x0, x)
        gg = g.evaluate(x0, x)

        if ff and gg:
            if ring.ngens == 1:
                h, cff, cfg = domain.cofactors(ff, gg)
            else:
                h, cff, cfg = heugcd(ff, gg)

            # candidate 1: interpolate the gcd image itself and verify by
            # exact division of f and g
            h = _gcd_interpolate(h, x, ring)
            h = h.primitive()[1]

            cff_, r = f.div(h)

            if not r:
                cfg_, r = g.div(h)

                if not r:
                    h = h.mul_ground(gcd)
                    return h, cff_, cfg_

            # candidate 2: interpolate the cofactor of f and recover h = f/cff
            cff = _gcd_interpolate(cff, x, ring)

            h, r = f.div(cff)

            if not r:
                cfg_, r = g.div(h)

                if not r:
                    h = h.mul_ground(gcd)
                    return h, cff, cfg_

            # candidate 3: interpolate the cofactor of g and recover h = g/cfg
            cfg = _gcd_interpolate(cfg, x, ring)

            h, r = g.div(cfg)

            if not r:
                cff_, r = f.div(h)

                if not r:
                    h = h.mul_ground(gcd)
                    return h, cff_, cfg

        # verification failed: perturb the evaluation point and retry
        x = 73794*x * domain.sqrt(domain.sqrt(x)) // 27011

    raise HeuristicGCDFailed('no luck')

def _gcd_interpolate(h, x, ring):
    """Interpolate polynomial GCD from integer GCD. """
    f, i = ring.zero, 0

    # TODO: don't expose poly repr implementation details
    if ring.ngens == 1:
        # univariate: recover coefficients as balanced base-x digits of h
        while h:
            g = h % x
            if g > x // 2:
                g -= x
            h = (h - g) // x

            # f += X**i*g
            if g:
                f[(i,)] = g
            i += 1
    else:
        # multivariate: digits are themselves polynomials in the other vars
        while h:
            g = h.trunc_ground(x)
            h = (h - g).quo_ground(x)

            # f += X**i*g
            if g:
                for monom, coeff in g.iterterms():
                    f[(i,) + monom] = coeff
            i += 1

    # normalize the sign so the leading coefficient is positive
    if f.LC < 0:
        return -f
    else:
        return f
bsd-3-clause
CarlosCondor/pelisalacarta-xbmc-plus
lib/atom/auth.py
26
1123
#!/usr/bin/env python # # Copyright (C) 2009 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This module is used for version 2 of the Google Data APIs. __author__ = '[email protected] (Jeff Scudder)' import base64 class BasicAuth(object): """Sets the Authorization header as defined in RFC1945""" def __init__(self, user_id, password): self.basic_cookie = base64.encodestring( '%s:%s' % (user_id, password)).strip() def modify_request(self, http_request): http_request.headers['Authorization'] = 'Basic %s' % self.basic_cookie ModifyRequest = modify_request
gpl-3.0
Thraxis/SickRage
lib/unidecode/x081.py
252
4673
data = ( 'Cheng ', # 0x00 'Tiao ', # 0x01 'Zhi ', # 0x02 'Cui ', # 0x03 'Mei ', # 0x04 'Xie ', # 0x05 'Cui ', # 0x06 'Xie ', # 0x07 'Mo ', # 0x08 'Mai ', # 0x09 'Ji ', # 0x0a 'Obiyaakasu ', # 0x0b '[?] ', # 0x0c 'Kuai ', # 0x0d 'Sa ', # 0x0e 'Zang ', # 0x0f 'Qi ', # 0x10 'Nao ', # 0x11 'Mi ', # 0x12 'Nong ', # 0x13 'Luan ', # 0x14 'Wan ', # 0x15 'Bo ', # 0x16 'Wen ', # 0x17 'Guan ', # 0x18 'Qiu ', # 0x19 'Jiao ', # 0x1a 'Jing ', # 0x1b 'Rou ', # 0x1c 'Heng ', # 0x1d 'Cuo ', # 0x1e 'Lie ', # 0x1f 'Shan ', # 0x20 'Ting ', # 0x21 'Mei ', # 0x22 'Chun ', # 0x23 'Shen ', # 0x24 'Xie ', # 0x25 'De ', # 0x26 'Zui ', # 0x27 'Cu ', # 0x28 'Xiu ', # 0x29 'Xin ', # 0x2a 'Tuo ', # 0x2b 'Pao ', # 0x2c 'Cheng ', # 0x2d 'Nei ', # 0x2e 'Fu ', # 0x2f 'Dou ', # 0x30 'Tuo ', # 0x31 'Niao ', # 0x32 'Noy ', # 0x33 'Pi ', # 0x34 'Gu ', # 0x35 'Gua ', # 0x36 'Li ', # 0x37 'Lian ', # 0x38 'Zhang ', # 0x39 'Cui ', # 0x3a 'Jie ', # 0x3b 'Liang ', # 0x3c 'Zhou ', # 0x3d 'Pi ', # 0x3e 'Biao ', # 0x3f 'Lun ', # 0x40 'Pian ', # 0x41 'Guo ', # 0x42 'Kui ', # 0x43 'Chui ', # 0x44 'Dan ', # 0x45 'Tian ', # 0x46 'Nei ', # 0x47 'Jing ', # 0x48 'Jie ', # 0x49 'La ', # 0x4a 'Yi ', # 0x4b 'An ', # 0x4c 'Ren ', # 0x4d 'Shen ', # 0x4e 'Chuo ', # 0x4f 'Fu ', # 0x50 'Fu ', # 0x51 'Ju ', # 0x52 'Fei ', # 0x53 'Qiang ', # 0x54 'Wan ', # 0x55 'Dong ', # 0x56 'Pi ', # 0x57 'Guo ', # 0x58 'Zong ', # 0x59 'Ding ', # 0x5a 'Wu ', # 0x5b 'Mei ', # 0x5c 'Ruan ', # 0x5d 'Zhuan ', # 0x5e 'Zhi ', # 0x5f 'Cou ', # 0x60 'Gua ', # 0x61 'Ou ', # 0x62 'Di ', # 0x63 'An ', # 0x64 'Xing ', # 0x65 'Nao ', # 0x66 'Yu ', # 0x67 'Chuan ', # 0x68 'Nan ', # 0x69 'Yun ', # 0x6a 'Zhong ', # 0x6b 'Rou ', # 0x6c 'E ', # 0x6d 'Sai ', # 0x6e 'Tu ', # 0x6f 'Yao ', # 0x70 'Jian ', # 0x71 'Wei ', # 0x72 'Jiao ', # 0x73 'Yu ', # 0x74 'Jia ', # 0x75 'Duan ', # 0x76 'Bi ', # 0x77 'Chang ', # 0x78 'Fu ', # 0x79 'Xian ', # 0x7a 'Ni ', # 0x7b 'Mian ', # 0x7c 'Wa ', # 0x7d 'Teng ', # 0x7e 'Tui ', # 0x7f 'Bang ', # 0x80 'Qian ', # 0x81 'Lu ', # 
0x82 'Wa ', # 0x83 'Sou ', # 0x84 'Tang ', # 0x85 'Su ', # 0x86 'Zhui ', # 0x87 'Ge ', # 0x88 'Yi ', # 0x89 'Bo ', # 0x8a 'Liao ', # 0x8b 'Ji ', # 0x8c 'Pi ', # 0x8d 'Xie ', # 0x8e 'Gao ', # 0x8f 'Lu ', # 0x90 'Bin ', # 0x91 'Ou ', # 0x92 'Chang ', # 0x93 'Lu ', # 0x94 'Guo ', # 0x95 'Pang ', # 0x96 'Chuai ', # 0x97 'Piao ', # 0x98 'Jiang ', # 0x99 'Fu ', # 0x9a 'Tang ', # 0x9b 'Mo ', # 0x9c 'Xi ', # 0x9d 'Zhuan ', # 0x9e 'Lu ', # 0x9f 'Jiao ', # 0xa0 'Ying ', # 0xa1 'Lu ', # 0xa2 'Zhi ', # 0xa3 'Tara ', # 0xa4 'Chun ', # 0xa5 'Lian ', # 0xa6 'Tong ', # 0xa7 'Peng ', # 0xa8 'Ni ', # 0xa9 'Zha ', # 0xaa 'Liao ', # 0xab 'Cui ', # 0xac 'Gui ', # 0xad 'Xiao ', # 0xae 'Teng ', # 0xaf 'Fan ', # 0xb0 'Zhi ', # 0xb1 'Jiao ', # 0xb2 'Shan ', # 0xb3 'Wu ', # 0xb4 'Cui ', # 0xb5 'Run ', # 0xb6 'Xiang ', # 0xb7 'Sui ', # 0xb8 'Fen ', # 0xb9 'Ying ', # 0xba 'Tan ', # 0xbb 'Zhua ', # 0xbc 'Dan ', # 0xbd 'Kuai ', # 0xbe 'Nong ', # 0xbf 'Tun ', # 0xc0 'Lian ', # 0xc1 'Bi ', # 0xc2 'Yong ', # 0xc3 'Jue ', # 0xc4 'Chu ', # 0xc5 'Yi ', # 0xc6 'Juan ', # 0xc7 'La ', # 0xc8 'Lian ', # 0xc9 'Sao ', # 0xca 'Tun ', # 0xcb 'Gu ', # 0xcc 'Qi ', # 0xcd 'Cui ', # 0xce 'Bin ', # 0xcf 'Xun ', # 0xd0 'Ru ', # 0xd1 'Huo ', # 0xd2 'Zang ', # 0xd3 'Xian ', # 0xd4 'Biao ', # 0xd5 'Xing ', # 0xd6 'Kuan ', # 0xd7 'La ', # 0xd8 'Yan ', # 0xd9 'Lu ', # 0xda 'Huo ', # 0xdb 'Zang ', # 0xdc 'Luo ', # 0xdd 'Qu ', # 0xde 'Zang ', # 0xdf 'Luan ', # 0xe0 'Ni ', # 0xe1 'Zang ', # 0xe2 'Chen ', # 0xe3 'Qian ', # 0xe4 'Wo ', # 0xe5 'Guang ', # 0xe6 'Zang ', # 0xe7 'Lin ', # 0xe8 'Guang ', # 0xe9 'Zi ', # 0xea 'Jiao ', # 0xeb 'Nie ', # 0xec 'Chou ', # 0xed 'Ji ', # 0xee 'Gao ', # 0xef 'Chou ', # 0xf0 'Mian ', # 0xf1 'Nie ', # 0xf2 'Zhi ', # 0xf3 'Zhi ', # 0xf4 'Ge ', # 0xf5 'Jian ', # 0xf6 'Die ', # 0xf7 'Zhi ', # 0xf8 'Xiu ', # 0xf9 'Tai ', # 0xfa 'Zhen ', # 0xfb 'Jiu ', # 0xfc 'Xian ', # 0xfd 'Yu ', # 0xfe 'Cha ', # 0xff )
gpl-3.0
fkolacek/FIT-VUT
bp-revok/python/lib/python2.7/email/test/test_email_torture.py
150
3669
# Copyright (C) 2002-2004 Python Software Foundation
#
# A torture test of the email package.  This should not be run as part of the
# standard Python test suite since it requires several meg of email messages
# collected in the wild.  These source messages are not checked into the
# Python distro, but are available as part of the standalone email package at
# http://sf.net/projects/mimelib

import sys
import os
import unittest
from cStringIO import StringIO
from types import ListType

from email.test.test_email import TestEmailBase
from test.test_support import TestSkipped, run_unittest

import email
from email import __file__ as testfile
from email.iterators import _structure


def openfile(filename):
    # Resolve the data file relative to the email package's parent
    # directory (the standalone package layout keeps test data in
    # ``moredata/`` beside the package).
    from os.path import join, dirname, abspath
    path = abspath(join(dirname(testfile), os.pardir, 'moredata', filename))
    return open(path, 'r')

# Prevent this test from running in the Python distro
try:
    openfile('crispin-torture.txt')
except IOError:
    raise TestSkipped


class TortureBase(TestEmailBase):
    # Shared helper: parse a message file from the moredata directory.
    def _msgobj(self, filename):
        fp = openfile(filename)
        try:
            msg = email.message_from_file(fp)
        finally:
            fp.close()
        return msg


class TestCrispinTorture(TortureBase):
    # Mark Crispin's torture test from the SquirrelMail project
    def test_mondo_message(self):
        eq = self.assertEqual
        neq = self.ndiffAssertEqual
        msg = self._msgobj('crispin-torture.txt')
        payload = msg.get_payload()
        eq(type(payload), ListType)
        eq(len(payload), 12)
        eq(msg.preamble, None)
        eq(msg.epilogue, '\n')
        # Probably the best way to verify the message is parsed correctly is to
        # dump its structure and compare it against the known structure.
        fp = StringIO()
        _structure(msg, fp=fp)
        # NOTE(review): this expected-structure literal appears
        # whitespace-mangled in this copy (upstream lists one content type
        # per line, indented by nesting depth); verify against the original
        # before relying on it.
        neq(fp.getvalue(), """\ multipart/mixed text/plain message/rfc822 multipart/alternative text/plain multipart/mixed text/richtext application/andrew-inset message/rfc822 audio/basic audio/basic image/pbm message/rfc822 multipart/mixed multipart/mixed text/plain audio/x-sun multipart/mixed image/gif image/gif application/x-be2 application/atomicmail audio/x-sun message/rfc822 multipart/mixed text/plain image/pgm text/plain message/rfc822 multipart/mixed text/plain image/pbm message/rfc822 application/postscript image/gif message/rfc822 multipart/mixed audio/basic audio/basic message/rfc822 multipart/mixed application/postscript text/plain message/rfc822 multipart/mixed text/plain multipart/parallel image/gif audio/basic application/atomicmail message/rfc822 audio/x-sun """)


def _testclasses():
    # Collect every Test* class defined in this module.
    mod = sys.modules[__name__]
    return [getattr(mod, name) for name in dir(mod) if name.startswith('Test')]


def suite():
    suite = unittest.TestSuite()
    for testclass in _testclasses():
        suite.addTest(unittest.makeSuite(testclass))
    return suite


def test_main():
    for testclass in _testclasses():
        run_unittest(testclass)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
apache-2.0
nickhdamico/py
lib/cherrypy/wsgiserver/ssl_builtin.py
56
3242
"""A library for integrating Python's builtin ``ssl`` library with CherryPy.

The ssl module must be importable for SSL functionality.

To use this module, set ``CherryPyWSGIServer.ssl_adapter`` to an instance of
``BuiltinSSLAdapter``.
"""

try:
    import ssl
except ImportError:
    ssl = None

# Pick up the platform's default buffer size; fall back to -1 (let the io
# layer choose) when neither module exposes it.
try:
    from _pyio import DEFAULT_BUFFER_SIZE
except ImportError:
    try:
        from io import DEFAULT_BUFFER_SIZE
    except ImportError:
        DEFAULT_BUFFER_SIZE = -1

import sys

from cherrypy import wsgiserver


class BuiltinSSLAdapter(wsgiserver.SSLAdapter):
    """A wrapper for integrating Python's builtin ssl module with CherryPy."""

    certificate = None
    """The filename of the server SSL certificate."""

    private_key = None
    """The filename of the server's private key file."""

    def __init__(self, certificate, private_key, certificate_chain=None):
        # Fail fast at construction time if the ssl module is unavailable.
        if ssl is None:
            raise ImportError("You must install the ssl module to use HTTPS.")
        self.certificate = certificate
        self.private_key = private_key
        self.certificate_chain = certificate_chain

    def bind(self, sock):
        """Wrap and return the given socket."""
        # No wrapping needed at bind time; TLS is applied per-connection
        # in wrap().
        return sock

    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        try:
            s = ssl.wrap_socket(sock, do_handshake_on_connect=True,
                                server_side=True, certfile=self.certificate,
                                keyfile=self.private_key,
                                ssl_version=ssl.PROTOCOL_SSLv23)
        except ssl.SSLError:
            e = sys.exc_info()[1]
            if e.errno == ssl.SSL_ERROR_EOF:
                # This is almost certainly due to the cherrypy engine
                # 'pinging' the socket to assert it's connectable;
                # the 'ping' isn't SSL.
                return None, {}
            elif e.errno == ssl.SSL_ERROR_SSL:
                if e.args[1].endswith('http request'):
                    # The client is speaking HTTP to an HTTPS server.
                    raise wsgiserver.NoSSLError
                elif e.args[1].endswith('unknown protocol'):
                    # The client is speaking some non-HTTP protocol.
                    # Drop the conn.
                    return None, {}
            # Any other SSL error is unexpected; propagate it.
            raise
        return s, self.get_environ(s)

    # TODO: fill this out more with mod ssl env
    def get_environ(self, sock):
        """Create WSGI environ entries to be merged into each request."""
        cipher = sock.cipher()
        ssl_environ = {
            "wsgi.url_scheme": "https",
            "HTTPS": "on",
            'SSL_PROTOCOL': cipher[1],
            'SSL_CIPHER': cipher[0]
            # SSL_VERSION_INTERFACE     string  The mod_ssl program version
            # SSL_VERSION_LIBRARY   string  The OpenSSL program version
        }
        return ssl_environ

    # The file-object wrapper differs between Python 2 and 3, so the right
    # makefile implementation is selected at class-definition time.
    if sys.version_info >= (3, 0):
        def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
            return wsgiserver.CP_makefile(sock, mode, bufsize)
    else:
        def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
            return wsgiserver.CP_fileobject(sock, mode, bufsize)
gpl-3.0
appapantula/scikit-learn
sklearn/neighbors/graph.py
208
7031
"""Nearest Neighbors graph functions"""

# Author: Jake Vanderplas <[email protected]>
#
# License: BSD 3 clause (C) INRIA, University of Amsterdam
import warnings

from .base import KNeighborsMixin, RadiusNeighborsMixin
from .unsupervised import NearestNeighbors


def _check_params(X, metric, p, metric_params):
    """Check the validity of the input parameters"""
    requested = {'metric': metric, 'p': p, 'metric_params': metric_params}
    est_params = X.get_params()
    for param_name in ('metric', 'p', 'metric_params'):
        func_param = requested[param_name]
        if func_param != est_params[param_name]:
            raise ValueError(
                "Got %s for %s, while the estimator has %s for "
                "the same parameter." % (
                    func_param, param_name, est_params[param_name]))


def _query_include_self(X, include_self, mode):
    """Return the query based on include_self param"""
    # Done to preserve backward compatibility.
    if include_self is None:
        include_self = (mode == "connectivity")
        if include_self:
            warnings.warn(
                "The behavior of 'kneighbors_graph' when mode='connectivity' "
                "will change in version 0.18. Presently, the nearest neighbor "
                "of each sample is the sample itself. Beginning in version "
                "0.18, the default behavior will be to exclude each sample "
                "from being its own nearest neighbor. To maintain the current "
                "behavior, set include_self=True.", DeprecationWarning)

    # Querying with the training data itself makes each sample its own
    # neighbor; querying with None excludes it.
    return X._fit_X if include_self else None


def kneighbors_graph(X, n_neighbors, mode='connectivity', metric='minkowski',
                     p=2, metric_params=None, include_self=None):
    """Computes the (weighted) graph of k-Neighbors for points in X

    Read more in the :ref:`User Guide <unsupervised_neighbors>`.

    Parameters
    ----------
    X : array-like or BallTree, shape = [n_samples, n_features]
        Sample data, in the form of a numpy array or a precomputed
        :class:`BallTree`.

    n_neighbors : int
        Number of neighbors for each sample.

    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 connectivity matrix; 'distance'
        stores the Euclidean distance between points on the edges.

    metric : string, default 'minkowski'
        Distance metric used for the k-Neighbors search; see the
        DistanceMetric class for the available metrics.  The default is
        'euclidean' ('minkowski' with p=2).

    include_self : bool, default backward-compatible.
        Whether to mark each sample as the first nearest neighbor to
        itself.  If `None`, True is used for mode='connectivity' and
        False for mode='distance' (backwards compatible); from version
        0.18 the default will be False for both.

    p : int, default 2
        Power parameter for the Minkowski metric (1 = manhattan,
        2 = euclidean, arbitrary p = l_p).

    metric_params : dict, optional
        additional keyword arguments for the metric function.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples]
        A[i, j] is assigned the weight of edge that connects i to j.

    Examples
    --------
    >>> X = [[0], [3], [1]]
    >>> from sklearn.neighbors import kneighbors_graph
    >>> A = kneighbors_graph(X, 2)
    >>> A.toarray()
    array([[ 1.,  0.,  1.],
           [ 0.,  1.,  1.],
           [ 1.,  0.,  1.]])

    See also
    --------
    radius_neighbors_graph
    """
    if isinstance(X, KNeighborsMixin):
        # Already a fitted estimator: just verify the metric arguments agree.
        _check_params(X, metric, p, metric_params)
        estimator = X
    else:
        estimator = NearestNeighbors(
            n_neighbors, metric=metric, p=p,
            metric_params=metric_params).fit(X)

    query = _query_include_self(estimator, include_self, mode)
    return estimator.kneighbors_graph(X=query, n_neighbors=n_neighbors,
                                      mode=mode)


def radius_neighbors_graph(X, radius, mode='connectivity', metric='minkowski',
                           p=2, metric_params=None, include_self=None):
    """Computes the (weighted) graph of Neighbors for points in X

    Neighborhoods are restricted the points at a distance lower than
    radius.

    Read more in the :ref:`User Guide <unsupervised_neighbors>`.

    Parameters
    ----------
    X : array-like or BallTree, shape = [n_samples, n_features]
        Sample data, in the form of a numpy array or a precomputed
        :class:`BallTree`.

    radius : float
        Radius of neighborhoods.

    mode : {'connectivity', 'distance'}, optional
        'connectivity' returns a 0/1 connectivity matrix; 'distance'
        stores the Euclidean distance between points on the edges.

    metric : string, default 'minkowski'
        Distance metric used for the radius search; see the
        DistanceMetric class for the available metrics.  The default is
        'euclidean' ('minkowski' with p=2).

    include_self : bool, default None
        Whether to mark each sample as the first nearest neighbor to
        itself.  If `None`, True is used for mode='connectivity' and
        False for mode='distance' (backwards compatible); from version
        0.18 the default will be False for both.

    p : int, default 2
        Power parameter for the Minkowski metric (1 = manhattan,
        2 = euclidean, arbitrary p = l_p).

    metric_params : dict, optional
        additional keyword arguments for the metric function.

    Returns
    -------
    A : sparse matrix in CSR format, shape = [n_samples, n_samples]
        A[i, j] is assigned the weight of edge that connects i to j.

    Examples
    --------
    >>> X = [[0], [3], [1]]
    >>> from sklearn.neighbors import radius_neighbors_graph
    >>> A = radius_neighbors_graph(X, 1.5)
    >>> A.toarray()
    array([[ 1.,  0.,  1.],
           [ 0.,  1.,  0.],
           [ 1.,  0.,  1.]])

    See also
    --------
    kneighbors_graph
    """
    if isinstance(X, RadiusNeighborsMixin):
        # Already a fitted estimator: just verify the metric arguments agree.
        _check_params(X, metric, p, metric_params)
        estimator = X
    else:
        estimator = NearestNeighbors(
            radius=radius, metric=metric, p=p,
            metric_params=metric_params).fit(X)

    query = _query_include_self(estimator, include_self, mode)
    return estimator.radius_neighbors_graph(query, radius, mode)
andreaso/ansible
test/runner/lib/cover.py
26
7465
"""Code coverage utilities."""

from __future__ import absolute_import, print_function

import os
import re

from lib.target import (
    walk_module_targets,
    walk_compile_targets,
)

from lib.util import (
    display,
    ApplicationError,
    EnvironmentConfig,
    run_command,
    common_environment,
)

from lib.executor import (
    Delegate,
    install_command_requirements,
)

COVERAGE_DIR = 'test/results/coverage'
COVERAGE_FILE = os.path.join(COVERAGE_DIR, 'coverage')

# ordering used when building per-group output file suffixes
COVERAGE_GROUPS = ('command', 'target', 'environment', 'version')


def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    :rtype: list[str]
    """
    coverage = initialize_coverage(args)

    # map module name -> source path, for rewriting ansible_module_* entries
    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR) if '=coverage.' in f]

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    counter = 0
    groups = {}

    if args.all or args.stub:
        sources = sorted(os.path.abspath(target.path) for target in walk_compile_targets())
    else:
        sources = []

    if args.stub:
        # seed a synthetic group containing every source with no arcs
        groups['=stub'] = dict((source, set()) for source in sources)

    for coverage_file in coverage_files:
        counter += 1
        display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)

        original = coverage.CoverageData()

        group = get_coverage_group(args, coverage_file)

        if group is None:
            display.warning('Unexpected name for coverage file: %s' % coverage_file)
            continue

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = set(original.arcs(filename) or [])

            if not arcs:
                # This is most likely due to using an unsupported version of coverage.
                display.warning('No arcs found for "%s" in coverage file: %s' % (filename, coverage_file))
                continue

            # Rewrite remote/temporary execution paths back to local source
            # paths.  Branch order matters: modlib zip entries first, then
            # individual module wrappers, then remote checkout paths.
            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
                if module not in modules:
                    display.warning('Skipping coverage of unknown module: %s' % module)
                    continue
                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif re.search('^(/.*?)?/root/ansible/', filename):
                new_name = re.sub('^(/.*?)?/root/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if group not in groups:
                groups[group] = {}

            arc_data = groups[group]

            if filename not in arc_data:
                arc_data[filename] = set()

            arc_data[filename].update(arcs)

    output_files = []

    for group in sorted(groups):
        arc_data = groups[group]

        updated = coverage.CoverageData()

        for filename in arc_data:
            if not os.path.isfile(filename):
                display.warning('Invalid coverage path: %s' % filename)
                continue

            updated.add_arcs({filename: list(arc_data[filename])})

        if args.all:
            # include every source file, even ones with no recorded arcs
            updated.add_arcs(dict((source, []) for source in sources))

        if not args.explain:
            output_file = COVERAGE_FILE + group
            updated.write_file(output_file)
            output_files.append(output_file)

    return sorted(output_files)


def command_coverage_report(args):
    """Combine coverage data and print a console report per group.
    :type args: CoverageConfig
    """
    output_files = command_coverage_combine(args)

    for output_file in output_files:
        if args.group_by or args.stub:
            display.info('>>> Coverage Group: %s' % ' '.join(os.path.basename(output_file).split('=')[1:]))

        env = common_environment()
        env.update(dict(COVERAGE_FILE=output_file))
        run_command(args, env=env, cmd=['coverage', 'report'])


def command_coverage_html(args):
    """Combine coverage data and generate an HTML report per group.
    :type args: CoverageConfig
    """
    output_files = command_coverage_combine(args)

    for output_file in output_files:
        dir_name = 'test/results/reports/%s' % os.path.basename(output_file)
        env = common_environment()
        env.update(dict(COVERAGE_FILE=output_file))
        run_command(args, env=env, cmd=['coverage', 'html', '-d', dir_name])


def command_coverage_xml(args):
    """Combine coverage data and generate an XML report per group.
    :type args: CoverageConfig
    """
    output_files = command_coverage_combine(args)

    for output_file in output_files:
        xml_name = 'test/results/reports/%s.xml' % os.path.basename(output_file)
        env = common_environment()
        env.update(dict(COVERAGE_FILE=output_file))
        run_command(args, env=env, cmd=['coverage', 'xml', '-o', xml_name])


def command_coverage_erase(args):
    """Remove all coverage data files from the coverage directory.
    :type args: CoverageConfig
    """
    initialize_coverage(args)

    for name in os.listdir(COVERAGE_DIR):
        if not name.startswith('coverage') and '=coverage.' not in name:
            continue

        path = os.path.join(COVERAGE_DIR, name)

        if not args.explain:
            os.remove(path)


def initialize_coverage(args):
    """Install requirements if requested and return the coverage module.
    :type args: CoverageConfig
    :rtype: coverage
    """
    if args.delegate:
        raise Delegate()

    if args.requirements:
        install_command_requirements(args)

    try:
        import coverage
    except ImportError:
        coverage = None

    if not coverage:
        raise ApplicationError('You must install the "coverage" python module to use this command.')

    return coverage


def get_coverage_group(args, coverage_file):
    """Derive the group suffix for a coverage file from its name.
    :type args: CoverageConfig
    :type coverage_file: str
    :rtype: str
    """
    # file names look like: command=target=environment=version=coverage.*
    parts = os.path.basename(coverage_file).split('=', 4)

    if len(parts) != 5 or not parts[4].startswith('coverage.'):
        return None

    names = dict(
        command=parts[0],
        target=parts[1],
        environment=parts[2],
        version=parts[3],
    )

    group = ''

    for part in COVERAGE_GROUPS:
        if part in args.group_by:
            group += '=%s' % names[part]

    return group


class CoverageConfig(EnvironmentConfig):
    """Configuration for the coverage command."""
    def __init__(self, args):
        """
        :type args: any
        """
        super(CoverageConfig, self).__init__(args, 'coverage')

        self.group_by = frozenset(args.group_by) if 'group_by' in args and args.group_by else set()  # type: frozenset[str]
        self.all = args.all if 'all' in args else False  # type: bool
        self.stub = args.stub if 'stub' in args else False  # type: bool
gpl-3.0
redhat-openstack/neutron
neutron/tests/unit/ryu/test_ryu_db.py
9
2313
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp> # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib import operator from neutron.db import api as db from neutron.plugins.ryu.common import config # noqa from neutron.plugins.ryu.db import api_v2 as db_api_v2 from neutron.tests.unit import test_db_plugin as test_plugin class RyuDBTest(test_plugin.NeutronDbPluginV2TestCase): @staticmethod def _tunnel_key_sort(key_list): key_list.sort(key=operator.attrgetter('tunnel_key')) return [(key.network_id, key.tunnel_key) for key in key_list] def test_key_allocation(self): tunnel_key = db_api_v2.TunnelKey() session = db.get_session() with contextlib.nested(self.network('network-0'), self.network('network-1') ) as (network_0, network_1): network_id0 = network_0['network']['id'] key0 = tunnel_key.allocate(session, network_id0) network_id1 = network_1['network']['id'] key1 = tunnel_key.allocate(session, network_id1) key_list = tunnel_key.all_list() self.assertEqual(len(key_list), 2) expected_list = [(network_id0, key0), (network_id1, key1)] self.assertEqual(self._tunnel_key_sort(key_list), expected_list) tunnel_key.delete(session, network_id0) key_list = tunnel_key.all_list() self.assertEqual(self._tunnel_key_sort(key_list), [(network_id1, key1)]) tunnel_key.delete(session, network_id1) self.assertEqual(tunnel_key.all_list(), [])
apache-2.0
whitehorse-io/encarnia
pyenv/lib/python2.7/site-packages/twisted/python/urlpath.py
3
9084
# -*- test-case-name: twisted.python.test.test_urlpath -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ L{URLPath}, a representation of a URL. """ from __future__ import division, absolute_import from twisted.python.compat import ( nativeString, unicode, urllib_parse as urlparse, urlunquote, urlquote ) from hyperlink import URL as _URL _allascii = b"".join([chr(x).encode('ascii') for x in range(1, 128)]) def _rereconstituter(name): """ Attriute declaration to preserve mutability on L{URLPath}. @param name: a public attribute name @type name: native L{str} @return: a descriptor which retrieves the private version of the attribute on get and calls rerealize on set. """ privateName = nativeString("_") + name return property( lambda self: getattr(self, privateName), lambda self, value: (setattr(self, privateName, value if isinstance(value, bytes) else value.encode("charmap")) or self._reconstitute()) ) class URLPath(object): """ A representation of a URL. @ivar scheme: The scheme of the URL (e.g. 'http'). @type scheme: L{bytes} @ivar netloc: The network location ("host"). @type netloc: L{bytes} @ivar path: The path on the network location. @type path: L{bytes} @ivar query: The query argument (the portion after ? in the URL). @type query: L{bytes} @ivar fragment: The page fragment (the portion after # in the URL). @type fragment: L{bytes} """ def __init__(self, scheme=b'', netloc=b'localhost', path=b'', query=b'', fragment=b''): self._scheme = scheme or b'http' self._netloc = netloc self._path = path or b'/' self._query = query self._fragment = fragment self._reconstitute() def _reconstitute(self): """ Reconstitute this L{URLPath} from all its given attributes. 
""" urltext = urlquote( urlparse.urlunsplit((self._scheme, self._netloc, self._path, self._query, self._fragment)), safe=_allascii ) self._url = _URL.fromText(urltext.encode("ascii").decode("ascii")) scheme = _rereconstituter("scheme") netloc = _rereconstituter("netloc") path = _rereconstituter("path") query = _rereconstituter("query") fragment = _rereconstituter("fragment") @classmethod def _fromURL(cls, urlInstance): """ Reconstruct all the public instance variables of this L{URLPath} from its underlying L{_URL}. @param urlInstance: the object to base this L{URLPath} on. @type urlInstance: L{_URL} @return: a new L{URLPath} """ self = cls.__new__(cls) self._url = urlInstance.replace(path=urlInstance.path or [u""]) self._scheme = self._url.scheme.encode("ascii") self._netloc = self._url.authority().encode("ascii") self._path = (_URL(path=self._url.path, rooted=True).asURI().asText() .encode("ascii")) self._query = (_URL(query=self._url.query).asURI().asText() .encode("ascii"))[1:] self._fragment = self._url.fragment.encode("ascii") return self def pathList(self, unquote=False, copy=True): """ Split this URL's path into its components. @param unquote: whether to remove %-encoding from the returned strings. @param copy: (ignored, do not use) @return: The components of C{self.path} @rtype: L{list} of L{bytes} """ segments = self._url.path mapper = lambda x: x.encode("ascii") if unquote: mapper = (lambda x, m=mapper: m(urlunquote(x))) return [b''] + [mapper(segment) for segment in segments] @classmethod def fromString(klass, url): """ Make a L{URLPath} from a L{str} or L{unicode}. @param url: A L{str} representation of a URL. @type url: L{str} or L{unicode}. @return: a new L{URLPath} derived from the given string. @rtype: L{URLPath} """ if not isinstance(url, (str, unicode)): raise ValueError("'url' must be a str or unicode") if isinstance(url, bytes): # On Python 2, accepting 'str' (for compatibility) means we might # get 'bytes'. 
On py3, this will not work with bytes due to the # check above. return klass.fromBytes(url) return klass._fromURL(_URL.fromText(url)) @classmethod def fromBytes(klass, url): """ Make a L{URLPath} from a L{bytes}. @param url: A L{bytes} representation of a URL. @type url: L{bytes} @return: a new L{URLPath} derived from the given L{bytes}. @rtype: L{URLPath} @since: 15.4 """ if not isinstance(url, bytes): raise ValueError("'url' must be bytes") quoted = urlquote(url, safe=_allascii) if isinstance(quoted, bytes): # This will only be bytes on python 2, where we can transform it # into unicode. On python 3, urlquote always returns str. quoted = quoted.decode("ascii") return klass.fromString(quoted) @classmethod def fromRequest(klass, request): """ Make a L{URLPath} from a L{twisted.web.http.Request}. @param request: A L{twisted.web.http.Request} to make the L{URLPath} from. @return: a new L{URLPath} derived from the given request. @rtype: L{URLPath} """ return klass.fromBytes(request.prePathURL()) def _mod(self, newURL, keepQuery): """ Return a modified copy of C{self} using C{newURL}, keeping the query string if C{keepQuery} is C{True}. @param newURL: a L{URL} to derive a new L{URLPath} from @type newURL: L{URL} @param keepQuery: if C{True}, preserve the query parameters from C{self} on the new L{URLPath}; if C{False}, give the new L{URLPath} no query parameters. @type keepQuery: L{bool} @return: a new L{URLPath} """ return self._fromURL(newURL.replace( fragment=u'', query=self._url.query if keepQuery else () )) def sibling(self, path, keepQuery=False): """ Get the sibling of the current L{URLPath}. A sibling is a file which is in the same directory as the current file. @param path: The path of the sibling. @type path: L{bytes} @param keepQuery: Whether to keep the query parameters on the returned L{URLPath}. 
@type: keepQuery: L{bool} @return: a new L{URLPath} """ return self._mod(self._url.sibling(path.decode("ascii")), keepQuery) def child(self, path, keepQuery=False): """ Get the child of this L{URLPath}. @param path: The path of the child. @type path: L{bytes} @param keepQuery: Whether to keep the query parameters on the returned L{URLPath}. @type: keepQuery: L{bool} @return: a new L{URLPath} """ return self._mod(self._url.child(path.decode("ascii")), keepQuery) def parent(self, keepQuery=False): """ Get the parent directory of this L{URLPath}. @param keepQuery: Whether to keep the query parameters on the returned L{URLPath}. @type: keepQuery: L{bool} @return: a new L{URLPath} """ return self._mod(self._url.click(u".."), keepQuery) def here(self, keepQuery=False): """ Get the current directory of this L{URLPath}. @param keepQuery: Whether to keep the query parameters on the returned L{URLPath}. @type: keepQuery: L{bool} @return: a new L{URLPath} """ return self._mod(self._url.click(u"."), keepQuery) def click(self, st): """ Return a path which is the URL where a browser would presumably take you if you clicked on a link with an HREF as given. @param st: A relative URL, to be interpreted relative to C{self} as the base URL. @type st: L{bytes} @return: a new L{URLPath} """ return self._fromURL(self._url.click(st.decode("ascii"))) def __str__(self): """ The L{str} of a L{URLPath} is its URL text. """ return nativeString(self._url.asURI().asText()) def __repr__(self): """ The L{repr} of a L{URLPath} is an eval-able expression which will construct a similar L{URLPath}. """ return ('URLPath(scheme=%r, netloc=%r, path=%r, query=%r, fragment=%r)' % (self.scheme, self.netloc, self.path, self.query, self.fragment))
mit
dpac-vlsi/SynchroTrace
src/dev/x86/Cmos.py
11
2041
# Copyright (c) 2008 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black from m5.params import * from m5.proxy import * from Device import BasicPioDevice from X86IntPin import X86IntSourcePin class Cmos(BasicPioDevice): type = 'Cmos' cxx_class='X86ISA::Cmos' time = Param.Time('01/01/2012', "System time to use ('Now' for actual time)") pio_latency = Param.Latency('1ns', "Programmed IO latency in simticks") int_pin = Param.X86IntSourcePin(X86IntSourcePin(), 'Pin to signal RTC alarm interrupts to')
bsd-3-clause
awkspace/ansible
test/runner/lib/integration/__init__.py
11
7030
"""Ansible integration test infrastructure.""" from __future__ import absolute_import, print_function import contextlib import os import shutil import tempfile from lib.target import ( analyze_integration_target_dependencies, walk_integration_targets, ) from lib.config import ( NetworkIntegrationConfig, PosixIntegrationConfig, WindowsIntegrationConfig, ) from lib.util import ( ApplicationError, display, make_dirs, ) from lib.cache import ( CommonCache, ) def generate_dependency_map(integration_targets): """ :type integration_targets: list[IntegrationTarget] :rtype: dict[str, set[IntegrationTarget]] """ targets_dict = dict((target.name, target) for target in integration_targets) target_dependencies = analyze_integration_target_dependencies(integration_targets) dependency_map = {} invalid_targets = set() for dependency, dependents in target_dependencies.items(): dependency_target = targets_dict.get(dependency) if not dependency_target: invalid_targets.add(dependency) continue for dependent in dependents: if dependent not in dependency_map: dependency_map[dependent] = set() dependency_map[dependent].add(dependency_target) if invalid_targets: raise ApplicationError('Non-existent target dependencies: %s' % ', '.join(sorted(invalid_targets))) return dependency_map def get_files_needed(target_dependencies): """ :type target_dependencies: list[IntegrationTarget] :rtype: list[str] """ files_needed = [] for target_dependency in target_dependencies: files_needed += target_dependency.needs_file files_needed = sorted(set(files_needed)) invalid_paths = [path for path in files_needed if not os.path.isfile(path)] if invalid_paths: raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(invalid_paths)) return files_needed @contextlib.contextmanager def integration_test_environment(args, target, inventory_path): """ :type args: IntegrationConfig :type target: IntegrationTarget :type inventory_path: str """ vars_file = 'integration_config.yml' if args.no_temp_workdir 
or 'no/temp_workdir/' in target.aliases: display.warning('Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.') integration_dir = 'test/integration' ansible_config = os.path.join(integration_dir, '%s.cfg' % args.command) inventory_name = os.path.relpath(inventory_path, integration_dir) if '/' in inventory_name: inventory_name = inventory_path yield IntegrationEnvironment(integration_dir, inventory_name, ansible_config, vars_file) return root_temp_dir = os.path.expanduser('~/.ansible/test/tmp') prefix = '%s-' % target.name suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8' if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases: display.warning('Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.') suffix = '-ansible' if isinstance('', bytes): suffix = suffix.encode('utf-8') if args.explain: temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix)) else: make_dirs(root_temp_dir) temp_dir = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir) try: display.info('Preparing temporary directory: %s' % temp_dir, verbosity=2) inventory_names = { PosixIntegrationConfig: 'inventory', WindowsIntegrationConfig: 'inventory.winrm', NetworkIntegrationConfig: 'inventory.networking', } inventory_name = inventory_names[type(args)] cache = IntegrationCache(args) target_dependencies = sorted([target] + list(cache.dependency_map.get(target.name, set()))) files_needed = get_files_needed(target_dependencies) integration_dir = os.path.join(temp_dir, 'test/integration') ansible_config = os.path.join(integration_dir, '%s.cfg' % args.command) file_copies = [ ('test/integration/%s.cfg' % args.command, ansible_config), ('test/integration/integration_config.yml', os.path.join(integration_dir, vars_file)), (inventory_path, os.path.join(integration_dir, inventory_name)), ] file_copies += [(path, os.path.join(temp_dir, path)) for 
path in files_needed] directory_copies = [ (os.path.join('test/integration/targets', target.name), os.path.join(integration_dir, 'targets', target.name)) for target in target_dependencies ] inventory_dir = os.path.dirname(inventory_path) host_vars_dir = os.path.join(inventory_dir, 'host_vars') group_vars_dir = os.path.join(inventory_dir, 'group_vars') if os.path.isdir(host_vars_dir): directory_copies.append((host_vars_dir, os.path.join(integration_dir, os.path.basename(host_vars_dir)))) if os.path.isdir(group_vars_dir): directory_copies.append((group_vars_dir, os.path.join(integration_dir, os.path.basename(group_vars_dir)))) directory_copies = sorted(set(directory_copies)) file_copies = sorted(set(file_copies)) if not args.explain: make_dirs(integration_dir) for dir_src, dir_dst in directory_copies: display.info('Copying %s/ to %s/' % (dir_src, dir_dst), verbosity=2) if not args.explain: shutil.copytree(dir_src, dir_dst, symlinks=True) for file_src, file_dst in file_copies: display.info('Copying %s to %s' % (file_src, file_dst), verbosity=2) if not args.explain: make_dirs(os.path.dirname(file_dst)) shutil.copy2(file_src, file_dst) yield IntegrationEnvironment(integration_dir, inventory_name, ansible_config, vars_file) finally: if not args.explain: shutil.rmtree(temp_dir) class IntegrationEnvironment(object): """Details about the integration environment.""" def __init__(self, integration_dir, inventory_path, ansible_config, vars_file): self.integration_dir = integration_dir self.inventory_path = inventory_path self.ansible_config = ansible_config self.vars_file = vars_file class IntegrationCache(CommonCache): """Integration cache.""" @property def integration_targets(self): """ :rtype: list[IntegrationTarget] """ return self.get('integration_targets', lambda: list(walk_integration_targets())) @property def dependency_map(self): """ :rtype: dict[str, set[IntegrationTarget]] """ return self.get('dependency_map', lambda: 
generate_dependency_map(self.integration_targets))
gpl-3.0
cocrawler/cocrawler
scripts/aiohttp-fetch.py
1
2534
''' Fetches some urls using aiohttp. Also serves as a minimum example of using aiohttp. Good examples: https://www.enterprisecarshare.com/robots.txt -- 302 redir lacking Location: raises RuntimeError ''' import sys from traceback import print_exc import asyncio import aiohttp import aiohttp.connector async def main(urls): connector = aiohttp.connector.TCPConnector(use_dns_cache=True) session = aiohttp.ClientSession(connector=connector) for url in urls: if not url.startswith('http'): url = 'http://' + url print(url, '\n') try: response = await session.get(url, allow_redirects=True) except aiohttp.client_exceptions.ClientConnectorError as e: print('saw connect error for', url, ':', e, file=sys.stderr) continue except Exception as e: print('Saw an exception thrown by session.get:') print_exc() print('') continue #print('dns:') #for k, v in connector.cached_hosts.items(): # print(' ', k) # or k[0]? # for rec in v: # print(' ', rec.get('host')) print('') if str(response.url) != url: print('final url:', str(response.url)) print('') print('final request headers:') for k, v in response.request_info.headers.items(): print(k+':', v) print('') if response.history: print('response history: response and headers:') for h in response.history: print(' ', repr(h)) print('') print('response history urls:') response_urls = [str(h.url) for h in response.history] response_urls.append(str(response.url)) if response_urls: print(' ', '\n '.join(response_urls)) print('') print('response headers:') for k, v in response.raw_headers: line = k+b': '+v print(' ', line.decode(errors='ignore')) print('') try: text = await response.text(errors='ignore') #print(text) pass except Exception: print_exc() await session.close() loop = asyncio.get_event_loop() loop.run_until_complete(main(sys.argv[1:])) # vodoo recommended by advanced aiohttp docs for graceful shutdown # https://github.com/aio-libs/aiohttp/issues/1925 loop.run_until_complete(asyncio.sleep(0.250)) loop.close()
apache-2.0
nysan/yocto-autobuilder
lib/python2.6/site-packages/Twisted-11.0.0-py2.6-linux-x86_64.egg/twisted/internet/base.py
18
41263
# -*- test-case-name: twisted.test.test_internet -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Very basic functionality for a Reactor implementation. """ import socket # needed only for sync-dns from zope.interface import implements, classImplements import sys import warnings from heapq import heappush, heappop, heapify import traceback from twisted.python.compat import set from twisted.python.util import unsignedID from twisted.internet.interfaces import IReactorCore, IReactorTime, IReactorThreads from twisted.internet.interfaces import IResolverSimple, IReactorPluggableResolver from twisted.internet.interfaces import IConnector, IDelayedCall from twisted.internet import fdesc, main, error, abstract, defer, threads from twisted.python import log, failure, reflect from twisted.python.runtime import seconds as runtimeSeconds, platform from twisted.internet.defer import Deferred, DeferredList from twisted.persisted import styles # This import is for side-effects! Even if you don't see any code using it # in this module, don't delete it. from twisted.python import threadable class DelayedCall(styles.Ephemeral): implements(IDelayedCall) # enable .debug to record creator call stack, and it will be logged if # an exception occurs while the function is being run debug = False _str = None def __init__(self, time, func, args, kw, cancel, reset, seconds=runtimeSeconds): """ @param time: Seconds from the epoch at which to call C{func}. @param func: The callable to call. @param args: The positional arguments to pass to the callable. @param kw: The keyword arguments to pass to the callable. @param cancel: A callable which will be called with this DelayedCall before cancellation. @param reset: A callable which will be called with this DelayedCall after changing this DelayedCall's scheduled execution time. The callable should adjust any necessary scheduling details to ensure this DelayedCall is invoked at the new appropriate time. 
@param seconds: If provided, a no-argument callable which will be used to determine the current time any time that information is needed. """ self.time, self.func, self.args, self.kw = time, func, args, kw self.resetter = reset self.canceller = cancel self.seconds = seconds self.cancelled = self.called = 0 self.delayed_time = 0 if self.debug: self.creator = traceback.format_stack()[:-2] def getTime(self): """Return the time at which this call will fire @rtype: C{float} @return: The number of seconds after the epoch at which this call is scheduled to be made. """ return self.time + self.delayed_time def cancel(self): """Unschedule this call @raise AlreadyCancelled: Raised if this call has already been unscheduled. @raise AlreadyCalled: Raised if this call has already been made. """ if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: self.canceller(self) self.cancelled = 1 if self.debug: self._str = str(self) del self.func, self.args, self.kw def reset(self, secondsFromNow): """Reschedule this call for a different time @type secondsFromNow: C{float} @param secondsFromNow: The number of seconds from the time of the C{reset} call at which this call will be scheduled. @raise AlreadyCancelled: Raised if this call has been cancelled. @raise AlreadyCalled: Raised if this call has already been made. """ if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: newTime = self.seconds() + secondsFromNow if newTime < self.time: self.delayed_time = 0 self.time = newTime self.resetter(self) else: self.delayed_time = newTime - self.time def delay(self, secondsLater): """Reschedule this call for a later time @type secondsLater: C{float} @param secondsLater: The number of seconds after the originally scheduled time for which to reschedule this call. @raise AlreadyCancelled: Raised if this call has been cancelled. @raise AlreadyCalled: Raised if this call has already been made. 
""" if self.cancelled: raise error.AlreadyCancelled elif self.called: raise error.AlreadyCalled else: self.delayed_time += secondsLater if self.delayed_time < 0: self.activate_delay() self.resetter(self) def activate_delay(self): self.time += self.delayed_time self.delayed_time = 0 def active(self): """Determine whether this call is still pending @rtype: C{bool} @return: True if this call has not yet been made or cancelled, False otherwise. """ return not (self.cancelled or self.called) def __le__(self, other): """ Implement C{<=} operator between two L{DelayedCall} instances. Comparison is based on the C{time} attribute (unadjusted by the delayed time). """ return self.time <= other.time def __lt__(self, other): """ Implement C{<} operator between two L{DelayedCall} instances. Comparison is based on the C{time} attribute (unadjusted by the delayed time). """ return self.time < other.time def __str__(self): if self._str is not None: return self._str if hasattr(self, 'func'): if hasattr(self.func, 'func_name'): func = self.func.func_name if hasattr(self.func, 'im_class'): func = self.func.im_class.__name__ + '.' + func else: func = reflect.safe_repr(self.func) else: func = None now = self.seconds() L = ["<DelayedCall 0x%x [%ss] called=%s cancelled=%s" % ( unsignedID(self), self.time - now, self.called, self.cancelled)] if func is not None: L.extend((" ", func, "(")) if self.args: L.append(", ".join([reflect.safe_repr(e) for e in self.args])) if self.kw: L.append(", ") if self.kw: L.append(", ".join(['%s=%s' % (k, reflect.safe_repr(v)) for (k, v) in self.kw.iteritems()])) L.append(")") if self.debug: L.append("\n\ntraceback at creation: \n\n%s" % (' '.join(self.creator))) L.append('>') return "".join(L) class ThreadedResolver(object): """ L{ThreadedResolver} uses a reactor, a threadpool, and L{socket.gethostbyname} to perform name lookups without blocking the reactor thread. 
It also supports timeouts indepedently from whatever timeout logic L{socket.gethostbyname} might have. @ivar reactor: The reactor the threadpool of which will be used to call L{socket.gethostbyname} and the I/O thread of which the result will be delivered. """ implements(IResolverSimple) def __init__(self, reactor): self.reactor = reactor self._runningQueries = {} def _fail(self, name, err): err = error.DNSLookupError("address %r not found: %s" % (name, err)) return failure.Failure(err) def _cleanup(self, name, lookupDeferred): userDeferred, cancelCall = self._runningQueries[lookupDeferred] del self._runningQueries[lookupDeferred] userDeferred.errback(self._fail(name, "timeout error")) def _checkTimeout(self, result, name, lookupDeferred): try: userDeferred, cancelCall = self._runningQueries[lookupDeferred] except KeyError: pass else: del self._runningQueries[lookupDeferred] cancelCall.cancel() if isinstance(result, failure.Failure): userDeferred.errback(self._fail(name, result.getErrorMessage())) else: userDeferred.callback(result) def getHostByName(self, name, timeout = (1, 3, 11, 45)): """ See L{twisted.internet.interfaces.IResolverSimple.getHostByName}. Note that the elements of C{timeout} are summed and the result is used as a timeout for the lookup. Any intermediate timeout or retry logic is left up to the platform via L{socket.gethostbyname}. 
""" if timeout: timeoutDelay = sum(timeout) else: timeoutDelay = 60 userDeferred = defer.Deferred() lookupDeferred = threads.deferToThreadPool( self.reactor, self.reactor.getThreadPool(), socket.gethostbyname, name) cancelCall = self.reactor.callLater( timeoutDelay, self._cleanup, name, lookupDeferred) self._runningQueries[lookupDeferred] = (userDeferred, cancelCall) lookupDeferred.addBoth(self._checkTimeout, name, lookupDeferred) return userDeferred class BlockingResolver: implements(IResolverSimple) def getHostByName(self, name, timeout = (1, 3, 11, 45)): try: address = socket.gethostbyname(name) except socket.error: msg = "address %r not found" % (name,) err = error.DNSLookupError(msg) return defer.fail(err) else: return defer.succeed(address) class _ThreePhaseEvent(object): """ Collection of callables (with arguments) which can be invoked as a group in a particular order. This provides the underlying implementation for the reactor's system event triggers. An instance of this class tracks triggers for all phases of a single type of event. @ivar before: A list of the before-phase triggers containing three-tuples of a callable, a tuple of positional arguments, and a dict of keyword arguments @ivar finishedBefore: A list of the before-phase triggers which have already been executed. This is only populated in the C{'BEFORE'} state. @ivar during: A list of the during-phase triggers containing three-tuples of a callable, a tuple of positional arguments, and a dict of keyword arguments @ivar after: A list of the after-phase triggers containing three-tuples of a callable, a tuple of positional arguments, and a dict of keyword arguments @ivar state: A string indicating what is currently going on with this object. One of C{'BASE'} (for when nothing in particular is happening; this is the initial value), C{'BEFORE'} (when the before-phase triggers are in the process of being executed). 
""" def __init__(self): self.before = [] self.during = [] self.after = [] self.state = 'BASE' def addTrigger(self, phase, callable, *args, **kwargs): """ Add a trigger to the indicate phase. @param phase: One of C{'before'}, C{'during'}, or C{'after'}. @param callable: An object to be called when this event is triggered. @param *args: Positional arguments to pass to C{callable}. @param **kwargs: Keyword arguments to pass to C{callable}. @return: An opaque handle which may be passed to L{removeTrigger} to reverse the effects of calling this method. """ if phase not in ('before', 'during', 'after'): raise KeyError("invalid phase") getattr(self, phase).append((callable, args, kwargs)) return phase, callable, args, kwargs def removeTrigger(self, handle): """ Remove a previously added trigger callable. @param handle: An object previously returned by L{addTrigger}. The trigger added by that call will be removed. @raise ValueError: If the trigger associated with C{handle} has already been removed or if C{handle} is not a valid handle. """ return getattr(self, 'removeTrigger_' + self.state)(handle) def removeTrigger_BASE(self, handle): """ Just try to remove the trigger. @see: removeTrigger """ try: phase, callable, args, kwargs = handle except (TypeError, ValueError): raise ValueError("invalid trigger handle") else: if phase not in ('before', 'during', 'after'): raise KeyError("invalid phase") getattr(self, phase).remove((callable, args, kwargs)) def removeTrigger_BEFORE(self, handle): """ Remove the trigger if it has yet to be executed, otherwise emit a warning that in the future an exception will be raised when removing an already-executed trigger. 
@see: removeTrigger """ phase, callable, args, kwargs = handle if phase != 'before': return self.removeTrigger_BASE(handle) if (callable, args, kwargs) in self.finishedBefore: warnings.warn( "Removing already-fired system event triggers will raise an " "exception in a future version of Twisted.", category=DeprecationWarning, stacklevel=3) else: self.removeTrigger_BASE(handle) def fireEvent(self): """ Call the triggers added to this event. """ self.state = 'BEFORE' self.finishedBefore = [] beforeResults = [] while self.before: callable, args, kwargs = self.before.pop(0) self.finishedBefore.append((callable, args, kwargs)) try: result = callable(*args, **kwargs) except: log.err() else: if isinstance(result, Deferred): beforeResults.append(result) DeferredList(beforeResults).addCallback(self._continueFiring) def _continueFiring(self, ignored): """ Call the during and after phase triggers for this event. """ self.state = 'BASE' self.finishedBefore = [] for phase in self.during, self.after: while phase: callable, args, kwargs = phase.pop(0) try: callable(*args, **kwargs) except: log.err() class ReactorBase(object): """ Default base class for Reactors. @type _stopped: C{bool} @ivar _stopped: A flag which is true between paired calls to C{reactor.run} and C{reactor.stop}. This should be replaced with an explicit state machine. @type _justStopped: C{bool} @ivar _justStopped: A flag which is true between the time C{reactor.stop} is called and the time the shutdown system event is fired. This is used to determine whether that event should be fired after each iteration through the mainloop. This should be replaced with an explicit state machine. @type _started: C{bool} @ivar _started: A flag which is true from the time C{reactor.run} is called until the time C{reactor.run} returns. This is used to prevent calls to C{reactor.run} on a running reactor. This should be replaced with an explicit state machine. 
@ivar running: See L{IReactorCore.running} """ implements(IReactorCore, IReactorTime, IReactorPluggableResolver) _stopped = True installed = False usingThreads = False resolver = BlockingResolver() __name__ = "twisted.internet.reactor" def __init__(self): self.threadCallQueue = [] self._eventTriggers = {} self._pendingTimedCalls = [] self._newTimedCalls = [] self._cancellations = 0 self.running = False self._started = False self._justStopped = False self._startedBefore = False # reactor internal readers, e.g. the waker. self._internalReaders = set() self.waker = None # Arrange for the running attribute to change to True at the right time # and let a subclass possibly do other things at that time (eg install # signal handlers). self.addSystemEventTrigger( 'during', 'startup', self._reallyStartRunning) self.addSystemEventTrigger('during', 'shutdown', self.crash) self.addSystemEventTrigger('during', 'shutdown', self.disconnectAll) if platform.supportsThreads(): self._initThreads() self.installWaker() # override in subclasses _lock = None def installWaker(self): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement installWaker") def installResolver(self, resolver): assert IResolverSimple.providedBy(resolver) oldResolver = self.resolver self.resolver = resolver return oldResolver def wakeUp(self): """ Wake up the event loop. """ if self.waker: self.waker.wakeUp() # if the waker isn't installed, the reactor isn't running, and # therefore doesn't need to be woken up def doIteration(self, delay): """ Do one iteration over the readers and writers which have been added. 
""" raise NotImplementedError( reflect.qual(self.__class__) + " did not implement doIteration") def addReader(self, reader): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement addReader") def addWriter(self, writer): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement addWriter") def removeReader(self, reader): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement removeReader") def removeWriter(self, writer): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement removeWriter") def removeAll(self): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement removeAll") def getReaders(self): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement getReaders") def getWriters(self): raise NotImplementedError( reflect.qual(self.__class__) + " did not implement getWriters") def resolve(self, name, timeout = (1, 3, 11, 45)): """Return a Deferred that will resolve a hostname. """ if not name: # XXX - This is *less than* '::', and will screw up IPv6 servers return defer.succeed('0.0.0.0') if abstract.isIPAddress(name): return defer.succeed(name) return self.resolver.getHostByName(name, timeout) # Installation. # IReactorCore def stop(self): """ See twisted.internet.interfaces.IReactorCore.stop. """ if self._stopped: raise error.ReactorNotRunning( "Can't stop reactor that isn't running.") self._stopped = True self._justStopped = True self._startedBefore = True def crash(self): """ See twisted.internet.interfaces.IReactorCore.crash. Reset reactor state tracking attributes and re-initialize certain state-transition helpers which were set up in C{__init__} but later destroyed (through use). """ self._started = False self.running = False self.addSystemEventTrigger( 'during', 'startup', self._reallyStartRunning) def sigInt(self, *args): """Handle a SIGINT interrupt. 
""" log.msg("Received SIGINT, shutting down.") self.callFromThread(self.stop) def sigBreak(self, *args): """Handle a SIGBREAK interrupt. """ log.msg("Received SIGBREAK, shutting down.") self.callFromThread(self.stop) def sigTerm(self, *args): """Handle a SIGTERM interrupt. """ log.msg("Received SIGTERM, shutting down.") self.callFromThread(self.stop) def disconnectAll(self): """Disconnect every reader, and writer in the system. """ selectables = self.removeAll() for reader in selectables: log.callWithLogger(reader, reader.connectionLost, failure.Failure(main.CONNECTION_LOST)) def iterate(self, delay=0): """See twisted.internet.interfaces.IReactorCore.iterate. """ self.runUntilCurrent() self.doIteration(delay) def fireSystemEvent(self, eventType): """See twisted.internet.interfaces.IReactorCore.fireSystemEvent. """ event = self._eventTriggers.get(eventType) if event is not None: event.fireEvent() def addSystemEventTrigger(self, _phase, _eventType, _f, *args, **kw): """See twisted.internet.interfaces.IReactorCore.addSystemEventTrigger. """ assert callable(_f), "%s is not callable" % _f if _eventType not in self._eventTriggers: self._eventTriggers[_eventType] = _ThreePhaseEvent() return (_eventType, self._eventTriggers[_eventType].addTrigger( _phase, _f, *args, **kw)) def removeSystemEventTrigger(self, triggerID): """See twisted.internet.interfaces.IReactorCore.removeSystemEventTrigger. """ eventType, handle = triggerID self._eventTriggers[eventType].removeTrigger(handle) def callWhenRunning(self, _callable, *args, **kw): """See twisted.internet.interfaces.IReactorCore.callWhenRunning. """ if self.running: _callable(*args, **kw) else: return self.addSystemEventTrigger('after', 'startup', _callable, *args, **kw) def startRunning(self): """ Method called when reactor starts: do some initialization and fire startup events. Don't call this directly, call reactor.run() instead: it should take care of calling this. This method is somewhat misnamed. 
The reactor will not necessarily be in the running state by the time this method returns. The only guarantee is that it will be on its way to the running state. """ if self._started: raise error.ReactorAlreadyRunning() if self._startedBefore: raise error.ReactorNotRestartable() self._started = True self._stopped = False threadable.registerAsIOThread() self.fireSystemEvent('startup') def _reallyStartRunning(self): """ Method called to transition to the running state. This should happen in the I{during startup} event trigger phase. """ self.running = True # IReactorTime seconds = staticmethod(runtimeSeconds) def callLater(self, _seconds, _f, *args, **kw): """See twisted.internet.interfaces.IReactorTime.callLater. """ assert callable(_f), "%s is not callable" % _f assert sys.maxint >= _seconds >= 0, \ "%s is not greater than or equal to 0 seconds" % (_seconds,) tple = DelayedCall(self.seconds() + _seconds, _f, args, kw, self._cancelCallLater, self._moveCallLaterSooner, seconds=self.seconds) self._newTimedCalls.append(tple) return tple def _moveCallLaterSooner(self, tple): # Linear time find: slow. heap = self._pendingTimedCalls try: pos = heap.index(tple) # Move elt up the heap until it rests at the right place. elt = heap[pos] while pos != 0: parent = (pos-1) // 2 if heap[parent] <= elt: break # move parent down heap[pos] = heap[parent] pos = parent heap[pos] = elt except ValueError: # element was not found in heap - oh well... pass def _cancelCallLater(self, tple): self._cancellations+=1 def getDelayedCalls(self): """Return all the outstanding delayed calls in the system. They are returned in no particular order. 
This method is not efficient -- it is really only meant for test cases.""" return [x for x in (self._pendingTimedCalls + self._newTimedCalls) if not x.cancelled] def _insertNewDelayedCalls(self): for call in self._newTimedCalls: if call.cancelled: self._cancellations-=1 else: call.activate_delay() heappush(self._pendingTimedCalls, call) self._newTimedCalls = [] def timeout(self): # insert new delayed calls to make sure to include them in timeout value self._insertNewDelayedCalls() if not self._pendingTimedCalls: return None return max(0, self._pendingTimedCalls[0].time - self.seconds()) def runUntilCurrent(self): """Run all pending timed calls. """ if self.threadCallQueue: # Keep track of how many calls we actually make, as we're # making them, in case another call is added to the queue # while we're in this loop. count = 0 total = len(self.threadCallQueue) for (f, a, kw) in self.threadCallQueue: try: f(*a, **kw) except: log.err() count += 1 if count == total: break del self.threadCallQueue[:count] if self.threadCallQueue: self.wakeUp() # insert new delayed calls now self._insertNewDelayedCalls() now = self.seconds() while self._pendingTimedCalls and (self._pendingTimedCalls[0].time <= now): call = heappop(self._pendingTimedCalls) if call.cancelled: self._cancellations-=1 continue if call.delayed_time > 0: call.activate_delay() heappush(self._pendingTimedCalls, call) continue try: call.called = 1 call.func(*call.args, **call.kw) except: log.deferr() if hasattr(call, "creator"): e = "\n" e += " C: previous exception occurred in " + \ "a DelayedCall created here:\n" e += " C:" e += "".join(call.creator).rstrip().replace("\n","\n C:") e += "\n" log.msg(e) if (self._cancellations > 50 and self._cancellations > len(self._pendingTimedCalls) >> 1): self._cancellations = 0 self._pendingTimedCalls = [x for x in self._pendingTimedCalls if not x.cancelled] heapify(self._pendingTimedCalls) if self._justStopped: self._justStopped = False self.fireSystemEvent("shutdown") # 
IReactorProcess def _checkProcessArgs(self, args, env): """ Check for valid arguments and environment to spawnProcess. @return: A two element tuple giving values to use when creating the process. The first element of the tuple is a C{list} of C{str} giving the values for argv of the child process. The second element of the tuple is either C{None} if C{env} was C{None} or a C{dict} mapping C{str} environment keys to C{str} environment values. """ # Any unicode string which Python would successfully implicitly # encode to a byte string would have worked before these explicit # checks were added. Anything which would have failed with a # UnicodeEncodeError during that implicit encoding step would have # raised an exception in the child process and that would have been # a pain in the butt to debug. # # So, we will explicitly attempt the same encoding which Python # would implicitly do later. If it fails, we will report an error # without ever spawning a child process. If it succeeds, we'll save # the result so that Python doesn't need to do it implicitly later. # # For any unicode which we can actually encode, we'll also issue a # deprecation warning, because no one should be passing unicode here # anyway. # # -exarkun defaultEncoding = sys.getdefaultencoding() # Common check function def argChecker(arg): """ Return either a str or None. If the given value is not allowable for some reason, None is returned. Otherwise, a possibly different object which should be used in place of arg is returned. This forces unicode encoding to happen now, rather than implicitly later. 
""" if isinstance(arg, unicode): try: arg = arg.encode(defaultEncoding) except UnicodeEncodeError: return None warnings.warn( "Argument strings and environment keys/values passed to " "reactor.spawnProcess should be str, not unicode.", category=DeprecationWarning, stacklevel=4) if isinstance(arg, str) and '\0' not in arg: return arg return None # Make a few tests to check input validity if not isinstance(args, (tuple, list)): raise TypeError("Arguments must be a tuple or list") outputArgs = [] for arg in args: arg = argChecker(arg) if arg is None: raise TypeError("Arguments contain a non-string value") else: outputArgs.append(arg) outputEnv = None if env is not None: outputEnv = {} for key, val in env.iteritems(): key = argChecker(key) if key is None: raise TypeError("Environment contains a non-string key") val = argChecker(val) if val is None: raise TypeError("Environment contains a non-string value") outputEnv[key] = val return outputArgs, outputEnv # IReactorThreads if platform.supportsThreads(): threadpool = None # ID of the trigger starting the threadpool _threadpoolStartupID = None # ID of the trigger stopping the threadpool threadpoolShutdownID = None def _initThreads(self): self.usingThreads = True self.resolver = ThreadedResolver(self) def callFromThread(self, f, *args, **kw): """ See L{twisted.internet.interfaces.IReactorThreads.callFromThread}. """ assert callable(f), "%s is not callable" % (f,) # lists are thread-safe in CPython, but not in Jython # this is probably a bug in Jython, but until fixed this code # won't work in Jython. self.threadCallQueue.append((f, args, kw)) self.wakeUp() def _initThreadPool(self): """ Create the threadpool accessible with callFromThread. 
""" from twisted.python import threadpool self.threadpool = threadpool.ThreadPool( 0, 10, 'twisted.internet.reactor') self._threadpoolStartupID = self.callWhenRunning( self.threadpool.start) self.threadpoolShutdownID = self.addSystemEventTrigger( 'during', 'shutdown', self._stopThreadPool) def _uninstallHandler(self): pass def _stopThreadPool(self): """ Stop the reactor threadpool. This method is only valid if there is currently a threadpool (created by L{_initThreadPool}). It is not intended to be called directly; instead, it will be called by a shutdown trigger created in L{_initThreadPool}. """ triggers = [self._threadpoolStartupID, self.threadpoolShutdownID] for trigger in filter(None, triggers): try: self.removeSystemEventTrigger(trigger) except ValueError: pass self._threadpoolStartupID = None self.threadpoolShutdownID = None self.threadpool.stop() self.threadpool = None def getThreadPool(self): """ See L{twisted.internet.interfaces.IReactorThreads.getThreadPool}. """ if self.threadpool is None: self._initThreadPool() return self.threadpool def callInThread(self, _callable, *args, **kwargs): """ See L{twisted.internet.interfaces.IReactorThreads.callInThread}. """ self.getThreadPool().callInThread(_callable, *args, **kwargs) def suggestThreadPoolSize(self, size): """ See L{twisted.internet.interfaces.IReactorThreads.suggestThreadPoolSize}. """ self.getThreadPool().adjustPoolsize(maxthreads=size) else: # This is for signal handlers. def callFromThread(self, f, *args, **kw): assert callable(f), "%s is not callable" % (f,) # See comment in the other callFromThread implementation. self.threadCallQueue.append((f, args, kw)) if platform.supportsThreads(): classImplements(ReactorBase, IReactorThreads) class BaseConnector(styles.Ephemeral): """Basic implementation of connector. 
State can be: "connecting", "connected", "disconnected" """ implements(IConnector) timeoutID = None factoryStarted = 0 def __init__(self, factory, timeout, reactor): self.state = "disconnected" self.reactor = reactor self.factory = factory self.timeout = timeout def disconnect(self): """Disconnect whatever our state is.""" if self.state == 'connecting': self.stopConnecting() elif self.state == 'connected': self.transport.loseConnection() def connect(self): """Start connection to remote server.""" if self.state != "disconnected": raise RuntimeError, "can't connect in this state" self.state = "connecting" if not self.factoryStarted: self.factory.doStart() self.factoryStarted = 1 self.transport = transport = self._makeTransport() if self.timeout is not None: self.timeoutID = self.reactor.callLater(self.timeout, transport.failIfNotConnected, error.TimeoutError()) self.factory.startedConnecting(self) def stopConnecting(self): """Stop attempting to connect.""" if self.state != "connecting": raise error.NotConnectingError, "we're not trying to connect" self.state = "disconnected" self.transport.failIfNotConnected(error.UserError()) del self.transport def cancelTimeout(self): if self.timeoutID is not None: try: self.timeoutID.cancel() except ValueError: pass del self.timeoutID def buildProtocol(self, addr): self.state = "connected" self.cancelTimeout() return self.factory.buildProtocol(addr) def connectionFailed(self, reason): self.cancelTimeout() self.transport = None self.state = "disconnected" self.factory.clientConnectionFailed(self, reason) if self.state == "disconnected": # factory hasn't called our connect() method self.factory.doStop() self.factoryStarted = 0 def connectionLost(self, reason): self.state = "disconnected" self.factory.clientConnectionLost(self, reason) if self.state == "disconnected": # factory hasn't called our connect() method self.factory.doStop() self.factoryStarted = 0 def getDestination(self): raise NotImplementedError( 
reflect.qual(self.__class__) + " did not implement " "getDestination") class BasePort(abstract.FileDescriptor): """Basic implementation of a ListeningPort. Note: This does not actually implement IListeningPort. """ addressFamily = None socketType = None def createInternetSocket(self): s = socket.socket(self.addressFamily, self.socketType) s.setblocking(0) fdesc._setCloseOnExec(s.fileno()) return s def doWrite(self): """Raises a RuntimeError""" raise RuntimeError, "doWrite called on a %s" % reflect.qual(self.__class__) class _SignalReactorMixin(object): """ Private mixin to manage signals: it installs signal handlers at start time, and define run method. It can only be used mixed in with L{ReactorBase}, and has to be defined first in the inheritance (so that method resolution order finds startRunning first). @type _installSignalHandlers: C{bool} @ivar _installSignalHandlers: A flag which indicates whether any signal handlers will be installed during startup. This includes handlers for SIGCHLD to monitor child processes, and SIGINT, SIGTERM, and SIGBREAK to stop the reactor. """ _installSignalHandlers = False def _handleSignals(self): """ Install the signal handlers for the Twisted event loop. """ try: import signal except ImportError: log.msg("Warning: signal module unavailable -- " "not installing signal handlers.") return if signal.getsignal(signal.SIGINT) == signal.default_int_handler: # only handle if there isn't already a handler, e.g. for Pdb. signal.signal(signal.SIGINT, self.sigInt) signal.signal(signal.SIGTERM, self.sigTerm) # Catch Ctrl-Break in windows if hasattr(signal, "SIGBREAK"): signal.signal(signal.SIGBREAK, self.sigBreak) def startRunning(self, installSignalHandlers=True): """ Extend the base implementation in order to remember whether signal handlers should be installed later. 
@type installSignalHandlers: C{bool} @param installSignalHandlers: A flag which, if set, indicates that handlers for a number of (implementation-defined) signals should be installed during startup. """ self._installSignalHandlers = installSignalHandlers ReactorBase.startRunning(self) def _reallyStartRunning(self): """ Extend the base implementation by also installing signal handlers, if C{self._installSignalHandlers} is true. """ ReactorBase._reallyStartRunning(self) if self._installSignalHandlers: # Make sure this happens before after-startup events, since the # expectation of after-startup is that the reactor is fully # initialized. Don't do it right away for historical reasons # (perhaps some before-startup triggers don't want there to be a # custom SIGCHLD handler so that they can run child processes with # some blocking api). self._handleSignals() def run(self, installSignalHandlers=True): self.startRunning(installSignalHandlers=installSignalHandlers) self.mainLoop() def mainLoop(self): while self._started: try: while self._started: # Advance simulation time in delayed event # processors. self.runUntilCurrent() t2 = self.timeout() t = self.running and t2 self.doIteration(t) except: log.msg("Unexpected error in main loop.") log.err() else: log.msg('Main loop terminated.') __all__ = []
gpl-2.0
ryansnowboarder/zulip
zerver/lib/test_runner.py
2
3482
from __future__ import print_function from django.test.runner import DiscoverRunner from zerver.lib.cache import bounce_key_prefix_for_testing from zerver.views.messages import get_sqlalchemy_connection import os import time import traceback import unittest def slow(expected_run_time, slowness_reason): ''' This is a decorate that annotates a test as being "known to be slow." The decorator will set expected_run_time and slowness_reason as atributes of the function. Other code can use this annotation as needed, e.g. to exclude these tests in "fast" mode. ''' def decorator(f): f.expected_run_time = expected_run_time f.slowness_reason = slowness_reason return f return decorator def is_known_slow_test(test_method): return hasattr(test_method, 'slowness_reason') def full_test_name(test): test_module = test.__module__ test_class = test.__class__.__name__ test_method = test._testMethodName return '%s.%s.%s' % (test_module, test_class, test_method) def get_test_method(test): return getattr(test, test._testMethodName) def enforce_timely_test_completion(test_method, test_name, delay): if hasattr(test_method, 'expected_run_time'): # Allow for tests to run 50% slower than normal due # to random variations. max_delay = 1.5 * test_method.expected_run_time else: max_delay = 0.180 # seconds # Further adjustments for slow laptops: max_delay = max_delay * 3 if delay > max_delay: print('Test is TOO slow: %s (%.3f s)' % (test_name, delay)) def fast_tests_only(): return "FAST_TESTS_ONLY" in os.environ def run_test(test): failed = False test_method = get_test_method(test) if fast_tests_only() and is_known_slow_test(test_method): return failed test_name = full_test_name(test) bounce_key_prefix_for_testing(test_name) print('Running', test_name) if not hasattr(test, "_pre_setup"): print("somehow the test doesn't have _pre_setup; it may be an import fail.") print("Here's a debugger. 
Good luck!") import pdb; pdb.set_trace() test._pre_setup() start_time = time.time() test.setUp() try: test_method() except unittest.SkipTest: pass except Exception: failed = True traceback.print_exc() test.tearDown() delay = time.time() - start_time enforce_timely_test_completion(test_method, test_name, delay) test._post_teardown() return failed class Runner(DiscoverRunner): def __init__(self, *args, **kwargs): DiscoverRunner.__init__(self, *args, **kwargs) def run_suite(self, suite, fatal_errors=None): failed = False for test in suite: if run_test(test): failed = True if fatal_errors: return failed return failed def run_tests(self, test_labels, extra_tests=None, **kwargs): self.setup_test_environment() suite = self.build_suite(test_labels, extra_tests) # We have to do the next line to avoid flaky scenarios where we # run a single test and getting an SA connection causes data from # a Django connection to be rolled back mid-test. get_sqlalchemy_connection() failed = self.run_suite(suite, fatal_errors=kwargs.get('fatal_errors')) self.teardown_test_environment() return failed print()
apache-2.0
rvalyi/OpenUpgrade
openerp/addons/base/tests/test_misc.py
393
1111
import unittest2

from openerp.tools import misc


class test_countingstream(unittest2.TestCase):
    """Exercise the index bookkeeping of misc.CountingStream."""

    def test_empty_stream(self):
        # Index starts at -1 and advances to 0 after the first (exhausted) read.
        stream = misc.CountingStream(iter([]))
        self.assertEqual(stream.index, -1)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)

    def test_single(self):
        # One element: index goes -1 -> (yield 0) -> 1 once exhausted.
        stream = misc.CountingStream(xrange(1))
        self.assertEqual(stream.index, -1)
        self.assertEqual(next(stream, None), 0)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 1)

    def test_full(self):
        # After fully consuming 42 items the index equals the item count.
        stream = misc.CountingStream(xrange(42))
        for _ in stream:
            pass
        self.assertEqual(stream.index, 42)

    def test_repeated(self):
        """ Once the CountingStream has stopped iterating, the index should
        not increase anymore (the internal state should not be allowed to
        change)
        """
        stream = misc.CountingStream(iter([]))
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)
        self.assertIsNone(next(stream, None))
        self.assertEqual(stream.index, 0)


if __name__ == '__main__':
    unittest2.main()
agpl-3.0
caveman-dick/ansible
lib/ansible/modules/packaging/os/dpkg_selections.py
29
2142
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: dpkg_selections
short_description: Dpkg package selection selections
description:
    - Change dpkg package selection state via --get-selections and --set-selections.
version_added: "2.0"
author: Brian Brazil <[email protected]>
options:
    name:
        description:
            - Name of the package
        required: true
    selection:
        description:
            - The selection state to set the package to.
        choices: [ 'install', 'hold', 'deinstall', 'purge' ]
        required: true
notes:
    - This module won't cause any packages to be installed/removed/purged, use the C(apt) module for that.
'''
EXAMPLES = '''
# Prevent python from being upgraded.
- dpkg_selections:
    name: python
    selection: hold
'''


def main():
    """Reconcile a package's dpkg selection state with the requested one."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            selection=dict(choices=['install', 'hold', 'deinstall', 'purge'])
        ),
        supports_check_mode=True,
    )

    dpkg_path = module.get_bin_path('dpkg', True)

    pkg_name = module.params['name']
    wanted = module.params['selection']

    # Get current settings.  dpkg prints "<name>\t<state>" for a known
    # package and nothing at all for an unknown one.
    rc, out, err = module.run_command([dpkg_path, '--get-selections', pkg_name],
                                      check_rc=True)
    current = out.split()[1] if out else 'not present'

    changed = current != wanted

    # In check mode (or when already in the desired state) report only.
    if module.check_mode or not changed:
        module.exit_json(changed=changed, before=current, after=wanted)

    module.run_command([dpkg_path, '--set-selections'],
                       data="%s %s" % (pkg_name, wanted),
                       check_rc=True)
    module.exit_json(changed=changed, before=current, after=wanted)


from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
gpl-3.0
adamkh/Arduino
arduino-core/src/processing/app/i18n/python/requests/packages/urllib3/__init__.py
309
1692
# urllib3/__init__.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
urllib3 - Thread-safe connection pooling and re-using.
"""

__author__ = 'Andrey Petrov ([email protected])'
__license__ = 'MIT'
__version__ = 'dev'


from .connectionpool import (
    HTTPConnectionPool,
    HTTPSConnectionPool,
    connection_from_url
)

from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util import make_headers, get_host


# Set default logging handler to avoid "No handler found" warnings.
import logging
try:
    # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        # Minimal stand-in for pre-2.7 Pythons: swallow every record.
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    package_logger = logging.getLogger(__name__)
    stderr_handler = logging.StreamHandler()
    stderr_handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    package_logger.addHandler(stderr_handler)
    package_logger.setLevel(level)
    package_logger.debug('Added an stderr logging handler to logger: %s' % __name__)
    return stderr_handler

# ... Clean up.
del NullHandler
lgpl-2.1