function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def test_invalid_type(self):
    """dumps() must reject unsupported value types in every format."""
    payload = [object()]  # object() is not serializable in any plist format
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            self.assertRaises(TypeError, plistlib.dumps, payload, fmt=fmt)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_bytes(self):
    """A dumps()/loads() round trip must preserve the dictionary content."""
    original = self._create()
    encoded = plistlib.dumps(original)
    decoded = plistlib.loads(encoded)
    # loads() should produce a plain dict, not the legacy _InternalDict.
    self.assertNotIsInstance(original, plistlib._InternalDict)
    self.assertEqual(dict(original), dict(decoded))
    # Re-encoding the decoded value must be byte-identical.
    data2 = plistlib.dumps(decoded)
    self.assertEqual(encoded, data2)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_indentation_dict(self): data = {'1': {'2': {'3': {'4': {'5': {'6': {'7': {'8': {'9': b'aaaaaa'}}}}}}}}} self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_appleformatting(self):
    """Re-serialising Apple's reference plists must reproduce them exactly."""
    for use_builtin_types in (True, False):
        for fmt in ALL_FORMATS:
            with self.subTest(fmt=fmt, use_builtin_types=use_builtin_types):
                parsed = plistlib.loads(TESTDATA[fmt],
                                        use_builtin_types=use_builtin_types)
                regenerated = plistlib.dumps(parsed, fmt=fmt)
                self.assertEqual(regenerated, TESTDATA[fmt],
                                 "generated data was not identical to Apple's output")
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_bytesio(self):
    """dump()/load() must work on binary file-like objects in every format."""
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            buffer = BytesIO()
            original = self._create(fmt=fmt)
            plistlib.dump(original, buffer, fmt=fmt)
            # Reload with an explicit format argument...
            reloaded = plistlib.load(BytesIO(buffer.getvalue()), fmt=fmt)
            self.assertEqual(dict(original), dict(reloaded))
            # ...and again with format auto-detection.
            reloaded = plistlib.load(BytesIO(buffer.getvalue()))
            self.assertEqual(dict(original), dict(reloaded))
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_keysort(self):
    """sort_keys must control whether dict keys are emitted sorted."""
    ordered = collections.OrderedDict()
    ordered['b'] = 1
    ordered['a'] = 2
    ordered['c'] = 3
    for fmt in ALL_FORMATS:
        for sort_keys in (False, True):
            with self.subTest(fmt=fmt, sort_keys=sort_keys):
                data = plistlib.dumps(ordered, fmt=fmt, sort_keys=sort_keys)
                reloaded = plistlib.loads(data,
                                          dict_type=collections.OrderedDict)
                self.assertEqual(dict(ordered), dict(reloaded))
                # Key order: alphabetical when sorted, insertion otherwise.
                expected = ['a', 'b', 'c'] if sort_keys else ['b', 'a', 'c']
                self.assertEqual(list(reloaded.keys()), expected)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_skipkeys(self):
    """skipkeys=True must silently drop dictionary keys that are not strings.

    Fix: removed the dead `data = fp.getvalue()` assignment from the
    original -- the value was never used (the subsequent loads() call
    re-read fp.getvalue() directly).
    """
    pl = {
        42: 'aNumber',       # non-string key: dropped when skipkeys=True
        'snake': 'aWord',
    }
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            data = plistlib.dumps(
                pl, fmt=fmt, skipkeys=True, sort_keys=False)
            pl2 = plistlib.loads(data)
            self.assertEqual(pl2, {'snake': 'aWord'})

            fp = BytesIO()
            plistlib.dump(
                pl, fp, fmt=fmt, skipkeys=True, sort_keys=False)
            pl2 = plistlib.loads(fp.getvalue())
            self.assertEqual(pl2, {'snake': 'aWord'})
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_list_members(self):
    """Equal lists in one plist must decode to distinct list objects."""
    payload = {
        'first': [1, 2],
        'second': [1, 2],
        'third': [3, 4],
    }
    for fmt in ALL_FORMATS:
        with self.subTest(fmt=fmt):
            reloaded = plistlib.loads(plistlib.dumps(payload, fmt=fmt))
            self.assertEqual(reloaded, {
                'first': [1, 2],
                'second': [1, 2],
                'third': [3, 4],
            })
            # 'first' and 'second' compare equal but must not be aliased.
            self.assertIsNot(reloaded['first'], reloaded['second'])
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_controlcharacters(self): for i in range(128): c = chr(i) testString = "string containing %s" % c if i >= 32 or c in "\r\n\t": # \r, \n and \t are the only legal control chars in XML plistlib.dumps(testString, fmt=plistlib.FMT_XML) else: self.assertRaises(ValueError, plistlib.dumps, testString)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_invalidarray(self): for i in ["<key>key inside an array</key>", "<key>key inside an array2</key><real>3</real>", "<true/><key>key inside an array3</key>"]: self.assertRaises(ValueError, plistlib.loads, ("<plist><array>%s</array></plist>"%i).encode())
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_invalidinteger(self): self.assertRaises(ValueError, plistlib.loads, b"<plist><integer>not integer</integer></plist>")
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_xml_encodings(self):
    """XML plists must parse correctly in UTF-8 and UTF-16 (with BOM)."""
    base = TESTDATA[plistlib.FMT_XML]
    cases = [
        (b'utf-8', 'utf-8', codecs.BOM_UTF8),
        (b'utf-16', 'utf-16-le', codecs.BOM_UTF16_LE),
        (b'utf-16', 'utf-16-be', codecs.BOM_UTF16_BE),
        # Expat does not support UTF-32
        # (b'utf-32', 'utf-32-le', codecs.BOM_UTF32_LE),
        # (b'utf-32', 'utf-32-be', codecs.BOM_UTF32_BE),
    ]
    for xml_encoding, encoding, bom in cases:
        pl = self._create(fmt=plistlib.FMT_XML)
        with self.subTest(encoding=encoding):
            # Swap the declared encoding, then transcode and prepend BOM.
            data = base.replace(b'UTF-8', xml_encoding)
            data = bom + data.decode('utf-8').encode(encoding)
            pl2 = plistlib.loads(data)
            self.assertEqual(dict(pl), dict(pl2))
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_io_deprecated(self):
    # Verify the deprecated readPlist()/writePlist() file-based API still
    # round-trips data and emits DeprecationWarning on each call.
    # NOTE(review): this legacy API (and plistlib._InternalDict /
    # plistlib.Data) was removed in Python 3.9 -- targets older versions.
    pl_in = {
        'key': 42,
        'sub': {
            'key': 9,
            'alt': 'value',
            'data': b'buffer',
        }
    }
    # The legacy API wraps dicts in _InternalDict and bytes in Data.
    pl_out = plistlib._InternalDict({
        'key': 42,
        'sub': plistlib._InternalDict({
            'key': 9,
            'alt': 'value',
            'data': plistlib.Data(b'buffer'),
        })
    })
    self.addCleanup(support.unlink, support.TESTFN)
    # Path-based write/read must warn and round-trip.
    with self.assertWarns(DeprecationWarning):
        plistlib.writePlist(pl_in, support.TESTFN)
    with self.assertWarns(DeprecationWarning):
        pl2 = plistlib.readPlist(support.TESTFN)
    self.assertEqual(pl_out, pl2)
    os.unlink(support.TESTFN)
    # File-object-based write/read must behave identically.
    with open(support.TESTFN, 'wb') as fp:
        with self.assertWarns(DeprecationWarning):
            plistlib.writePlist(pl_in, fp)
    with open(support.TESTFN, 'rb') as fp:
        with self.assertWarns(DeprecationWarning):
            pl2 = plistlib.readPlist(fp)
    self.assertEqual(pl_out, pl2)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def test_dataobject_deprecated(self):
    # plistlib.Data serialises to plain bytes; loading with
    # use_builtin_types=False (or via the deprecated readPlistFromBytes)
    # wraps the bytes back into Data objects.
    in_data = {'key': plistlib.Data(b'hello')}
    out_data = {'key': b'hello'}
    buf = plistlib.dumps(in_data)
    # Default load: builtin bytes, so equal to out_data only.
    cur = plistlib.loads(buf)
    self.assertEqual(cur, out_data)
    self.assertNotEqual(cur, in_data)
    # use_builtin_types=False restores Data wrappers.
    cur = plistlib.loads(buf, use_builtin_types=False)
    self.assertNotEqual(cur, out_data)
    self.assertEqual(cur, in_data)
    # The deprecated bytes loader must warn and also return Data.
    with self.assertWarns(DeprecationWarning):
        cur = plistlib.readPlistFromBytes(buf)
    self.assertNotEqual(cur, out_data)
    self.assertEqual(cur, in_data)
ArcherSys/ArcherSys
[ 3, 2, 3, 16, 1412356452 ]
def take_snapshot(options):
    """Create an Elasticsearch snapshot and prune old ones.

    The snapshot name falls back to 'all_' + the current hour stamp when
    ``options.snapshot`` is falsy.  After a successful create, all but the
    newest ``options.keep`` snapshots in the repository are deleted.

    Fixes: replaced the pre-ternary ``x and x or y`` idiom with ``x or y``,
    and the silent ``except TransportError: pass`` now logs the failure
    (still best-effort -- it does not re-raise).
    """
    esm = ElasticsearchSnapshotManager(options)
    sh = esm.sh
    snapshot = options.snapshot or 'all_' + time.strftime('%Y%m%d%H')
    snapdef = {
        "include_global_state": True
    }
    if options.indices:
        snapdef['indices'] = ','.join(options.indices)
    try:
        sh.create(repository=options.repository, snapshot=snapshot,
                  body=json.dumps(snapdef), wait_for_completion=options.wait,
                  request_timeout=7200)
        # Housekeeping - delete old snapshots
        snapshots = sh.get(repository=options.repository, snapshot="_all",
                           request_timeout=120)['snapshots']
        num_snaps = len(snapshots)
        if num_snaps > options.keep:
            up_to = num_snaps - options.keep
            logger.info('TOTAL: %d - Will delete 1 -> %d' % (num_snaps, up_to + 1))
            # Snapshots are returned oldest-first; drop the leading ones.
            for snap in snapshots[0:up_to]:
                sh.delete(repository=options.repository,
                          snapshot=snap['snapshot'], request_timeout=3600)
                logger.info('Deleted snapshot %s' % snap['snapshot'])
    except exceptions.TransportError as e:
        # Was a bare `pass`; keep best-effort semantics but record why.
        logger.error('Snapshot operation failed: %s' % e)
DomainGroupOSS/elasticsearch-snapshots
[ 23, 13, 23, 1, 1442386334 ]
def sslproxy_mock(url, request): return """<table class="table table-striped table-bordered" cellspacing="0" width="100%" id="proxylisttable"> <thead> <tr> <th>IP Address</th> <th>Port</th> <th>Code</th> <th class='hm'>Country</th> <th>Anonymity</th> <th class='hm'>Google</th> <th class='hx'>Https</th> <th class='hm'>Last Checked</th> </tr> </thead> <tbody> <tr> <td>24.211.89.146</td> <td>8080</td> <td>US</td> <td class='hm'>United States</td> <td>elite proxy</td> <td class='hm'>no</td> <td class='hx'>yes</td> <td class='hm'>8 seconds ago</td> </tr> <tr> <td>187.84.222.153</td> <td>80</td> <td>BR</td> <td class='hm'>Brazil</td> <td>anonymous</td> <td class='hm'>no</td> <td class='hx'>yes</td> <td class='hm'>1 minute ago</td> </tr> <tr> <td>41.193.238.249</td> <td>8080</td> <td>ZA</td> <td class='hm'>South Africa</td> <td>elite proxy</td> <td class='hm'>no</td> <td class='hx'>yes</td> <td class='hm'>1 minute ago</td> </tr> </tbody> <tfoot> <tr> <th class="input"><input type="text" /></th> <th></th><th></th> <th class='hm'></th> <th></th> <th class='hm'></th> <th class='hx'></th> <th class='hm'></th> </tr> </tfoot>
pgaref/HTTP_Request_Randomizer
[ 140, 53, 140, 23, 1446231372 ]
def free_proxy_mock(url, request): return """<table border="0" cellpadding="0" cellspacing="0" id="proxylisttable"
pgaref/HTTP_Request_Randomizer
[ 140, 53, 140, 23, 1446231372 ]
def proxy_for_eu_mock(url, request): return """<table class="proxy_list"> <tr> <th>IP</th> <th>Port</th> <th>Country</th> <th>Anon</th> <th>Speed</th> <th> Check</th> <th>Cookie/POST</th> </tr> <tr> <td>107.151.136.222</td> <td>80</td> <td>United States</td> <td>HIGH</td> <td>1.643</td> <td>2016-04-12 17:02:43</td> <td>Yes/Yes</td> </tr> <tr> <td>37.187.253.39</td> <td>8115</td> <td>France</td> <td>HIGH</td> <td>12.779</td> <td>2016-04-12 14:36:18</td> <td>Yes/Yes</td> </tr>
pgaref/HTTP_Request_Randomizer
[ 140, 53, 140, 23, 1446231372 ]
def rebro_weebly_mock(url, request): return """<div class="paragraph" style="text-align:left;"><strong><font color="#3ab890" size="3"><font color="#d5d5d5">IP:Port</font></font></strong><br/><font size="2"><strong><font color="#33a27f">213.149.105.12:8080<br/>119.188.46.42:8080</font></strong></font><br/><span></span>
pgaref/HTTP_Request_Randomizer
[ 140, 53, 140, 23, 1446231372 ]
def prem_mock(url, request): return """ <head> <script src="/js/test.js"></script> </head> <div id="proxylist">\n <tr class="anon">\n <th><a href="/list/ip-address-01.htm" title="Proxy List sorted by ip address">IP address</a></th> \n <th><a href="/list/" title="Proxy List sorted by anonymity level">Anonymity</a></th> \n <th><a href="/list/time-01.htm" title="Proxy List sorted by updated time">Checked</a></th> \n <th><a href="/list/type-01.htm" title="Proxy list sorted by country">Country</a></th> \n <th><dfn title="City or State\\Region ">City</dfn></th> \n <th><dfn title="Internet Service Provider">ISP</dfn></th> \n </tr> \n <div id="navbar"> <ul class="pagination"><li class="active"><a href="/list/">1</a></li><li><a href="02.htm">2</a></li></ul> </div> \n <tr class="anon"> <td data-label="IP:port "><span><input type="checkbox" name="proxyIp[]" value="191.252.61.28|r60e6"></span>191.252.61.28:<span class="r60e6"></span></td> <td data-label="Anonymity Type: ">high-anonymous</td> <td data-label="Checked: ">Apr-18, 17:18</td> <td data-label="Country: ">Brazil</td> <td data-label="City: ">S\xe3o Jos\xe9 Dos Campos</td> <td data-label="ISP: "><dfn title="Locaweb Servi\xe7os de Internet S/A">Locaweb Servi\xe7o...</dfn></td> </tr> \n <tr class="anon"> <td data-label="IP:port "><span><input type="checkbox" name="proxyIp[]" value="167.114.203.141|r63c5"></span>167.114.203.141:<span class="r63c5"></span></td> <td data-label="Anonymity Type: ">transparent</td> <td data-label="Checked: ">Apr-18, 13:22</td> <td data-label="Country: ">Canada</td> <td data-label="City: ">Montr\xe9al (QC)</td> <td data-label="ISP: ">OVH Hosting</td> </tr> \n <tr class="anon"> <td data-label="IP:port "><span><input type="checkbox" name="proxyIp[]" value="152.251.141.93|r63c5"></span>152.251.141.93:<span class="r63c5"></span></td> <td data-label="Anonymity Type: ">elite </td> <td data-label="Checked: ">Jul-16, 04:39</td> <td data-label="Country: ">Brazil</td> <td data-label="City: ">&nbsp;</td> 
<td data-label="ISP: ">Vivo</td> </tr> \n <tr><td colspan="6"><span><input type="checkbox" name="" value="" onclick="checkAll(this)"></span>Select All Proxies</td></tr>
pgaref/HTTP_Request_Randomizer
[ 140, 53, 140, 23, 1446231372 ]
def __init__(self, x: int, y: int):
    """Store the two point coordinates on the instance."""
    self.x = x
    self.y = y
sqlalchemy/sqlalchemy
[ 6899, 1088, 6899, 170, 1543289703 ]
def __repr__(self) -> str:
    """Debug representation mirroring the constructor call."""
    return "Point(x=%r, y=%r)" % (self.x, self.y)
sqlalchemy/sqlalchemy
[ 6899, 1088, 6899, 170, 1543289703 ]
def __ne__(self, other: Any) -> bool:
    """Invert __eq__ so both comparison operators stay consistent."""
    return not self.__eq__(other)
sqlalchemy/sqlalchemy
[ 6899, 1088, 6899, 170, 1543289703 ]
def main():
    """Read n from stdin, solve, and print each cycle as '<len> <v1 v2 ...>'."""
    if DEBUG:
        test()
    n = int(input())
    paths = cycles(n)
    print(len(paths))
    for path in paths:
        print('%d %s' % (len(path), ' '.join(str(v) for v in path)))
andreimaximov/algorithms
[ 100, 43, 100, 5, 1435729230 ]
def even(n):
    """Build the cycle set for a complete graph on an even number of vertices.

    Starts from the K_4 base case and grows two vertices at a time, using
    each edge incident to the new pair exactly twice.
    """
    assert n % 2 == 0
    # Base case for the complete graph with V = {1, 2, 3, 4}.
    result = [[1, 2, 3], [2, 3, 4], [3, 4, 1], [4, 1, 2]]
    for i in range(6, n + 1, 2):
        a, b = i, i - 1
        # Use edges (a, 1), (a, 2), (b, 1), (b, 2), (a, b) exactly twice each.
        result += [[a, 1, b], [a, 2, b], [a, 1, b, 2]]
        # The remaining vertices {3 .. i-2} form an even-sized set; pair
        # them off and add the 4-cycle a-c-b-d twice per pair (c, d).
        for c in range(3, i - 1, 2):
            d = c + 1
            result += [[a, c, b, d]] * 2
    return result
andreimaximov/algorithms
[ 100, 43, 100, 5, 1435729230 ]
def test():
    """Self-check: validate the cycles() solver across a range of sizes."""
    print('Testing...')
    for n in range(3, 300, 21):
        check(n, cycles(n))
    print('Tests pass!')
andreimaximov/algorithms
[ 100, 43, 100, 5, 1435729230 ]
def parent(self):
    """Fully-qualified name of the parent profile, or None when unset."""
    raw = self._values['parent']
    if raw is None:
        return None
    return fq_name(self.partition, raw)
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def streams(self):
    """Validated concurrent-stream count (1-256), or None when unset."""
    value = self._values['streams']
    if value is None:
        return None
    if not 1 <= value <= 256:
        raise F5ModuleError(
            "Streams value must be between 1 and 256"
        )
    return value
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def receive_window(self):
    """Validated receive window (16-128), or None when unset."""
    value = self._values['receive_window']
    if value is None:
        return None
    if not 16 <= value <= 128:
        raise F5ModuleError(
            "Receive Window value must be between 16 and 128"
        )
    return value
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def header_table_size(self):
    """Validated HPACK header table size (0-65535), or None when unset."""
    value = self._values['header_table_size']
    if value is None:
        return None
    if not 0 <= value <= 65535:
        raise F5ModuleError(
            "Header Table Size value must be between 0 and 65535"
        )
    return value
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def write_size(self):
    """Validated write size (2048-32768), or None when unset."""
    value = self._values['write_size']
    if value is None:
        return None
    if not 2048 <= value <= 32768:
        raise F5ModuleError(
            "Write Size value must be between 2048 and 32768"
        )
    return value
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def frame_size(self):
    """Validated frame size (1024-16384), or None when unset.

    Bug fix: the out-of-range error message previously said
    "Write Size" (copy-paste from write_size); it now names frame size.
    """
    frame = self._values['frame_size']
    if frame is None:
        return None
    if frame < 1024 or frame > 16384:
        raise F5ModuleError(
            "Frame Size value must be between 1024 and 16384"
        )
    return self._values['frame_size']
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def enforce_tls_requirements(self):
    """Map the user's boolean-ish value onto the API's enabled/disabled."""
    state = flatten_boolean(self._values['enforce_tls_requirements'])
    if state is None:
        return None
    return 'enabled' if state == 'yes' else 'disabled'
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def insert_header(self):
    """Map the user's boolean-ish value onto the API's enabled/disabled."""
    state = flatten_boolean(self._values['insert_header'])
    if state is None:
        return None
    return 'enabled' if state == 'yes' else 'disabled'
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def activation_modes(self):
    """User-supplied activation modes; an explicit empty list is rejected."""
    modes = self._values['activation_modes']
    if modes is None:
        return None
    if is_empty_list(modes):
        raise F5ModuleError(
            "Activation Modes cannot be empty, please provide a value"
        )
    return modes
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def to_return(self):
    # Build the dict of reportable parameters from self.returnables.
    result = {}
    try:
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
    except Exception:
        # NOTE(review): deliberate best-effort swallow -- any failing
        # property aborts the loop and the partially built (and then
        # unfiltered) dict is returned.  Consider narrowing or logging.
        pass
    return result
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def insert_header(self):
    """Translate the API's enabled/disabled back to Ansible's yes/no."""
    state = self._values['insert_header']
    if state is None:
        return None
    return 'yes' if state == 'enabled' else 'no'
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def enforce_tls_requirements(self):
    """Translate the API's enabled/disabled back to Ansible's yes/no."""
    state = self._values['enforce_tls_requirements']
    if state is None:
        return None
    return 'yes' if state == 'enabled' else 'no'
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def __init__(self, want, have=None):
    """Keep references to the desired and the current parameter sets."""
    self.want = want
    self.have = have
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def __default(self, param):
    """Generic compare: report the wanted value when it differs from, or
    is missing on, the current device configuration; None otherwise."""
    wanted = getattr(self.want, param)
    try:
        current = getattr(self.have, param)
        if wanted != current:
            return wanted
    except AttributeError:
        return wanted
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def description(self):
    """Difference handler for description; '' / 'none' both mean unset."""
    wanted = self.want.description
    if wanted is None:
        return None
    if wanted == '':
        # Clearing an already-unset description is not a change.
        if self.have.description is None or self.have.description == "none":
            return None
    if wanted != self.have.description:
        return wanted
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def __init__(self, *args, **kwargs):
    """Wire up the REST client and the want/have/changes parameter sets."""
    self.module = kwargs.get('module', None)
    self.client = F5RestClient(**self.module.params)
    self.want = ModuleParameters(params=self.module.params)
    self.have = ApiParameters()
    self.changes = UsableChanges()
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def _update_changed_options(self):
    """Diff want vs. have; stash any changes and report whether one exists."""
    diff = Difference(self.want, self.have)
    changed = {}
    for key in Parameters.updatables:
        result = diff.compare(key)
        if result is None:
            continue
        # A dict result carries several related changes at once.
        if isinstance(result, dict):
            changed.update(result)
        else:
            changed[key] = result
    if changed:
        self.changes = UsableChanges(params=changed)
        return True
    return False
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def _announce_deprecations(self, result):
    """Forward any queued deprecation warnings to Ansible."""
    for warning in result.pop('__warnings', []):
        self.client.module.deprecate(
            msg=warning['msg'],
            version=warning['version']
        )
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def absent(self):
    """Delete the resource if it exists; report whether anything happened."""
    if not self.exists():
        return False
    return self.remove()
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def should_update(self):
    """True when diffing produced at least one changed option."""
    return bool(self._update_changed_options())
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def create(self):
    """Stage the changes and create the profile (no device call in check mode)."""
    self._set_changed_options()
    if self.module.check_mode:
        return True
    self.create_on_device()
    return True
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def create_on_device(self):
    # POST the staged parameters to the LTM http2 profile collection and
    # return the created object's selfLink.  Raises F5ModuleError on a
    # non-JSON body or an in-band BIG-IP error code.
    params = self.changes.api_params()
    params['name'] = self.want.name
    params['partition'] = self.want.partition
    uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http2/".format(
        self.client.provider['server'],
        self.client.provider['server_port']
    )
    resp = self.client.api.post(uri, json=params)
    try:
        response = resp.json()
    except ValueError as ex:
        # Response body was not JSON at all.
        raise F5ModuleError(str(ex))
    # BIG-IP signals request errors in-band via a 'code' field.
    if 'code' in response and response['code'] in [400, 403, 404]:
        if 'message' in response:
            raise F5ModuleError(response['message'])
        else:
            raise F5ModuleError(resp.content)
    return response['selfLink']
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def remove_from_device(self):
    """DELETE the profile on the device; raise on any non-200 reply."""
    uri = "https://{0}:{1}/mgmt/tm/ltm/profile/http2/{2}".format(
        self.client.provider['server'],
        self.client.provider['server_port'],
        transform_name(self.want.partition, self.want.name)
    )
    response = self.client.api.delete(uri)
    if response.status != 200:
        raise F5ModuleError(response.content)
    return True
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def __init__(self):
    """Ansible argument spec for the BIG-IP LTM http2 profile module."""
    self.supports_check_mode = True
    argument_spec = dict(
        name=dict(required=True),
        parent=dict(),
        activation_modes=dict(
            type='list',
            elements='str',
            choices=[
                'alpn', 'always'
            ],
            # NOTE(review): 'mutually_exclusive' is normally a
            # module-level AnsibleModule option, not a per-parameter
            # spec key -- confirm this is actually enforced here.
            mutually_exclusive=[['always', 'alpn']],
        ),
        description=dict(),
        enforce_tls_requirements=dict(type='bool'),
        streams=dict(type='int'),
        idle_timeout=dict(type='int'),
        frame_size=dict(type='int'),
        header_table_size=dict(type='int'),
        insert_header=dict(type='bool'),
        insert_header_name=dict(),
        receive_window=dict(type='int'),
        write_size=dict(type='int'),
        state=dict(
            default='present',
            choices=['present', 'absent']
        ),
        partition=dict(
            default='Common',
            fallback=(env_fallback, ['F5_PARTITION'])
        )
    )
    # Merge the common F5 arguments with the module-specific ones.
    self.argument_spec = {}
    self.argument_spec.update(f5_argument_spec)
    self.argument_spec.update(argument_spec)
F5Networks/f5-ansible-modules
[ 357, 221, 357, 48, 1448045671 ]
def __init__(self):
    """ Start with an empty plugin registry """
    self.__plugins = {}
nacx/kahuna
[ 12, 8, 12, 1, 1314873676 ]
def call(self, plugin_name, command_name, args):
    """ Encapsulate the call into a context already loaded.

    Bug fix: the original fell through after printing help when the
    plugin or the command could not be found, then crashed on the
    unbound 'plugin'/'command' names.  Help paths now return early.
    """
    try:
        plugin = self.load_plugin(plugin_name)
    except KeyError:
        # Plugin not found, print generic help and bail out.
        self.help_all()
        return
    if not command_name:
        self.help(plugin)
        return
    try:
        command = plugin._commands()[command_name]
    except KeyError:
        # Command not found in plugin. Print only plugin help.
        self.help(plugin)
        return
    with opencontext(plugin):
        return command(args)
nacx/kahuna
[ 12, 8, 12, 1, 1314873676 ]
def help_all(self):
    """ Prints the help for all registered plugins """
    # Iterate plugins alphabetically; the trailing bare `print` is a
    # Python 2 statement emitting a blank separator line after each one.
    for name in sorted(__all__):
        plugin = self.load_plugin(name)
        self.help(plugin)
        print
nacx/kahuna
[ 12, 8, 12, 1, 1314873676 ]
def __init__(self):
    """ Create Cache object bound to the shared thread-local namespace. """
    self.namespace = THREAD_NAMESPACE
PeRDy/django-audit-tools
[ 8, 3, 8, 1, 1451763432 ]
def set_process(self, process):
    """ Record *process* as the current audit process on the namespace.

    :param process: Process object:
    :type process: :class:`audit_tools.audit.Process`
    """
    self.namespace.audit_current_process = process
PeRDy/django-audit-tools
[ 8, 3, 8, 1, 1451763432 ]
def set_last_access(self, access):
    """ Record *access* as the most recent audit access on the namespace.

    :param access: Access object.
    :type access: :class:`audit_tools.audit.Access`
    """
    self.namespace.audit_current_access = access
PeRDy/django-audit-tools
[ 8, 3, 8, 1, 1451763432 ]
def init_ou_structure(self):
    # Change from original: Drop OUs outside self.root_ou_id subtree.
    super(RandsoneOrgLdif, self).init_ou_structure()
    # Breadth-first walk: `ous` grows while being iterated, collecting
    # every OU reachable from the root.
    ous, tree = [self.root_ou_id], self.ou_tree
    for ou in ous:
        ous.extend(tree.get(ou, ()))
    # Keep only subtree members that have a children entry in the tree.
    self.ou_tree = dict((ou, tree[ou]) for ou in ous if ou in tree)
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def init_person_titles(self):
    """Override: skip title lookups entirely (no titles are exported)."""
    self.person_titles = {}
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def __init__(self, pdata, dbstate, uistate, nav_group=0):
    # Initialise the geography view for family places.
    GeoGraphyView.__init__(self, _('Family places map'),
                           pdata, dbstate, uistate,
                           FamilyBookmarks,
                           nav_group)
    self.dbstate = dbstate
    self.uistate = uistate
    self.place_list = []
    self.place_without_coordinates = []
    # Bounding box of the plotted places.
    self.minlat = self.maxlat = self.minlon = self.maxlon = 0.0
    # Year-range sentinels, narrowed as events are plotted.
    self.minyear = 9999
    self.maxyear = 0
    self.nbplaces = 0
    self.nbmarkers = 0
    self.sort = []
    self.additional_uis.append(self.additional_ui())
    self.no_show_places_in_status_bar = False
    # Calendar for event-date conversion; set in _createpersonmarkers.
    self.cal = None
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def get_stock(self):
    """
    Return the name of the stock icon used for this view; it is assumed
    to have been registered as a stock icon already.
    """
    return 'geo-show-family'
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def additional_ui(self):
    """
    UIManager XML describing the menus and buttons of this view.
    """
    return _UI_DEF
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def goto_handle(self, handle=None):
    """
    Rebuild the map for the active family, falling back to the active
    person when no family is active.
    """
    if self.osm is None:
        # Map widget not initialised yet; nothing to draw.
        return
    if self.uistate.get_active('Family'):
        self._createmap(self.uistate.get_active('Family'))
    else:
        self._createmap(self.uistate.get_active('Person'))
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def _createpersonmarkers(self, dbstate, person, comment, fam_id):
    """
    Create all markers for the specified person.

    Walks the person's own events first, then the events of every family
    the person belongs to, appending one marker per event place that has
    usable coordinates (and recording coordinate-less places separately).
    """
    self.cal = config.get('preferences.calendar-format-report')
    latitude = longitude = ""
    if person:
        # For each event, if we have a place, set a marker.
        for event_ref in person.get_event_ref_list():
            if not event_ref:
                continue
            role = event_ref.get_role()
            event = dbstate.db.get_event_from_handle(event_ref.ref)
            eyear = event.get_date_object().to_calendar(self.cal).get_year()
            place_handle = event.get_place_handle()
            if place_handle:
                place = dbstate.db.get_place_from_handle(place_handle)
                if place:
                    longitude = place.get_longitude()
                    latitude = place.get_latitude()
                    latitude, longitude = conv_lat_lon(latitude,
                                                       longitude, "D.D8")
                    descr = _pd.display(dbstate.db, place)
                    evt = EventType(event.get_type())
                    descr1 = _("%(eventtype)s : %(name)s") % {
                        'eventtype': evt,
                        'name': _nd.display(person)}
                    # place.get_longitude and place.get_latitude return
                    # one string. We have coordinates when the two values
                    # contains non null string.
                    if longitude and latitude:
                        # Deduplicate on the composite description key.
                        if not self._present_in_places_list(
                                2, str(descr1 + descr + str(evt))):
                            self._append_to_places_list(
                                descr,
                                str(descr1 + descr + str(evt)),
                                _nd.display(person),
                                latitude, longitude, role, eyear,
                                event.get_type(),
                                person.gramps_id,
                                place.gramps_id,
                                event.gramps_id,
                                fam_id)
                    else:
                        self._append_to_places_without_coord(
                            place.gramps_id, descr)
        # Second pass: events attached to each of the person's families.
        family_list = person.get_family_handle_list()
        for family_hdl in family_list:
            family = self.dbstate.db.get_family_from_handle(family_hdl)
            if family is not None:
                for event_ref in family.get_event_ref_list():
                    if event_ref:
                        event = dbstate.db.get_event_from_handle(
                            event_ref.ref)
                        role = event_ref.get_role()
                        if event.get_place_handle():
                            place_handle = event.get_place_handle()
                            if place_handle:
                                place = dbstate.db.get_place_from_handle(
                                    place_handle)
                                if place:
                                    longitude = place.get_longitude()
                                    latitude = place.get_latitude()
                                    (latitude,
                                     longitude) = conv_lat_lon(latitude,
                                                               longitude,
                                                               "D.D8")
                                    descr = _pd.display(dbstate.db, place)
                                    evt = EventType(event.get_type())
                                    # Family events are labelled with
                                    # both parents' names.
                                    (father_name,
                                     mother_name) = self._get_father_and_mother_name(event)
                                    descr1 = "%s : %s - " % (evt,
                                                             father_name)
                                    descr1 = "%s%s" % (descr1, mother_name)
                                    eyear = event.get_date_object().to_calendar(self.cal).get_year()
                                    if longitude and latitude:
                                        if not self._present_in_places_list(
                                                2, str(descr1 + descr + str(evt))):
                                            self._append_to_places_list(
                                                descr,
                                                str(descr1 + descr + str(
                                                    evt)),
                                                _nd.display(person),
                                                latitude, longitude,
                                                role, eyear,
                                                event.get_type(),
                                                person.gramps_id,
                                                place.gramps_id,
                                                event.gramps_id,
                                                family.gramps_id)
                                    else:
                                        self._append_to_places_without_coord(place.gramps_id, descr)
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def _createmap_for_one_family(self, family):
    """
    Create all markers for one family : all event's places with a lat/lon.

    Resolves a representative person (father, then mother, then the
    active person), then plots markers for the father, mother and every
    child of that person's primary family.
    """
    dbstate = self.dbstate
    self.message_layer.add_message(
        _("Family places for %s") % self.family_label(family))
    person = None
    if family:
        handle = family.get_father_handle()
        if handle:
            person = dbstate.db.get_person_from_handle(handle)
    else:
        return
    family_id = family.gramps_id
    if person is None:  # family without father ?
        handle = family.get_mother_handle()
        if handle:
            person = dbstate.db.get_person_from_handle(handle)
    if person is None:
        # Neither parent resolvable: fall back to the active person.
        handle = self.uistate.get_active('Person')
        if handle:
            person = dbstate.db.get_person_from_handle(handle)
    if person is not None:
        family_list = person.get_family_handle_list()
        if len(family_list) > 0:
            fhandle = family_list[0]  # first is primary
            fam = dbstate.db.get_family_from_handle(fhandle)
            father = mother = None
            handle = fam.get_father_handle()
            if handle:
                father = dbstate.db.get_person_from_handle(handle)
            if father:
                comment = _("Father : %(id)s : %(name)s") % {
                    'id': father.gramps_id,
                    'name': _nd.display(father)}
                self._createpersonmarkers(dbstate, father,
                                          comment, family_id)
            handle = fam.get_mother_handle()
            if handle:
                mother = dbstate.db.get_person_from_handle(handle)
            if mother:
                comment = _("Mother : %(id)s : %(name)s") % {
                    'id': mother.gramps_id,
                    'name': _nd.display(mother)}
                self._createpersonmarkers(dbstate, mother,
                                          comment, family_id)
            index = 0
            child_ref_list = fam.get_child_ref_list()
            if child_ref_list:
                for child_ref in child_ref_list:
                    child = dbstate.db.get_person_from_handle(
                        child_ref.ref)
                    if child:
                        index += 1
                        comment = _("Child : %(id)s - %(index)d "
                                    ": %(name)s") % {
                                        'id': child.gramps_id,
                                        'index': index,
                                        'name': _nd.display(child)}
                        self._createpersonmarkers(dbstate, child,
                                                  comment, family_id)
        else:
            comment = _("Person : %(id)s %(name)s has no family.") % {
                'id': person.gramps_id,
                'name': _nd.display(person)}
            self._createpersonmarkers(dbstate, person,
                                      comment, family_id)
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def add_event_bubble_message(self, event, lat, lon, mark, menu):
    """
    Add an item to the popup menu.

    Builds the submenu holding the three edit actions plus the centring
    action.  Refactored to a data-driven loop, replacing four copies of
    identical MenuItem boilerplate.
    """
    self.itemoption = Gtk.Menu()
    itemoption = self.itemoption
    itemoption.show()
    menu.set_submenu(itemoption)
    entries = (
        (_("Edit Family"), self.edit_family),
        (_("Edit Person"), self.edit_person),
        (_("Edit Event"), self.edit_event),
        (_("Center on this place"), self.center_here),
    )
    for label, callback in entries:
        item = Gtk.MenuItem(label=label)
        item.show()
        item.connect("activate", callback, event, lat, lon, mark)
        itemoption.append(item)
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def add_specific_menu(self, menu, event, lat, lon):
    """
    No view-specific entries for this map; intentionally a no-op.
    """
    return
gramps-project/gramps
[ 1698, 344, 1698, 81, 1420599108 ]
def run(emparts, config):
    """Poll the configured SMA inverters and refresh the pv_data cache.

    Rate-limited to one poll per config 'min_update' seconds (default 20).

    Fixes: removed the dead top-level `registers = eval(...)` assignment
    (its value was never used -- each device re-evaluates the relevant
    register list), and merged the duplicated fetch/append lines.
    """
    global pv_debug
    global pv_last_update
    global pv_data

    # Only update every X seconds
    if time.time() < pv_last_update + int(config.get('min_update', 20)):
        if pv_debug > 1:
            print("pv: data skipping")
        return
    pv_last_update = time.time()

    # SECURITY NOTE(review): eval() on config values executes arbitrary
    # code from the config file; ast.literal_eval would be safer.
    pv_data = []
    for inv in eval(config.get('inverters')):
        host, port, modbusid, manufacturer = inv
        device_class = get_device_class(host, int(port), int(modbusid))
        if device_class == "Solar Inverter":
            relevant_registers = eval(config.get('registers'))
        elif device_class == "Battery Inverter":
            relevant_registers = eval(config.get('registers_batt'))
        else:
            if pv_debug > 1:
                print("pv: unknown device class; skipping")
            continue
        mdata = get_pv_data(host, int(port), int(modbusid),
                            relevant_registers)
        pv_data.append(mdata)

    # query
    if pv_data is None:
        if pv_debug > 0:
            print("PV: no data")
        return

    # Stamp every reading with a single consistent timestamp.
    timestamp = time.time()
    for i in pv_data:
        i['timestamp'] = timestamp
        if pv_debug > 0:
            print("PV:" + format(i))
datenschuft/SMA-EM
[ 66, 33, 66, 11, 1430946296 ]
def on_publish(client, userdata, result):
    """MQTT publish-acknowledged callback; nothing to do here."""
    pass
datenschuft/SMA-EM
[ 66, 33, 66, 11, 1430946296 ]
def __init__(self, flag_name, short_flag, checkbox):
    """Bind a 're.<FLAG>' name to its GUI checkbox."""
    if not flag_name.startswith('re.'):
        raise ValueError('Invalid flag name {!r}'.format(flag_name))
    self.flagName = flag_name
    # Resolve e.g. 're.IGNORECASE' to the actual re module constant.
    self.reFlag = getattr(re, flag_name[3:])
    self.shortFlag = short_flag
    self.checkBox = checkbox
    # Checkbox state before an embedded flag forced it on; None = not embedded.
    self.preEmbedState = None
luksan/kodos
[ 19, 12, 19, 5, 1263750837 ]
def embed(self):
    """Set the state of the checkbox to show that it is set by the
    regexp text.

    Remembers the user's prior state (only on the first call) so it can
    be restored once the embedded flag disappears.

    Fix: `== None` replaced with the idiomatic identity test `is None`.
    """
    if self.preEmbedState is None:
        self.preEmbedState = self.checkBox.isChecked()
    self.checkBox.setChecked(True)
    self.checkBox.setDisabled(True)
luksan/kodos
[ 19, 12, 19, 5, 1263750837 ]
def allFlagsORed(self):
    """OR together the re flags of every checked checkbox in this list."""
    combined = 0
    for flag in self:
        if flag.checkBox.isChecked():
            combined |= flag.reFlag
    return combined
luksan/kodos
[ 19, 12, 19, 5, 1263750837 ]
def clear_cache(index, cache):
    # Flush the accumulated per-IP feature rows into the corresponding
    # "<collection>_matrix" MongoDB collection.  (Python 2 code: note
    # the print statement.)
    print "clear cache", index
    for tmp in cache:
        client[db_name][coll_name_list[index]+"_matrix"].insert(cache[tmp])
whodewho/FluxEnder
[ 21, 11, 21, 1, 1399520247 ]
def extract_ip_feature(index):
    # Build fast-flux/DGA detection features for every IP-keyed document in
    # collection coll_name_list[index], then bulk-write them via clear_cache().
    # Python 2 code; depends on module globals: client, db_name,
    # coll_name_list, ip_p (IP-matching regex) and helpers init_dga,
    # domain_diversity, ip_pool_stability, evaluate_url_list.
    model = init_dga()  # classifier used to label domains as DGA or benign
    # timeout=False keeps the server-side cursor alive for this long scan
    cursor = client[db_name][coll_name_list[index]].find(timeout=False)
    cache = {}
    print index
    for row in cursor:
        # only process documents whose _id looks like an IP address
        if not ip_p.match(str(row["_id"])):
            continue
        number = len(row["ITEMS"])  # how many domains were seen on this IP
        min_ttl = min(row["TTLS"])
        max_ttl = max(row["TTLS"])
        # observation window in whole days (timestamps presumably epoch
        # seconds -- TODO confirm against the collector)
        lifetime = int(row["LAST_SEEN"] - row["FIRST_SEEN"])/(60*60*24)
        dd = domain_diversity(row["SUBDOMAINS"], row["ITEMS"])
        ips = ip_pool_stability(row["ITEMS"], db_name)
        # fraction of this IP's domains the model labels 'dga', 3 decimals
        tmp_counter = collections.Counter(evaluate_url_list(model, row["ITEMS"]))
        dga = round(tmp_counter['dga']/float(number), 3)
        cache[row["_id"]] = {"number": number, "dd": dd, "ips": ips,
                             "dga": dga, "ttl": [min_ttl, max_ttl],
                             "lifetime": lifetime, "_id": row["_id"]}
        # client[db_name][coll_name_list[index]+"_matrix"].insert(tmp)
    clear_cache(index, cache)
whodewho/FluxEnder
[ 21, 11, 21, 1, 1399520247 ]
def list_programs(plugin, item_id, **kwargs):
    """Build the channel's root menu: the latest newscast, per-commune news,
    then every show scraped from the emissions page.
    """

    def _menu_entry(label, url, image=None, callback=None):
        # assemble one menu row and run the common post-treatment on it
        entry = Listitem()
        entry.label = label
        if image is not None:
            entry.art['thumb'] = entry.art['landscape'] = image
        entry.set_callback(callback,
                           item_id=item_id,
                           program_url=url,
                           page='1')
        item_post_treatment(entry)
        return entry

    yield _menu_entry('Le dernier JT',
                      URL_ROOT + '/category/jt-complet/',
                      callback=list_videos)
    yield _menu_entry('L\'actu par communes',
                      URL_ROOT + '/lactu-par-communes/',
                      callback=list_communes)

    resp = urlquick.get(URL_EMISSIONS)
    root = resp.parse("tbody")
    for anchor in root.iterfind(".//a"):
        title = anchor.find('.//img').get('alt')
        image = anchor.find('.//img').get('src')
        href = anchor.get("href")
        if 'www.canalc.be' in href:
            url = href.strip()
        else:
            url = URL_ROOT + href
        yield _menu_entry(title, url, image=image, callback=list_videos)
Catch-up-TV-and-More/plugin.video.catchuptvandmore
[ 225, 76, 225, 57, 1487096995 ]
def list_communes(plugin, item_id, program_url, page, **kwargs):
    """Placeholder route for the per-commune listing; not implemented yet."""
    return False
Catch-up-TV-and-More/plugin.video.catchuptvandmore
[ 225, 76, 225, 57, 1487096995 ]
def list_videos(plugin, item_id, program_url, page, **kwargs):
    """Yield playable items from one page of a program's archive, plus a
    "next page" item when the pagination widget is present.
    """
    page_url = program_url if page == '1' else program_url + 'page/%s/' % page
    resp = urlquick.get(page_url)
    root = resp.parse()

    for article in root.iterfind(".//article"):
        # articles without a headline are layout filler, not videos
        if article.find(".//h2") is None:
            continue
        video = Listitem()
        video.label = article.find('.//h2/a').text
        image = article.find('.//img').get('src')
        video.art['thumb'] = video.art['landscape'] = image
        video.set_callback(get_video_url,
                           item_id=item_id,
                           video_url=article.find('.//a').get('href'))
        item_post_treatment(video, is_playable=True, is_downloadable=True)
        yield video

    if root.find(".//span[@class='pages']") is not None:
        yield Listitem.next_page(item_id=item_id,
                                 program_url=program_url,
                                 page=str(int(page) + 1))
Catch-up-TV-and-More/plugin.video.catchuptvandmore
[ 225, 76, 225, 57, 1487096995 ]
def get_video_url(plugin, item_id, video_url, download_mode=False, **kwargs):
    """Resolve the final stream URL for a video page.

    Prefers a direct <source src="..."> stream embedded in the page and
    falls back to a YouTube embed.  When download_mode is set, the direct
    stream is handed to the downloader instead of being returned.

    Raises IndexError when the page has neither a direct source nor a
    YouTube embed (unchanged from the original behaviour).
    """
    resp = urlquick.get(video_url)

    # Fix: the original compiled and ran the same <source src> pattern twice
    # (once for the length check, once for the value); scrape it once.
    direct_streams = re.findall(r'source src\=\"(.*?)\"', resp.text)
    if direct_streams:
        stream_url = direct_streams[0]
        if download_mode:
            return download.download_video(stream_url)
        return stream_url

    video_id = re.findall(r'www.youtube.com\/embed\/(.*?)\"', resp.text)[0]
    return resolver_proxy.get_stream_youtube(plugin, video_id, download_mode)
Catch-up-TV-and-More/plugin.video.catchuptvandmore
[ 225, 76, 225, 57, 1487096995 ]
def main():
    """Interactive first-time setup for a ScrapeBot instance.

    Ensures the database schema exists, guarantees at least one user,
    registers this machine as an instance, and (on Linux) optionally
    installs a cronjob running the bot every two minutes.
    """
    print('Welcome to the ScrapeBot setup')
    config = get_config()
    instance_name = check_minimal_config(config)
    print('Continuing to the database')
    print('- connecting to ' + config.get('Database', 'host', fallback='localhost'))
    try:
        engine = get_engine(config)
        base.metadata.create_all(engine)
        db = get_db(engine)
    except Exception:
        # Fix: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still abort the setup instead of being swallowed here.
        print('- uh, there is a problem with connecting to your database ...')
        exit(3)
    print('- read tables: ' + ', '.join(base.metadata.tables.keys()))

    users = db.query(User).order_by(User.created).all()
    user = None
    if len(users) == 0:
        print('- the database currently does not contain any users, so we will create a default one')
        username = read_forcefully('- what name should this user listen to', 'root')
        email = read_forcefully('- and what is this user\'s email address')
        user = create_user(db, username, email)
    else:
        print('- one or many users available')
        # prefer a user called "root"; otherwise fall back to the oldest one
        user = db.query(User).filter(User.name == 'root').first()
        if user is None:
            user = users[0]
    while read_bool_forcefully('Do you want to create another user'):
        username = read_forcefully('- what name should this user listen to')
        email = read_forcefully('- and what is this user\'s email address')
        create_user(db, username, email)

    print('Checking this instance')
    this_instance = db.query(Instance).filter(Instance.name == instance_name)
    print('- it is called ' + instance_name)
    if this_instance.count() == 0:
        db.add(Instance(name=instance_name, owner_uid=user.uid))
        db.commit()
        print('- instance newly registered and ascribed to user "' + user.name + '"')
    else:
        print('- instance name already registered, meaning that it has been used elsewhere')
        if read_bool_forcefully('- is this on purpose'):
            print('- okay, fair enough, proceeding ...')
        else:
            # pick a fresh name, persist it, and register under the new name
            instance_name = read_forcefully('- so how should this instance be called')
            config.add_value('Instance', 'Name', instance_name)
            config.write()
            print('- alright, updated "config.ini"')
            db.add(Instance(name=instance_name, owner_uid=user.uid))
            db.commit()
            print('- instance newly registered and ascribed to user "' + user.name + '"')
    print('- browser-wise this instance will use ' + config.get('Instance', 'browser', fallback='Firefox'))

    print('Finishing up')
    print('- instance should be ready to use')
    print('- to run it once, use the script "scrapebot.py"')
    if platform.system() == 'Linux':
        print('- to run it regularly and since you are using Linux, I recommend a cronjob')
        os_user = getpass.getuser()
        if read_bool_forcefully('- install cronjob for ' + os_user + ' now'):
            cron = CronTab(user=os_user)
            # replace any cronjob a previous setup of this instance installed
            cron.remove_all(comment='ScrapeBot // ' + instance_name)
            cronjob = cron.new(command='cd ' + os.getcwd() + ' && ' + sys.executable +
                                       ' scrapebot.py >> scrapebot_cron.log',
                               comment='ScrapeBot // ' + instance_name)
            cronjob.minute.every(2)
            cron.write()
    else:
        print('- to run it regularly (which is what you want), you may want to use Windows Task Scheduler or the like')
    print('---------')
    print('Thanks for using; please direct any questions and pull requests to https://github.com/marhai/scrapebot')
    db.close()
MarHai/ScrapeBot
[ 16, 5, 16, 3, 1423476441 ]
def get_config(create_if_necessary=True):
    """Load config.ini, creating it interactively when missing and allowed.

    Returns a Configuration object; exits with status 3 when the file is
    absent and must not be created.
    """
    readable = os.access('config.ini', os.R_OK)
    if not readable and create_if_necessary:
        config = setup_config()
        print('Reading newly created config.ini')
        return config
    elif not readable:
        # Fix: the hint was missing its closing parenthesis.
        print('Configuration (config.ini) not found (have you tried running "setup.py" first?)')
        exit(3)
    else:
        print('Configuration file "config.ini" found')
        return Configuration()
MarHai/ScrapeBot
[ 16, 5, 16, 3, 1423476441 ]
def read_forcefully(line, default=''):
    """Prompt until the user enters something; fall back to *default* if set."""
    if default != '':
        line = line + ' (default is "' + default + '", to accept just hit return)'
    while True:
        value = input(line + ': ').strip()
        if value != '':
            return value
        if default == '':
            # nothing entered and no fallback available: ask again
            print('- This information is obligatory!')
        else:
            print('- Using the default value, "' + default + '"')
            return default
MarHai/ScrapeBot
[ 16, 5, 16, 3, 1423476441 ]
def read_numeric_forcefully(line, default=None):
    """Prompt until the user enters an integer; fall back to *default* if set.

    Args:
        line: the prompt text (without trailing colon).
        default: an int-convertible fallback, or None to force input.

    Raises ValueError (unchanged) when the user types something non-numeric.
    """
    # Fix: the original tested "default != ''", so the default of None still
    # advertised "(default is None, to accept just hit return)" in the prompt
    # even though hitting return would NOT accept anything.
    if default is not None:
        line = line + ' (default is ' + str(default) + ', to accept just hit return)'
    value = input(line + ': ').strip()
    if value == '':
        if default is None:
            print('- This information is really necessary, please enter a number!')
            return read_numeric_forcefully(line)
        else:
            print('- Using the default value, ' + str(default))
            return int(default)
    else:
        return int(value)
MarHai/ScrapeBot
[ 16, 5, 16, 3, 1423476441 ]
def get_engine(config):
    """Create the SQLAlchemy engine described by *config*.

    Honors an optional [Database] Timeout entry as SQLAlchemy's
    pool_recycle value; exits with status 1 when the engine cannot be
    created.
    """
    try:
        database_timeout = int(config.get('Database', 'Timeout', fallback=-1))
    except (TypeError, ValueError):
        # Fix: narrowed a bare "except:" -- any non-numeric value simply
        # means "no recycling", exactly as before.
        database_timeout = -1
    if database_timeout > 0:
        print('Reconnecting to MySQL (through SQLAlchemy\'s pool_recycle) every ' +
              str(database_timeout) + ' seconds')
    try:
        return create_engine(config.get_db_engine_string(), encoding='utf-8',
                             pool_recycle=database_timeout)
    except Exception:
        # Fix: narrowed from a bare "except:" so Ctrl-C still interrupts.
        print('Error: Database engine could not be created (' +
              config.get_db_engine_string() + ')')
        error = sys.exc_info()[0]
        if error is not None:
            print('- ' + str(error))
            print('- ' + traceback.format_exc())
        exit(1)
MarHai/ScrapeBot
[ 16, 5, 16, 3, 1423476441 ]
def genDictFromFreq(freq_dict, size_b):
    """
    Creates a LZ77 dictionary (initial lookback window) from a
    dictionary of word: frequency.

    Args:
        freq_dict (dict(str)): maps word -> frequency (not modified)
        size_b (int): output size of the dictionary in bytes; values <= 0
            or >= the full length mean "no trimming"

    Returns:
        str: words joined in ascending score (len(word) * frequency) order,
        so the best-scoring words sit at the end, where zlib expects the
        most common substrings; trimmed from the front to size_b.
    """
    # Fix: build scores in a fresh dict -- the original overwrote the
    # caller's frequencies with scores and zeroed swallowed entries.
    scores = {word: len(word) * freq for word, freq in freq_dict.items()}

    # Superstrings swallow substring scores: visit words shortest-first and
    # hand each word's score to its highest-scoring superstring (if any),
    # flagging the substring for removal by zeroing it.
    sorted_keys = sorted(scores, key=lambda k: len(k))
    for i, key_i in enumerate(sorted_keys):
        candidates = sorted(sorted_keys[i+1:], key=lambda k: scores[k],
                            reverse=True)
        for key_j in candidates:  # fix: dropped the unused enumerate index
            if key_i in key_j:
                scores[key_j] += scores[key_i]
                scores[key_i] = 0
                break

    # drop swallowed entries (score forced to 0 above)
    scores = {k: v for k, v in scores.items() if v > 0}

    # join in ascending score order (zlib docs: most common substrings last)
    dict_str = ''.join(sorted(scores, key=lambda k: scores[k]))

    # trim from the front so the high-score tail survives
    if 0 < size_b < len(dict_str):
        dict_str = dict_str[len(dict_str)-size_b:]
    return dict_str
pinterest/mysql_utils
[ 877, 144, 877, 1, 1445707999 ]
def main():
    """Load substring frequencies from the file named on the command line
    and print the generated zlib dictionary (no trailing newline)."""
    options = parse()
    with open(options.freqs_file, 'r') as fh:
        frequencies = json.load(fh)
    zdict = genDictFromFreq(frequencies, options.size)
    print(zdict, end='')
pinterest/mysql_utils
[ 877, 144, 877, 1, 1445707999 ]
def setup(): GPIO.setmode(GPIO.BOARD) # Numbers GPIOs by physical location GPIO.setup(Gpin, GPIO.OUT) # Set Green Led Pin mode to output GPIO.setup(Rpin, GPIO.OUT) # Set Red Led Pin mode to output GPIO.setup(TouchPin, GPIO.IN, pull_up_down=GPIO.PUD_UP) # Set BtnPin's mode is input, and pull up to high level(3.3V)
sunfounder/SunFounder_SensorKit_for_RPi2
[ 105, 84, 105, 15, 1440128194 ]
def Print(x):
    """Log a state change of the touch input to the console.

    Prints the ON banner only when the value changes to 0 (the state the
    ON branch reacts to).  Relies on a module-level ``tmp`` holding the
    previously seen value.
    """
    global tmp
    if x != tmp:
        if x == 0:
            print (' **********')
            print (' * ON *')
            print (' **********')
        # Fix: remember the new state; the original never updated ``tmp``,
        # so the banner was re-printed on every poll rather than only on
        # state changes.
        tmp = x
sunfounder/SunFounder_SensorKit_for_RPi2
[ 105, 84, 105, 15, 1440128194 ]
def loop():
    """Poll the touch sensor forever and mirror its state on LED + console."""
    while True:
        # Fix: sample the pin once per iteration; the original called
        # GPIO.input() twice, so the LED and the printed state could
        # disagree if the input changed between the two reads.
        state = GPIO.input(TouchPin)
        Led(state)
        Print(state)
sunfounder/SunFounder_SensorKit_for_RPi2
[ 105, 84, 105, 15, 1440128194 ]
def main(inargs=None):
    """Parse command-line options and run the flask development server."""
    import argparse
    import Cerebrum.logutils
    import Cerebrum.logutils.options

    arg_parser = argparse.ArgumentParser(
        description="Start flask dev server",
    )

    bind_group = arg_parser.add_argument_group('bind options')
    bind_group.add_argument(
        '--host',
        default=app.config['HOST'],
        help='Listen on interface %(metavar)s (%(default)s)',
        metavar='<host>',
    )
    bind_group.add_argument(
        '--port',
        type=int,
        default=app.config['PORT'],
        help='Listen on port %(metavar)s (%(default)s)',
        metavar='<port>',
    )

    debug_group = arg_parser.add_argument_group('debug options')
    debug_toggle = debug_group.add_mutually_exclusive_group()
    default_debug = app.config['DEBUG']
    debug_toggle.add_argument(
        '--debug',
        dest='debug',
        action='store_true',
        help='Enable debug mode' + (' (default)' if default_debug else ''),
    )
    debug_toggle.add_argument(
        '--no-debug',
        dest='debug',
        action='store_false',
        help='Disable debug mode' + ('' if default_debug else ' (default)'),
    )
    debug_toggle.set_defaults(debug=default_debug)

    Cerebrum.logutils.options.install_subparser(arg_parser)
    args = arg_parser.parse_args(inargs)
    Cerebrum.logutils.autoconf('console', args)

    # Fix flask logging: drop flask's own handlers and propagate records to
    # the root logger configured just above.
    app.logger.propagate = True
    for handler in app.logger.handlers[:]:
        app.logger.removeHandler(handler)

    app.run(
        host=args.host,
        port=args.port,
        debug=args.debug,
    )
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def identity_matrix():
    """Return a float32 array built from a copy of the module-level
    identity matrix, so callers can mutate it freely."""
    snapshot = _identity_matrix[:]
    return numpy.require(snapshot, 'f')
dkobozev/tatlin
[ 48, 16, 48, 2, 1320015854 ]
def translate(vertices, x, y, z):
    """Return *vertices* shifted by the vector (x, y, z).

    vertices is any array(-like) broadcastable against a length-3 float32
    vector; the input is not modified.
    """
    offset = numpy.array([x, y, z], 'f')
    return vertices + offset
dkobozev/tatlin
[ 48, 16, 48, 2, 1320015854 ]
def __init__(self, tot=100, lenght=10):
    """A tiny textual progress bar.

    tot: the value that represents 100% done.
    lenght: bar width in characters (original spelling kept -- callers
    may pass it by keyword).
    """
    self.tot = tot
    self.bar_lenght = lenght
    self.cp = '/-\|'  # spinner glyphs cycled while progressing
luca-heltai/ePICURE
[ 10, 23, 10, 12, 1416839855 ]
def progress(self, x):
    """Sets progress bar to a certain percentage x.

    Progress is given as whole percentage, i.e. 50% done is given by
    x = 50."""
    spinner = self.cp[int(x) % 4]
    filled = int((x / float(self.tot)) * self.bar_lenght)
    count = bold(str(int(x)) + "/" + str(self.tot))
    bar = ("#" * filled + spinner +
           "-" * (self.bar_lenght - 1 - filled) + "] " + count)
    # rewind the cursor so the next call overwrites this bar in place
    rewind = chr(8) * (self.bar_lenght + 4 +
                       len(str(int(x))) + len(str(self.tot)))
    sys.stdout.write(bar + rewind)
    sys.stdout.flush()
luca-heltai/ePICURE
[ 10, 23, 10, 12, 1416839855 ]
def __init__(self):
    """Query the controlling terminal for its width in columns."""
    winsize = fcntl.ioctl(sys.stdout, termios.TIOCGWINSZ, '1234')
    rows, cols = struct.unpack('hh', winsize)
    self.COLS = cols
luca-heltai/ePICURE
[ 10, 23, 10, 12, 1416839855 ]
def _glib_sigint_handler(cls, user_data):
    """SIGINT arrived: flag the innermost loop context and ask it to exit."""
    innermost = cls._loop_contexts[-1]
    innermost._quit_by_sigint = True
    innermost._loop_exit_func()
    # returning True keeps the handler installed until removed explicitly
    return True
exaile/exaile
[ 332, 82, 332, 90, 1409354815 ]
def __enter__(self):
    """Push this context; on the very first (non-win32) context, install the
    shared GLib SIGINT handler."""
    first_context = not InterruptibleLoopContext._loop_contexts
    if sys.platform != 'win32' and first_context:
        # Add a glib signal handler
        source_id = GLib.unix_signal_add(
            GLib.PRIORITY_DEFAULT,
            signal.SIGINT,
            self._glib_sigint_handler,
            None,
        )
        InterruptibleLoopContext._signal_source_id = source_id
    InterruptibleLoopContext._loop_contexts.append(self)
exaile/exaile
[ 332, 82, 332, 90, 1409354815 ]
def __init__(self, init_type, init_args, patch, scope, name):
    """Quantizing DSP processor: 2 inlets, 1 outlet, pitch map chosen by
    the first creation argument (defaults to "semitone")."""
    Processor.__init__(self, 2, 1, init_type, init_args, patch, scope, name)
    initargs, kwargs = self.parse_args(init_args)

    self.mapname = initargs[0] if initargs else "semitone"
    self.map = self.maps.get(self.mapname)

    self.hot_inlets = [0, 1]
    self.dsp_inlets = [0]
    self.dsp_outlets = [0]

    # flatten [(a, b), ...] into [a, b, ...] for the DSP backend
    flattened = [val for pair in self.map for val in pair]
    self.dsp_init("vcq12~", map=flattened)
bgribble/mfp
[ 49, 2, 49, 39, 1357664703 ]
def __init__(self, label_time):
    """Wire up CSS-based colouring for the on-screen clock label.

    label_time: the Gtk label whose colour reflects estimated-talk-time
    progress.
    """
    self.label_time = label_time

    style_context = self.label_time.get_style_context()
    self.color_override = Gtk.CssProvider()
    style_context.add_provider(self.color_override,
                               Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION + 1)

    self.label_color_default = self.load_color_from_css(style_context)
    color_reached = self.load_color_from_css(style_context, "ett-reached")
    color_info = self.load_color_from_css(style_context, "ett-info")
    color_warn = self.load_color_from_css(style_context, "ett-warn")

    # thresholds (seconds relative to the estimated talk time) -> colour
    self.color_map = [
        (300, self.label_color_default),
        (0, color_reached),
        (-150, color_info),
        (-300, color_warn),
    ]
Cimbali/pympress
[ 820, 75, 820, 22, 1442468502 ]
def default_color(self):
    """Forces to reset the default colors on the label."""
    # loading empty CSS clears any previous colour override
    empty_css = ''.encode('ascii')
    self.color_override.load_from_data(empty_css)
Cimbali/pympress
[ 820, 75, 820, 22, 1442468502 ]
def __init__(self, builder, ett, timing_tracker, autoplay):
    """Hook the timer into the UI: load widgets, register the pause/reset
    actions, and start the 250 ms clock refresh."""
    super(TimeCounter, self).__init__()

    self.label_colorer = TimeLabelColorer(builder.get_object('label_time'))
    self.ett = ett
    self.timing_tracker = timing_tracker
    self.autoplay = autoplay

    builder.load_widgets(self)
    actions = {
        'pause-timer': {'activate': self.switch_pause, 'state': self.paused},
        'reset-timer': {'activate': self.reset_timer},
    }
    builder.setup_actions(actions)
    self.pause_action = builder.get_application().lookup_action('pause-timer')

    # refresh the clock display four times per second
    GLib.timeout_add(250, self.update_time)
Cimbali/pympress
[ 820, 75, 820, 22, 1442468502 ]
def pause(self): """ Pause the timer if it is not paused, otherwise do nothing. Returns: `bool`: whether the clock's pause was toggled. """ if self.paused: return False self.paused = True self.pause_action.change_state(GLib.Variant.new_boolean(self.paused)) self.elapsed_time += time.time() - self.restart_time self.timing_tracker.end_time = self.elapsed_time if self.autoplay.is_looping(): self.autoplay.pause() self.update_time() return True
Cimbali/pympress
[ 820, 75, 820, 22, 1442468502 ]