Dataset schema: text (string, lengths 89 to 104k) · code_tokens (list of strings) · avg_line_len (float64, 7.91 to 980) · score (float64, 0 to 630)
def event_env_merge_info(app, env, _, other):
    """Called by Sphinx during phase 3 (resolving).

    * Combine child process' modified env with this one. Only changes should be
      new Imgur IDs since cache update is done in event_env_updated() after
      everything is merged and we're back to one process.

    :param sphinx.application.Sphinx app: Sphinx application object.
    :param sphinx.environment.BuildEnvironment env: Sphinx build environment.
    :param _: Not used.
    :param sphinx.environment.BuildEnvironment other: Sphinx build environment from child process.
    """
    other_album_cache = getattr(other, 'imgur_album_cache', None)
    other_image_cache = getattr(other, 'imgur_image_cache', None)
    if not other_album_cache and not other_image_cache:
        return
    album_cache = app.builder.env.imgur_album_cache
    image_cache = app.builder.env.imgur_image_cache
    assert env  # Linting.

    # Merge items.
    album_cache.update(other_album_cache)
    image_cache.update(other_image_cache)
[ "def", "event_env_merge_info", "(", "app", ",", "env", ",", "_", ",", "other", ")", ":", "other_album_cache", "=", "getattr", "(", "other", ",", "'imgur_album_cache'", ",", "None", ")", "other_image_cache", "=", "getattr", "(", "other", ",", "'imgur_image_cache'", ",", "None", ")", "if", "not", "other_album_cache", "and", "not", "other_image_cache", ":", "return", "album_cache", "=", "app", ".", "builder", ".", "env", ".", "imgur_album_cache", "image_cache", "=", "app", ".", "builder", ".", "env", ".", "imgur_image_cache", "assert", "env", "# Linting.", "# Merge items.", "album_cache", ".", "update", "(", "other_album_cache", ")", "image_cache", ".", "update", "(", "other_image_cache", ")" ]
45.545455
24.636364
def build_on_entry(self, runnable, regime, on_entry):
    """
    Build OnEntry start handler code.

    @param on_entry: OnEntry start handler object
    @type on_entry: lems.model.dynamics.OnEntry

    @return: Generated OnEntry code
    @rtype: list(string)
    """
    on_entry_code = []
    on_entry_code += ['if self.current_regime != self.last_regime:']
    on_entry_code += ['    self.last_regime = self.current_regime']
    for action in on_entry.actions:
        code = self.build_action(runnable, regime, action)
        for line in code:
            on_entry_code += ['    ' + line]
    return on_entry_code
[ "def", "build_on_entry", "(", "self", ",", "runnable", ",", "regime", ",", "on_entry", ")", ":", "on_entry_code", "=", "[", "]", "on_entry_code", "+=", "[", "'if self.current_regime != self.last_regime:'", "]", "on_entry_code", "+=", "[", "' self.last_regime = self.current_regime'", "]", "for", "action", "in", "on_entry", ".", "actions", ":", "code", "=", "self", ".", "build_action", "(", "runnable", ",", "regime", ",", "action", ")", "for", "line", "in", "code", ":", "on_entry_code", "+=", "[", "' '", "+", "line", "]", "return", "on_entry_code" ]
30.090909
19.181818
def run_thread(self):
    """Run the main thread."""
    self._run_thread = True
    self._thread.setDaemon(True)
    self._thread.start()
[ "def", "run_thread", "(", "self", ")", ":", "self", ".", "_run_thread", "=", "True", "self", ".", "_thread", ".", "setDaemon", "(", "True", ")", "self", ".", "_thread", ".", "start", "(", ")" ]
30
8.8
def convertall(table, *args, **kwargs):
    """
    Convenience function to convert all fields in the table using a common
    function or mapping. See also :func:`convert`.

    The ``where`` keyword argument can be given with a callable or expression
    which is evaluated on each row and which should return True if the
    conversion should be applied on that row, else False.
    """
    # TODO don't read the data twice!
    return convert(table, header(table), *args, **kwargs)
[ "def", "convertall", "(", "table", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# TODO don't read the data twice!", "return", "convert", "(", "table", ",", "header", "(", "table", ")", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
36.538462
20.692308
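A quick usage sketch for convertall, assuming the petl library it comes from (petl.convertall and petl.look are petl API; the sample table is made up):

import petl as etl

table = [['foo', 'bar'], ['a', 1], ['b', 2]]
# Apply str to every field in every row; petl views are lazy,
# so the conversion runs when the result is iterated.
converted = etl.convertall(table, str)
print(etl.look(converted))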
def pos(self):
    """
    Lazy-loads the part of speech tag for this word

    :getter: Returns the plain string value of the POS tag for the word
    :type: str
    """
    if self._pos is None:
        poses = self._element.xpath('POS/text()')
        if len(poses) > 0:
            self._pos = poses[0]
    return self._pos
[ "def", "pos", "(", "self", ")", ":", "if", "self", ".", "_pos", "is", "None", ":", "poses", "=", "self", ".", "_element", ".", "xpath", "(", "'POS/text()'", ")", "if", "len", "(", "poses", ")", ">", "0", ":", "self", ".", "_pos", "=", "poses", "[", "0", "]", "return", "self", ".", "_pos" ]
27.384615
17.846154
def dot_eth_label(name):
    """
    Convert from a name, like 'ethfinex.eth', to a label, like 'ethfinex'

    If name is already a label, this should be a noop, except for converting
    to a string and validating the name syntax.
    """
    label = name_to_label(name, registrar='eth')
    if len(label) < MIN_ETH_LABEL_LENGTH:
        raise InvalidLabel('name %r is too short' % label)
    else:
        return label
[ "def", "dot_eth_label", "(", "name", ")", ":", "label", "=", "name_to_label", "(", "name", ",", "registrar", "=", "'eth'", ")", "if", "len", "(", "label", ")", "<", "MIN_ETH_LABEL_LENGTH", ":", "raise", "InvalidLabel", "(", "'name %r is too short'", "%", "label", ")", "else", ":", "return", "label" ]
37.272727
16.363636
def create(cls, bucket, key, value):
    """Create a new tag for bucket."""
    with db.session.begin_nested():
        obj = cls(
            bucket_id=as_bucket_id(bucket),
            key=key,
            value=value
        )
        db.session.add(obj)
    return obj
[ "def", "create", "(", "cls", ",", "bucket", ",", "key", ",", "value", ")", ":", "with", "db", ".", "session", ".", "begin_nested", "(", ")", ":", "obj", "=", "cls", "(", "bucket_id", "=", "as_bucket_id", "(", "bucket", ")", ",", "key", "=", "key", ",", "value", "=", "value", ")", "db", ".", "session", ".", "add", "(", "obj", ")", "return", "obj" ]
29.9
11.7
def hpx_to_axes(h, npix):
    """Generate a sequence of bin edge vectors corresponding to the
    axes of an HPX object."""
    x = h.ebins
    z = np.arange(npix[-1] + 1)
    return x, z
[ "def", "hpx_to_axes", "(", "h", ",", "npix", ")", ":", "x", "=", "h", ".", "ebins", "z", "=", "np", ".", "arange", "(", "npix", "[", "-", "1", "]", "+", "1", ")", "return", "x", ",", "z" ]
26
16.285714
def dumps(self, script):
    "Return a compressed representation of script as a binary string."
    string = BytesIO()
    self._dump(script, string, self._protocol, self._version)
    return string.getvalue()
[ "def", "dumps", "(", "self", ",", "script", ")", ":", "string", "=", "BytesIO", "(", ")", "self", ".", "_dump", "(", "script", ",", "string", ",", "self", ".", "_protocol", ",", "self", ".", "_version", ")", "return", "string", ".", "getvalue", "(", ")" ]
44.2
19.4
def virtual_interface_create(provider, names, **kwargs):
    '''
    Attach private interfaces to a server

    CLI Example:

    .. code-block:: bash

        salt minionname cloud.virtual_interface_create my-nova names=['salt-master'] net_name='salt'
    '''
    client = _get_client()
    return client.extra_action(provider=provider, names=names,
                               action='virtual_interface_create', **kwargs)
[ "def", "virtual_interface_create", "(", "provider", ",", "names", ",", "*", "*", "kwargs", ")", ":", "client", "=", "_get_client", "(", ")", "return", "client", ".", "extra_action", "(", "provider", "=", "provider", ",", "names", "=", "names", ",", "action", "=", "'virtual_interface_create'", ",", "*", "*", "kwargs", ")" ]
29.538462
32.615385
def delete_view(self, query_criteria=None, uid='_all_users'):
    '''
    a method to delete a view associated with a user design doc

    :param query_criteria: [optional] dictionary with valid jsonmodel query criteria
    :param uid: [optional] string with uid of design document to update
    :return: integer with status of operation

    an example of how to construct the query_criteria argument:

    query_criteria = {
        '.path.to.number': {
            'min_value': 4.5
        },
        '.path.to.string': {
            'discrete_values': [ 'pond', 'lake', 'stream', 'brook' ]
        }
    }

    NOTE:   only fields specified in the document schema at class initialization
            can be used as fields in query_criteria. otherwise, an error will be
            thrown. uid is automatically added to all document schemas at
            initialization

    NOTE:   the full list of all criteria are found in the reference page for the
            jsonmodel module as well as the query-rules.json file included in the
            module. http://collectiveacuity.github.io/jsonModel/reference/#query-criteria

    NOTE:   if a query_criteria is not specified, then the entire user design doc
            is removed. otherwise, the existing design document is updated.
    '''

    # https://developer.couchbase.com/documentation/mobile/1.5/references/sync-gateway/admin-rest-api/index.html#/query/delete__db___design__ddoc_
    title = '%s.delete_view' % self.__class__.__name__

    # validate inputs
    input_fields = {
        'uid': uid
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # validate inputs
    if query_criteria:
        if not self.model:
            raise ValueError('%s(query_criteria={...}) requires a document_schema.' % title)
        self.model.query(query_criteria)
    else:
        query_criteria = {}
    if uid != '_all_users' and self.public:
        raise ValueError('%s(uid="%s") user ids are not applicable for a public bucket.' % (title, uid))

    # handle deleting user design doc
    if not query_criteria:
        url = self.bucket_url + '/_design/%s' % uid
        response = requests.delete(url)

    # catch missing args
    elif not uid:
        raise IndexError('%s requires either a uid or query_criteria argument.' % title)

    # handle removing a view from a design doc
    else:

        # determine hash of query criteria
        import hashlib
        import json
        hashed_criteria = hashlib.md5(json.dumps(query_criteria, sort_keys=True).encode('utf-8')).hexdigest()

        # determine design document to update
        url = self.bucket_url + '/_design/%s' % uid

        # remove view from design document and update
        response = requests.get(url)
        if response.status_code in (200, 201):
            design_details = response.json()
            design_details['views'] = self._clean_views(design_details['views'])
            if hashed_criteria in design_details['views'].keys():
                del design_details['views'][hashed_criteria]
            if design_details['views']:
                response = requests.put(url, json=design_details)
            else:
                response = requests.delete(url)

    return response.status_code
[ "def", "delete_view", "(", "self", ",", "query_criteria", "=", "None", ",", "uid", "=", "'_all_users'", ")", ":", "# https://developer.couchbase.com/documentation/mobile/1.5/references/sync-gateway/admin-rest-api/index.html#/query/delete__db___design__ddoc_", "title", "=", "'%s.delete_view'", "%", "self", ".", "__class__", ".", "__name__", "# validate inputs", "input_fields", "=", "{", "'uid'", ":", "uid", "}", "for", "key", ",", "value", "in", "input_fields", ".", "items", "(", ")", ":", "if", "value", ":", "object_title", "=", "'%s(%s=%s)'", "%", "(", "title", ",", "key", ",", "str", "(", "value", ")", ")", "self", ".", "fields", ".", "validate", "(", "value", ",", "'.%s'", "%", "key", ",", "object_title", ")", "# validate inputs", "if", "query_criteria", ":", "if", "not", "self", ".", "model", ":", "raise", "ValueError", "(", "'%s(query_criteria={...} requires a document_schema.'", "%", "title", ")", "self", ".", "model", ".", "query", "(", "query_criteria", ")", "else", ":", "query_criteria", "=", "{", "}", "if", "uid", "!=", "'_all_users'", "and", "self", ".", "public", ":", "raise", "ValueError", "(", "'%s(uid=\"%s\") user ids are not applicable for a public bucket. % title'", ")", "# handle deleting user design doc", "if", "not", "query_criteria", ":", "url", "=", "self", ".", "bucket_url", "+", "'/_design/%s'", "%", "uid", "response", "=", "requests", ".", "delete", "(", "url", ")", "# catch missing args", "elif", "not", "uid", ":", "raise", "IndexError", "(", "'%s requires either a uid or query_criteria argument.'", "%", "title", ")", "# handle removing a view from a design doc", "else", ":", "# determine hash of query criteria", "import", "hashlib", "import", "json", "hashed_criteria", "=", "hashlib", ".", "md5", "(", "json", ".", "dumps", "(", "query_criteria", ",", "sort_key", "=", "True", ")", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", "# determine design document to update", "url", "=", "self", ".", "bucket_url", "+", "'/_design/%s'", "%", "uid", "# remove view from design document and update", "response", "=", "requests", ".", "get", "(", "url", ")", "if", "response", ".", "status_code", "in", "(", "200", ",", "201", ")", ":", "design_details", "=", "response", ".", "json", "(", ")", "design_details", "[", "'views'", "]", "=", "self", ".", "_clean_views", "(", "design_details", "[", "'views'", "]", ")", "if", "hashed_criteria", "in", "design_details", "[", "'views'", "]", ".", "keys", "(", ")", ":", "del", "design_details", "[", "'views'", "]", "[", "hashed_criteria", "]", "if", "design_details", "[", "'views'", "]", ":", "response", "=", "requests", ".", "put", "(", "url", ",", "json", "=", "design_details", ")", "else", ":", "response", "=", "requests", ".", "delete", "(", "url", ")", "return", "response", ".", "status_code" ]
40.94382
26.247191
def endpoint_create(service, publicurl=None, internalurl=None, adminurl=None,
                    region=None, profile=None, url=None, interface=None,
                    **connection_args):
    '''
    Create an endpoint for an Openstack service

    CLI Examples:

    .. code-block:: bash

        salt 'v2' keystone.endpoint_create nova 'http://public/url' 'http://internal/url' 'http://adminurl/url' region

        salt 'v3' keystone.endpoint_create nova url='http://public/url' interface='public' region='RegionOne'
    '''
    kstone = auth(profile, **connection_args)
    keystone_service = service_get(name=service, profile=profile,
                                   **connection_args)
    if not keystone_service or 'Error' in keystone_service:
        return {'Error': 'Could not find the specified service'}

    if _OS_IDENTITY_API_VERSION > 2:
        kstone.endpoints.create(service=keystone_service[service]['id'],
                                region_id=region,
                                url=url,
                                interface=interface)
    else:
        kstone.endpoints.create(region=region,
                                service_id=keystone_service[service]['id'],
                                publicurl=publicurl,
                                adminurl=adminurl,
                                internalurl=internalurl)
    return endpoint_get(service, region, profile, interface, **connection_args)
[ "def", "endpoint_create", "(", "service", ",", "publicurl", "=", "None", ",", "internalurl", "=", "None", ",", "adminurl", "=", "None", ",", "region", "=", "None", ",", "profile", "=", "None", ",", "url", "=", "None", ",", "interface", "=", "None", ",", "*", "*", "connection_args", ")", ":", "kstone", "=", "auth", "(", "profile", ",", "*", "*", "connection_args", ")", "keystone_service", "=", "service_get", "(", "name", "=", "service", ",", "profile", "=", "profile", ",", "*", "*", "connection_args", ")", "if", "not", "keystone_service", "or", "'Error'", "in", "keystone_service", ":", "return", "{", "'Error'", ":", "'Could not find the specified service'", "}", "if", "_OS_IDENTITY_API_VERSION", ">", "2", ":", "kstone", ".", "endpoints", ".", "create", "(", "service", "=", "keystone_service", "[", "service", "]", "[", "'id'", "]", ",", "region_id", "=", "region", ",", "url", "=", "url", ",", "interface", "=", "interface", ")", "else", ":", "kstone", ".", "endpoints", ".", "create", "(", "region", "=", "region", ",", "service_id", "=", "keystone_service", "[", "service", "]", "[", "'id'", "]", ",", "publicurl", "=", "publicurl", ",", "adminurl", "=", "adminurl", ",", "internalurl", "=", "internalurl", ")", "return", "endpoint_get", "(", "service", ",", "region", ",", "profile", ",", "interface", ",", "*", "*", "connection_args", ")" ]
45.16129
27.096774
def normal_print(raw):
    '''Print the text without color, one line at a time.'''
    lines = raw.split('\n')
    for line in lines:
        if line:
            print(line + '\n')
[ "def", "normal_print", "(", "raw", ")", ":", "lines", "=", "raw", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", ":", "print", "(", "line", "+", "'\\n'", ")" ]
26.166667
13.833333
def run():
    """Fetches changes and applies them to VIFs periodically

    Process as of RM11449:
    * Get all groups from redis
    * Fetch ALL VIFs from Xen
    * Walk ALL VIFs and partition them into added, updated and removed
    * Walk the final "modified" VIFs list and apply flows to each
    """
    groups_client = sg_cli.SecurityGroupsClient()
    xapi_client = xapi.XapiClient()

    interfaces = set()
    while True:
        try:
            interfaces = xapi_client.get_interfaces()
        except Exception:
            LOG.exception("Unable to get instances/interfaces from xapi")
            _sleep()
            continue

        try:
            sg_states = groups_client.get_security_group_states(interfaces)
            new_sg, updated_sg, removed_sg = partition_vifs(xapi_client,
                                                            interfaces,
                                                            sg_states)
            xapi_client.update_interfaces(new_sg, updated_sg, removed_sg)
            groups_to_ack = [v for v in new_sg + updated_sg if v.success]
            # NOTE(quade): This solves a race condition where a security group
            # rule may have changed between the time the sg_states were called
            # and when they were officially ack'd. It functions as a compare
            # and set. This is a fix until we get onto a proper messaging
            # queue. NCP-2287
            sg_sts_curr = groups_client.get_security_group_states(interfaces)
            groups_to_ack = get_groups_to_ack(groups_to_ack, sg_states,
                                              sg_sts_curr)
            # This list will contain all the security group rules that do not
            # match
            ack_groups(groups_client, groups_to_ack)
        except Exception:
            LOG.exception("Unable to get security groups from registry and "
                          "apply them to xapi")
            _sleep()
            continue

        _sleep()
[ "def", "run", "(", ")", ":", "groups_client", "=", "sg_cli", ".", "SecurityGroupsClient", "(", ")", "xapi_client", "=", "xapi", ".", "XapiClient", "(", ")", "interfaces", "=", "set", "(", ")", "while", "True", ":", "try", ":", "interfaces", "=", "xapi_client", ".", "get_interfaces", "(", ")", "except", "Exception", ":", "LOG", ".", "exception", "(", "\"Unable to get instances/interfaces from xapi\"", ")", "_sleep", "(", ")", "continue", "try", ":", "sg_states", "=", "groups_client", ".", "get_security_group_states", "(", "interfaces", ")", "new_sg", ",", "updated_sg", ",", "removed_sg", "=", "partition_vifs", "(", "xapi_client", ",", "interfaces", ",", "sg_states", ")", "xapi_client", ".", "update_interfaces", "(", "new_sg", ",", "updated_sg", ",", "removed_sg", ")", "groups_to_ack", "=", "[", "v", "for", "v", "in", "new_sg", "+", "updated_sg", "if", "v", ".", "success", "]", "# NOTE(quade): This solves a race condition where a security group", "# rule may have changed between the time the sg_states were called", "# and when they were officially ack'd. It functions as a compare", "# and set. This is a fix until we get onto a proper messaging", "# queue. NCP-2287", "sg_sts_curr", "=", "groups_client", ".", "get_security_group_states", "(", "interfaces", ")", "groups_to_ack", "=", "get_groups_to_ack", "(", "groups_to_ack", ",", "sg_states", ",", "sg_sts_curr", ")", "# This list will contain all the security group rules that do not", "# match", "ack_groups", "(", "groups_client", ",", "groups_to_ack", ")", "except", "Exception", ":", "LOG", ".", "exception", "(", "\"Unable to get security groups from registry and \"", "\"apply them to xapi\"", ")", "_sleep", "(", ")", "continue", "_sleep", "(", ")" ]
41.702128
24.702128
def repmc(instr, marker, value, lenout=None):
    """
    Replace a marker with a character string.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/repmc_c.html

    :param instr: Input string.
    :type instr: str
    :param marker: Marker to be replaced.
    :type marker: str
    :param value: Replacement value.
    :type value: str
    :param lenout: Optional available space in output string
    :type lenout: int
    :return: Output string.
    :rtype: str
    """
    if lenout is None:
        lenout = ctypes.c_int(len(instr) + len(value) + len(marker) + 15)
    instr = stypes.stringToCharP(instr)
    marker = stypes.stringToCharP(marker)
    value = stypes.stringToCharP(value)
    out = stypes.stringToCharP(lenout)
    libspice.repmc_c(instr, marker, value, lenout, out)
    return stypes.toPythonString(out)
[ "def", "repmc", "(", "instr", ",", "marker", ",", "value", ",", "lenout", "=", "None", ")", ":", "if", "lenout", "is", "None", ":", "lenout", "=", "ctypes", ".", "c_int", "(", "len", "(", "instr", ")", "+", "len", "(", "value", ")", "+", "len", "(", "marker", ")", "+", "15", ")", "instr", "=", "stypes", ".", "stringToCharP", "(", "instr", ")", "marker", "=", "stypes", ".", "stringToCharP", "(", "marker", ")", "value", "=", "stypes", ".", "stringToCharP", "(", "value", ")", "out", "=", "stypes", ".", "stringToCharP", "(", "lenout", ")", "libspice", ".", "repmc_c", "(", "instr", ",", "marker", ",", "value", ",", "lenout", ",", "out", ")", "return", "stypes", ".", "toPythonString", "(", "out", ")" ]
32.48
13.84
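A usage sketch for repmc, assuming the SpiceyPy binding shown above; the example string mirrors the one in the CSPICE repmc_c documentation:

import spiceypy

# Replace the '#' marker with the character string '12.5'.
msg = spiceypy.repmc("Left endpoint exceeded # units.", "#", "12.5")
assert msg == "Left endpoint exceeded 12.5 units."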
def post_object_async(self, path, **kwds):
    """POST to an object."""
    return self.do_request_async(self.api_url + path, 'POST', **kwds)
[ "def", "post_object_async", "(", "self", ",", "path", ",", "*", "*", "kwds", ")", ":", "return", "self", ".", "do_request_async", "(", "self", ".", "api_url", "+", "path", ",", "'POST'", ",", "*", "*", "kwds", ")" ]
46.333333
10.333333
def get_network(self):
    """ Identify the connected network. This call returns a
    dictionary with keys chain_id, core_symbol and prefix
    """
    props = self.get_chain_properties()
    chain_id = props["chain_id"]
    for k, v in known_chains.items():
        if v["chain_id"] == chain_id:
            return v
    raise exceptions.UnknownNetworkException(
        "Connecting to unknown network (chain_id: {})!".format(props["chain_id"])
    )
[ "def", "get_network", "(", "self", ")", ":", "props", "=", "self", ".", "get_chain_properties", "(", ")", "chain_id", "=", "props", "[", "\"chain_id\"", "]", "for", "k", ",", "v", "in", "known_chains", ".", "items", "(", ")", ":", "if", "v", "[", "\"chain_id\"", "]", "==", "chain_id", ":", "return", "v", "raise", "exceptions", ".", "UnknownNetworkException", "(", "\"Connecting to unknown network (chain_id: {})!\"", ".", "format", "(", "props", "[", "\"chain_id\"", "]", ")", ")" ]
40.75
12.75
def __cancel(self, checkout_id, cancel_reason, **kwargs):
    """Call documentation: `/checkout/cancel
    <https://www.wepay.com/developer/reference/checkout#cancel>`_, plus extra
    keyword parameters:

    :keyword str access_token: will be used instead of instance's
        ``access_token``, with ``batch_mode=True`` will set `authorization`
        param to its value.

    :keyword bool batch_mode: turn on/off the batch_mode, see
        :class:`wepay.api.WePay`

    :keyword str batch_reference_id: `reference_id` param for batch call,
        see :class:`wepay.api.WePay`

    :keyword str api_version: WePay API version, see :class:`wepay.api.WePay`
    """
    params = {
        'checkout_id': checkout_id,
        'cancel_reason': cancel_reason
    }
    return self.make_call(self.__cancel, params, kwargs)
[ "def", "__cancel", "(", "self", ",", "checkout_id", ",", "cancel_reason", ",", "*", "*", "kwargs", ")", ":", "params", "=", "{", "'checkout_id'", ":", "checkout_id", ",", "'cancel_reason'", ":", "cancel_reason", "}", "return", "self", ".", "make_call", "(", "self", ".", "__cancel", ",", "params", ",", "kwargs", ")" ]
36.916667
20.541667
def relation_clear(r_id=None):
    ''' Clears any relation data already set on relation r_id '''
    settings = relation_get(rid=r_id,
                            unit=local_unit())
    for setting in settings:
        if setting not in ['public-address', 'private-address']:
            settings[setting] = None
    relation_set(relation_id=r_id, **settings)
[ "def", "relation_clear", "(", "r_id", "=", "None", ")", ":", "settings", "=", "relation_get", "(", "rid", "=", "r_id", ",", "unit", "=", "local_unit", "(", ")", ")", "for", "setting", "in", "settings", ":", "if", "setting", "not", "in", "[", "'public-address'", ",", "'private-address'", "]", ":", "settings", "[", "setting", "]", "=", "None", "relation_set", "(", "relation_id", "=", "r_id", ",", "*", "*", "settings", ")" ]
40.888889
11.333333
def getMenu(self):
    """Get manager interface section list from Squid Proxy Server

    @return: List of tuples (section, description, type)
    """
    data = self._retrieve('')
    info_list = []
    for line in data.splitlines():
        mobj = re.match(r'^\s*(\S.*\S)\s*\t\s*(\S.*\S)\s*\t\s*(\S.*\S)$', line)
        if mobj:
            info_list.append(mobj.groups())
    return info_list
[ "def", "getMenu", "(", "self", ")", ":", "data", "=", "self", ".", "_retrieve", "(", "''", ")", "info_list", "=", "[", "]", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "mobj", "=", "re", ".", "match", "(", "'^\\s*(\\S.*\\S)\\s*\\t\\s*(\\S.*\\S)\\s*\\t\\s*(\\S.*\\S)$'", ",", "line", ")", "if", "mobj", ":", "info_list", ".", "append", "(", "mobj", ".", "groups", "(", ")", ")", "return", "info_list" ]
34.153846
16.461538
def timestamp(x):
    """Get a timestamp from a date in python 3 and python 2"""
    if x.tzinfo is None:
        # Naive dates to utc
        x = x.replace(tzinfo=utc)
    if hasattr(x, 'timestamp'):
        return x.timestamp()
    else:
        return (x - datetime(1970, 1, 1, tzinfo=utc)).total_seconds()
[ "def", "timestamp", "(", "x", ")", ":", "if", "x", ".", "tzinfo", "is", "None", ":", "# Naive dates to utc", "x", "=", "x", ".", "replace", "(", "tzinfo", "=", "utc", ")", "if", "hasattr", "(", "x", ",", "'timestamp'", ")", ":", "return", "x", ".", "timestamp", "(", ")", "else", ":", "return", "(", "x", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "utc", ")", ")", ".", "total_seconds", "(", ")" ]
30.1
17.9
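A minimal sketch of the timestamp helper above, assuming it sits in a module with a `utc` tzinfo object in scope (here built from the standard library):

from datetime import datetime, timezone

utc = timezone.utc  # the helper expects a module-level `utc`

aware = datetime(2020, 1, 1, tzinfo=utc)
naive = datetime(2020, 1, 1)  # treated as UTC by the helper
assert timestamp(aware) == timestamp(naive) == 1577836800.0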
def pydict2xml(filename, metadata_dict, **kwargs):
    """Create an XML file.

    Takes a path to where the XML file should be created and a metadata
    dictionary.
    """
    try:
        f = open(filename, 'w')
        f.write(pydict2xmlstring(metadata_dict, **kwargs).encode('utf-8'))
        f.close()
    except:
        raise MetadataGeneratorException(
            'Failed to create an XML file. Filename: %s' % (filename)
        )
[ "def", "pydict2xml", "(", "filename", ",", "metadata_dict", ",", "*", "*", "kwargs", ")", ":", "try", ":", "f", "=", "open", "(", "filename", ",", "'w'", ")", "f", ".", "write", "(", "pydict2xmlstring", "(", "metadata_dict", ",", "*", "*", "kwargs", ")", ".", "encode", "(", "'utf-8'", ")", ")", "f", ".", "close", "(", ")", "except", ":", "raise", "MetadataGeneratorException", "(", "'Failed to create an XML file. Filename: %s'", "%", "(", "filename", ")", ")" ]
30.642857
18.857143
def rec_apply(func, n):
    """
    Used to determine parent directory n levels up by repeatedly applying
    os.path.dirname
    """
    if n > 1:
        rec_func = rec_apply(func, n - 1)
        return lambda x: func(rec_func(x))
    return func
[ "def", "rec_apply", "(", "func", ",", "n", ")", ":", "if", "n", ">", "1", ":", "rec_func", "=", "rec_apply", "(", "func", ",", "n", "-", "1", ")", "return", "lambda", "x", ":", "func", "(", "rec_func", "(", "x", ")", ")", "return", "func" ]
26.666667
9.333333
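A usage sketch for rec_apply, composing os.path.dirname three times to climb three directory levels (the path is hypothetical):

import os

three_up = rec_apply(os.path.dirname, 3)
assert three_up('/home/user/project/src/pkg/mod.py') == '/home/user/project'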
def dump(stream=None):
    """
    Dumps a string representation of `FILTERS` to a stream, normally an open
    file. If none is passed, `FILTERS` is dumped to a default location within
    the project.
    """
    if stream:
        stream.write(dumps())
    else:
        path = os.path.join(os.path.dirname(insights.__file__), _filename)
        with open(path, "wu") as f:
            f.write(dumps())
[ "def", "dump", "(", "stream", "=", "None", ")", ":", "if", "stream", ":", "stream", ".", "write", "(", "dumps", "(", ")", ")", "else", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "insights", ".", "__file__", ")", ",", "_filename", ")", "with", "open", "(", "path", ",", "\"wu\"", ")", "as", "f", ":", "f", ".", "write", "(", "dumps", "(", ")", ")" ]
32.833333
18.333333
def runprofilerandshow(funcname, profilepath, argv='', *args, **kwargs):
    '''
    Run a function's profiler and show it in a GUI visualisation using RunSnakeRun
    Note: can also use calibration for more exact results
    '''
    functionprofiler.runprofile(funcname + '(\'' + argv + '\')', profilepath, *args, **kwargs)
    print 'Showing profile (windows should open in the background)'
    sys.stdout.flush()
    functionprofiler.browseprofilegui(profilepath)
[ "def", "runprofilerandshow", "(", "funcname", ",", "profilepath", ",", "argv", "=", "''", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "functionprofiler", ".", "runprofile", "(", "funcname", "+", "'(\\''", "+", "argv", "+", "'\\')'", ",", "profilepath", ",", "*", "args", ",", "*", "*", "kwargs", ")", "print", "'Showing profile (windows should open in the background)'", "sys", ".", "stdout", ".", "flush", "(", ")", "functionprofiler", ".", "browseprofilegui", "(", "profilepath", ")" ]
56.25
32.75
def _set_esp(self, v, load=False):
    """
    Setter method for esp, mapped from YANG variable /routing_system/interface/ve/ipv6/interface_ospfv3_conf/authentication/ipsec_auth_key_config/esp (algorithm-type-esp)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_esp is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_esp() directly.

    YANG Description: Specify Encapsulating Security Payload (ESP) as the protocol to provide packet-level security.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """esp must be of a type compatible with algorithm-type-esp""",
            'defined-type': "brocade-ospfv3:algorithm-type-esp",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name="esp", rest_name="esp", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)""",
        })

    self.__esp = t
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_esp", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "unicode", ",", "restriction_type", "=", "\"dict_key\"", ",", "restriction_arg", "=", "{", "u'NULL'", ":", "{", "'value'", ":", "1", "}", "}", ",", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"esp\"", ",", "rest_name", "=", "\"esp\"", ",", "parent", "=", "self", ",", "choice", "=", "(", "u'ch-algorithm'", ",", "u'ca-esp-algorithm'", ")", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Specify Encapsulating Security Payload (ESP)'", ",", "u'cli-incomplete-command'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-ospfv3'", ",", "defining_module", "=", "'brocade-ospfv3'", ",", "yang_type", "=", "'algorithm-type-esp'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"esp must be of a type compatible with algorithm-type-esp\"\"\"", ",", "'defined-type'", ":", "\"brocade-ospfv3:algorithm-type-esp\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'NULL': {'value': 1}},), is_leaf=True, yang_name=\"esp\", rest_name=\"esp\", parent=self, choice=(u'ch-algorithm', u'ca-esp-algorithm'), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Specify Encapsulating Security Payload (ESP)', u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ospfv3', defining_module='brocade-ospfv3', yang_type='algorithm-type-esp', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__esp", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
93.25
48.708333
def gen_timeout_resend(attempts):
    """Generate the time in seconds in which DHCPDISCOVER will be retransmitted.

    [:rfc:`2131#section-3.1`]::

        might retransmit the
        DHCPREQUEST message four times, for a total delay of 60 seconds

    [:rfc:`2131#section-4.1`]::

        For example, in a 10Mb/sec Ethernet
        internetwork, the delay before the first retransmission SHOULD be 4
        seconds randomized by the value of a uniform random number chosen
        from the range -1 to +1.  Clients with clocks that provide resolution
        granularity of less than one second may choose a non-integer
        randomization value.  The delay before the next retransmission SHOULD
        be 8 seconds randomized by the value of a uniform number chosen from
        the range -1 to +1.  The retransmission delay SHOULD be doubled with
        subsequent retransmissions up to a maximum of 64 seconds.
    """
    timeout = 2 ** (attempts + 1) + random.uniform(-1, +1)
    logger.debug('next timeout resending will happen on %s',
                 future_dt_str(nowutc(), timeout))
    return timeout
[ "def", "gen_timeout_resend", "(", "attempts", ")", ":", "timeout", "=", "2", "**", "(", "attempts", "+", "1", ")", "+", "random", ".", "uniform", "(", "-", "1", ",", "+", "1", ")", "logger", ".", "debug", "(", "'next timeout resending will happen on %s'", ",", "future_dt_str", "(", "nowutc", "(", ")", ",", "timeout", ")", ")", "return", "timeout" ]
43.8
24.32
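The formula doubles the delay on each attempt with +/-1 seconds of jitter, matching the RFC 2131 schedule of roughly 4, 8, 16, 32, 64 seconds. A standalone sketch of just the arithmetic, without the logging dependencies of the function above:

import random

for attempts in range(1, 6):
    timeout = 2 ** (attempts + 1) + random.uniform(-1, +1)
    print(attempts, round(timeout, 2))  # ~4, ~8, ~16, ~32, ~64 seconds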
def choice_default_invalidator(self, obj):
    """Invalidate cached items when the Choice changes."""
    invalid = [('Question', obj.question_id, True)]
    for pk in obj.voters.values_list('pk', flat=True):
        invalid.append(('User', pk, False))
    return invalid
[ "def", "choice_default_invalidator", "(", "self", ",", "obj", ")", ":", "invalid", "=", "[", "(", "'Question'", ",", "obj", ".", "question_id", ",", "True", ")", "]", "for", "pk", "in", "obj", ".", "voters", ".", "values_list", "(", "'pk'", ",", "flat", "=", "True", ")", ":", "invalid", ".", "append", "(", "(", "'User'", ",", "pk", ",", "False", ")", ")", "return", "invalid" ]
47.833333
10
def search_user_directory(self, term: str) -> List[User]:
    """ Search user directory for a given term, returning a list of users

    Args:
        term: term to be searched for

    Returns:
        user_list: list of users returned by server-side search
    """
    response = self.api._send(
        'POST',
        '/user_directory/search',
        {
            'search_term': term,
        },
    )
    try:
        return [
            User(self.api, _user['user_id'], _user['display_name'])
            for _user in response['results']
        ]
    except KeyError:
        return []
[ "def", "search_user_directory", "(", "self", ",", "term", ":", "str", ")", "->", "List", "[", "User", "]", ":", "response", "=", "self", ".", "api", ".", "_send", "(", "'POST'", ",", "'/user_directory/search'", ",", "{", "'search_term'", ":", "term", ",", "}", ",", ")", "try", ":", "return", "[", "User", "(", "self", ".", "api", ",", "_user", "[", "'user_id'", "]", ",", "_user", "[", "'display_name'", "]", ")", "for", "_user", "in", "response", "[", "'results'", "]", "]", "except", "KeyError", ":", "return", "[", "]" ]
30
18
def p_systemcall_signed(self, p):  # for $signed system task
    'systemcall : DOLLER SIGNED LPAREN sysargs RPAREN'
    p[0] = SystemCall(p[2], p[4], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
[ "def", "p_systemcall_signed", "(", "self", ",", "p", ")", ":", "# for $signed system task", "p", "[", "0", "]", "=", "SystemCall", "(", "p", "[", "2", "]", ",", "p", "[", "4", "]", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ")", "p", ".", "set_lineno", "(", "0", ",", "p", ".", "lineno", "(", "1", ")", ")" ]
52.75
14.75
def prepare(doc):
    """Sets caption_found and plot_found to False and resets listings_counter."""
    doc.caption_found = False
    doc.plot_found = False
    doc.listings_counter = 0
[ "def", "prepare", "(", "doc", ")", ":", "doc", ".", "caption_found", "=", "False", "doc", ".", "plot_found", "=", "False", "doc", ".", "listings_counter", "=", "0" ]
33.4
12
def get_env_spec(self, filters=None):
    """
    Get the spec of the current env.
    The spec will hold the info about all the domains and
    networks associated with this env.

    Args:
        filters (list): list of paths to keys that should be removed from
            the init file

    Returns:
        dict: the spec of the current env
    """
    spec = {
        'domains': {
            vm_name: deepcopy(vm_object.spec)
            for vm_name, vm_object in self._vms.viewitems()
        },
        'nets': {
            net_name: deepcopy(net_object.spec)
            for net_name, net_object in self._nets.viewitems()
        }
    }

    if filters:
        utils.filter_spec(spec, filters)

    return spec
[ "def", "get_env_spec", "(", "self", ",", "filters", "=", "None", ")", ":", "spec", "=", "{", "'domains'", ":", "{", "vm_name", ":", "deepcopy", "(", "vm_object", ".", "spec", ")", "for", "vm_name", ",", "vm_object", "in", "self", ".", "_vms", ".", "viewitems", "(", ")", "}", ",", "'nets'", ":", "{", "net_name", ":", "deepcopy", "(", "net_object", ".", "spec", ")", "for", "net_name", ",", "net_object", "in", "self", ".", "_nets", ".", "viewitems", "(", ")", "}", "}", "if", "filters", ":", "utils", ".", "filter_spec", "(", "spec", ",", "filters", ")", "return", "spec" ]
30.142857
18.785714
def tdist_ci(x, df, stderr, level):
    """
    Confidence Intervals using the t-distribution
    """
    q = (1 + level)/2
    delta = stats.t.ppf(q, df) * stderr
    return x - delta, x + delta
[ "def", "tdist_ci", "(", "x", ",", "df", ",", "stderr", ",", "level", ")", ":", "q", "=", "(", "1", "+", "level", ")", "/", "2", "delta", "=", "stats", ".", "t", ".", "ppf", "(", "q", ",", "df", ")", "*", "stderr", "return", "x", "-", "delta", ",", "x", "+", "delta" ]
27
6.142857
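A usage sketch for tdist_ci, assuming scipy.stats is imported as in the function's module; the numbers are illustrative:

from scipy import stats

# 95% CI around an estimate of 10.0 with stderr 1.5 and 20 degrees of freedom;
# t.ppf(0.975, df=20) is about 2.086, so the interval is roughly (6.87, 13.13).
lo, hi = tdist_ci(10.0, 20, 1.5, 0.95)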
def compute_inj_optimal_snr(workflow, inj_file, precalc_psd_files, out_dir,
                            tags=None):
    "Set up a job for computing optimal SNRs of a sim_inspiral file."
    if tags is None:
        tags = []

    node = Executable(workflow.cp, 'optimal_snr', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--input-file', inj_file)
    node.add_input_list_opt('--time-varying-psds', precalc_psd_files)
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')

    workflow += node
    return node.output_files[0]
[ "def", "compute_inj_optimal_snr", "(", "workflow", ",", "inj_file", ",", "precalc_psd_files", ",", "out_dir", ",", "tags", "=", "None", ")", ":", "if", "tags", "is", "None", ":", "tags", "=", "[", "]", "node", "=", "Executable", "(", "workflow", ".", "cp", ",", "'optimal_snr'", ",", "ifos", "=", "workflow", ".", "ifos", ",", "out_dir", "=", "out_dir", ",", "tags", "=", "tags", ")", ".", "create_node", "(", ")", "node", ".", "add_input_opt", "(", "'--input-file'", ",", "inj_file", ")", "node", ".", "add_input_list_opt", "(", "'--time-varying-psds'", ",", "precalc_psd_files", ")", "node", ".", "new_output_file_opt", "(", "workflow", ".", "analysis_time", ",", "'.xml'", ",", "'--output-file'", ")", "workflow", "+=", "node", "return", "node", ".", "output_files", "[", "0", "]" ]
45.923077
23.307692
def bytes2unicode(x, encoding='utf-8', errors='strict'):
    """
    Convert a C{bytes} to a unicode string.

    @param x: a string of type C{bytes}, or C{None}; an already-decoded
        unicode string is passed through unchanged.
    @param encoding: an optional codec, default: 'utf-8'
    @param errors: error handling scheme, default 'strict'

    @return: a unicode string of type C{unicode} on Python 2, or C{str} on
        Python 3.
    """
    if isinstance(x, (text_type, type(None))):
        return x
    return text_type(x, encoding, errors)
[ "def", "bytes2unicode", "(", "x", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'strict'", ")", ":", "if", "isinstance", "(", "x", ",", "(", "text_type", ",", "type", "(", "None", ")", ")", ")", ":", "return", "x", "return", "text_type", "(", "x", ",", "encoding", ",", "errors", ")" ]
37.5
13.071429
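A minimal sketch of bytes2unicode on Python 3, assuming text_type is bound to str (as the six-style alias in its module would be):

text_type = str  # assumption: matches the module's six-style alias

assert bytes2unicode(b'caf\xc3\xa9') == 'café'
assert bytes2unicode('already text') == 'already text'
assert bytes2unicode(None) is None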
def zone_get(name, resource_group, **kwargs):
    '''
    .. versionadded:: Fluorine

    Get a dictionary representing a DNS zone's properties, but not the
    record sets within the zone.

    :param name: The DNS zone to get.

    :param resource_group: The name of the resource group.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_dns.zone_get myzone testgroup
    '''
    dnsconn = __utils__['azurearm.get_client']('dns', **kwargs)
    try:
        zone = dnsconn.zones.get(
            zone_name=name,
            resource_group_name=resource_group
        )
        result = zone.as_dict()
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('dns', str(exc), **kwargs)
        result = {'error': str(exc)}

    return result
[ "def", "zone_get", "(", "name", ",", "resource_group", ",", "*", "*", "kwargs", ")", ":", "dnsconn", "=", "__utils__", "[", "'azurearm.get_client'", "]", "(", "'dns'", ",", "*", "*", "kwargs", ")", "try", ":", "zone", "=", "dnsconn", ".", "zones", ".", "get", "(", "zone_name", "=", "name", ",", "resource_group_name", "=", "resource_group", ")", "result", "=", "zone", ".", "as_dict", "(", ")", "except", "CloudError", "as", "exc", ":", "__utils__", "[", "'azurearm.log_cloud_error'", "]", "(", "'dns'", ",", "str", "(", "exc", ")", ",", "*", "*", "kwargs", ")", "result", "=", "{", "'error'", ":", "str", "(", "exc", ")", "}", "return", "result" ]
24.290323
24.096774
def get_file(self, attr_name):
    '''Return absolute path to logging file for obj's attribute.'''
    return os.path.abspath(os.path.join(self.folder,
                                        "{}.log".format(attr_name)))
[ "def", "get_file", "(", "self", ",", "attr_name", ")", ":", "return", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "folder", ",", "\"{}.log\"", ".", "format", "(", "attr_name", ")", ")", ")" ]
57.5
22.5
def twostep(script, iterations=3, angle_threshold=60, normal_steps=20,
            fit_steps=20, selected=False):
    """ Two Step Smoothing, a feature preserving/enhancing fairing filter.

    It is based on a Normal Smoothing step where similar normals are averaged
    together and a step where the vertexes are fitted on the new normals.

    Based on:
    A. Belyaev and Y. Ohtake, "A Comparison of Mesh Smoothing Methods"
    Proc. Israel-Korea Bi-National Conf. Geometric Modeling and Computer
    Graphics, pp. 83-87, 2003.

    Args:
        script: the FilterScript object or script filename to write
            the filter to.
        iterations (int): The number of times that the whole algorithm
            (normal smoothing + vertex fitting) is iterated.
        angle_threshold (float): Specify a threshold angle (0..90) for
            features that you want to be preserved. Features forming angles
            LARGER than the specified threshold will be preserved.
            0 -> no smoothing
            90 -> all faces will be smoothed
        normal_steps (int): Number of iterations of normal smoothing step.
            The larger the better and (the slower)
        fit_steps (int): Number of iterations of the vertex fitting procedure
        selected (bool): If selected the filter is performed only on the
            selected faces

    Layer stack:
        No impacts

    MeshLab versions:
        2016.12
        1.3.4BETA
    """
    filter_xml = ''.join([
        '  <filter name="TwoStep Smooth">\n',
        '    <Param name="stepSmoothNum" ',
        'value="{:d}" '.format(iterations),
        'description="Smoothing steps" ',
        'type="RichInt" ',
        '/>\n',
        '    <Param name="normalThr" ',
        'value="{}" '.format(angle_threshold),
        'description="Feature Angle Threshold (deg)" ',
        'type="RichFloat" ',
        '/>\n',
        '    <Param name="stepNormalNum" ',
        'value="{:d}" '.format(normal_steps),
        'description="Normal Smoothing steps" ',
        'type="RichInt" ',
        '/>\n',
        '    <Param name="stepFitNum" ',
        'value="{:d}" '.format(fit_steps),
        'description="Vertex Fitting steps" ',
        'type="RichInt" ',
        '/>\n',
        '    <Param name="Selected" ',
        'value="{}" '.format(str(selected).lower()),
        'description="Affect only selected faces" ',
        'type="RichBool" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    return None
[ "def", "twostep", "(", "script", ",", "iterations", "=", "3", ",", "angle_threshold", "=", "60", ",", "normal_steps", "=", "20", ",", "fit_steps", "=", "20", ",", "selected", "=", "False", ")", ":", "filter_xml", "=", "''", ".", "join", "(", "[", "' <filter name=\"TwoStep Smooth\">\\n'", ",", "' <Param name=\"stepSmoothNum\" '", ",", "'value=\"{:d}\" '", ".", "format", "(", "iterations", ")", ",", "'description=\"Smoothing steps\" '", ",", "'type=\"RichInt\" '", ",", "'/>\\n'", ",", "' <Param name=\"normalThr\" '", ",", "'value=\"{}\" '", ".", "format", "(", "angle_threshold", ")", ",", "'description=\"Feature Angle Threshold (deg)\" '", ",", "'type=\"RichFloat\" '", ",", "'/>\\n'", ",", "' <Param name=\"stepNormalNum\" '", ",", "'value=\"{:d}\" '", ".", "format", "(", "normal_steps", ")", ",", "'description=\"Normal Smoothing steps\" '", ",", "'type=\"RichInt\" '", ",", "'/>\\n'", ",", "' <Param name=\"stepFitNum\" '", ",", "'value=\"{:d}\" '", ".", "format", "(", "fit_steps", ")", ",", "'description=\"Vertex Fitting steps\" '", ",", "'type=\"RichInt\" '", ",", "'/>\\n'", ",", "' <Param name=\"Selected\" '", ",", "'value=\"{}\" '", ".", "format", "(", "str", "(", "selected", ")", ".", "lower", "(", ")", ")", ",", "'description=\"Affect only selected faces\" '", ",", "'type=\"RichBool\" '", ",", "'/>\\n'", ",", "' </filter>\\n'", "]", ")", "util", ".", "write_filter", "(", "script", ",", "filter_xml", ")", "return", "None" ]
37.378788
18.333333
def tsave(
        text,
        font=DEFAULT_FONT,
        filename="art",
        chr_ignore=True,
        print_status=True):
    r"""
    Save ascii art (support \n).

    :param text: input text
    :type text:str
    :param font: input font
    :type font:str
    :param filename: output file name
    :type filename:str
    :param chr_ignore: ignore not supported character
    :type chr_ignore:bool
    :param print_status: save message print flag
    :type print_status:bool
    :return: operation status as dict
    """
    try:
        if isinstance(text, str) is False:
            raise Exception(TEXT_TYPE_ERROR)
        files_list = os.listdir(os.getcwd())
        extension = ".txt"
        splitted_filename = filename.split(".")
        name = splitted_filename[0]
        if len(splitted_filename) > 1:
            extension = "." + splitted_filename[1]
        index = 2
        test_name = name
        while(True):
            if test_name + extension in files_list:
                test_name = name + str(index)
                index = index + 1
            else:
                break
        if font.lower() in TEST_FILTERED_FONTS:
            file = codecs.open(test_name + extension, "w", encoding='utf-8')
        else:
            file = open(test_name + extension, "w")
        result = text2art(text, font=font, chr_ignore=chr_ignore)
        file.write(result)
        file.close()
        if print_status:
            print("Saved! \nFilename: " + test_name + extension)
        return {"Status": True, "Message": "OK"}
    except Exception as e:
        return {"Status": False, "Message": str(e)}
[ "def", "tsave", "(", "text", ",", "font", "=", "DEFAULT_FONT", ",", "filename", "=", "\"art\"", ",", "chr_ignore", "=", "True", ",", "print_status", "=", "True", ")", ":", "try", ":", "if", "isinstance", "(", "text", ",", "str", ")", "is", "False", ":", "raise", "Exception", "(", "TEXT_TYPE_ERROR", ")", "files_list", "=", "os", ".", "listdir", "(", "os", ".", "getcwd", "(", ")", ")", "extension", "=", "\".txt\"", "splitted_filename", "=", "filename", ".", "split", "(", "\".\"", ")", "name", "=", "splitted_filename", "[", "0", "]", "if", "len", "(", "splitted_filename", ")", ">", "1", ":", "extension", "=", "\".\"", "+", "splitted_filename", "[", "1", "]", "index", "=", "2", "test_name", "=", "name", "while", "(", "True", ")", ":", "if", "test_name", "+", "extension", "in", "files_list", ":", "test_name", "=", "name", "+", "str", "(", "index", ")", "index", "=", "index", "+", "1", "else", ":", "break", "if", "font", ".", "lower", "(", ")", "in", "TEST_FILTERED_FONTS", ":", "file", "=", "codecs", ".", "open", "(", "test_name", "+", "extension", ",", "\"w\"", ",", "encoding", "=", "'utf-8'", ")", "else", ":", "file", "=", "open", "(", "test_name", "+", "extension", ",", "\"w\"", ")", "result", "=", "text2art", "(", "text", ",", "font", "=", "font", ",", "chr_ignore", "=", "chr_ignore", ")", "file", ".", "write", "(", "result", ")", "file", ".", "close", "(", ")", "if", "print_status", ":", "print", "(", "\"Saved! \\nFilename: \"", "+", "test_name", "+", "extension", ")", "return", "{", "\"Status\"", ":", "True", ",", "\"Message\"", ":", "\"OK\"", "}", "except", "Exception", "as", "e", ":", "return", "{", "\"Status\"", ":", "False", ",", "\"Message\"", ":", "str", "(", "e", ")", "}" ]
31.26
14.92
def create_media_service_rg(access_token, subscription_id, rgname, location, stoname, msname):
    '''Create a media service in a resource group.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        rgname (str): Azure resource group name.
        location (str): Azure data center location. E.g. westus.
        stoname (str): Azure storage account name.
        msname (str): Media service name.

    Returns:
        HTTP response. JSON body.
    '''
    endpoint = ''.join([get_rm_endpoint(),
                        '/subscriptions/', subscription_id,
                        '/resourceGroups/', rgname,
                        '/providers/microsoft.media/mediaservices/', msname,
                        '?api-version=', MEDIA_API])
    ms_body = {'name': msname}
    ms_body['location'] = location
    sub_id_str = '/subscriptions/' + subscription_id + '/resourceGroups/' + rgname + \
        '/providers/Microsoft.Storage/storageAccounts/' + stoname
    storage_account = {'id': sub_id_str}
    storage_account['isPrimary'] = True
    properties = {'storageAccounts': [storage_account]}
    ms_body['properties'] = properties
    body = json.dumps(ms_body)
    return do_put(endpoint, body, access_token)
[ "def", "create_media_service_rg", "(", "access_token", ",", "subscription_id", ",", "rgname", ",", "location", ",", "stoname", ",", "msname", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourceGroups/'", ",", "rgname", ",", "'/providers/microsoft.media/mediaservices/'", ",", "msname", ",", "'?api-version='", ",", "MEDIA_API", "]", ")", "ms_body", "=", "{", "'name'", ":", "msname", "}", "ms_body", "[", "'location'", "]", "=", "location", "sub_id_str", "=", "'/subscriptions/'", "+", "subscription_id", "+", "'/resourceGroups/'", "+", "rgname", "+", "'/providers/Microsoft.Storage/storageAccounts/'", "+", "stoname", "storage_account", "=", "{", "'id'", ":", "sub_id_str", "}", "storage_account", "[", "'isPrimary'", "]", "=", "True", "properties", "=", "{", "'storageAccounts'", ":", "[", "storage_account", "]", "}", "ms_body", "[", "'properties'", "]", "=", "properties", "body", "=", "json", ".", "dumps", "(", "ms_body", ")", "return", "do_put", "(", "endpoint", ",", "body", ",", "access_token", ")" ]
43.724138
18.068966
def get_schema(self, filename):
    """
    Guess schema using messytables
    """
    table_set = self.read_file(filename)

    # Have I been able to read the filename
    if table_set is None:
        return []

    # Get the first table as rowset
    row_set = table_set.tables[0]

    offset, headers = headers_guess(row_set.sample)
    row_set.register_processor(headers_processor(headers))
    row_set.register_processor(offset_processor(offset + 1))
    types = type_guess(row_set.sample, strict=True)

    # Get a sample as well..
    sample = next(row_set.sample)

    clean = lambda v: str(v) if not isinstance(v, str) else v
    schema = []
    for i, h in enumerate(headers):
        schema.append([h, str(types[i]), clean(sample[i].value)])

    return schema
[ "def", "get_schema", "(", "self", ",", "filename", ")", ":", "table_set", "=", "self", ".", "read_file", "(", "filename", ")", "# Have I been able to read the filename", "if", "table_set", "is", "None", ":", "return", "[", "]", "# Get the first table as rowset", "row_set", "=", "table_set", ".", "tables", "[", "0", "]", "offset", ",", "headers", "=", "headers_guess", "(", "row_set", ".", "sample", ")", "row_set", ".", "register_processor", "(", "headers_processor", "(", "headers", ")", ")", "row_set", ".", "register_processor", "(", "offset_processor", "(", "offset", "+", "1", ")", ")", "types", "=", "type_guess", "(", "row_set", ".", "sample", ",", "strict", "=", "True", ")", "# Get a sample as well..", "sample", "=", "next", "(", "row_set", ".", "sample", ")", "clean", "=", "lambda", "v", ":", "str", "(", "v", ")", "if", "not", "isinstance", "(", "v", ",", "str", ")", "else", "v", "schema", "=", "[", "]", "for", "i", ",", "h", "in", "enumerate", "(", "headers", ")", ":", "schema", ".", "append", "(", "[", "h", ",", "str", "(", "types", "[", "i", "]", ")", ",", "clean", "(", "sample", "[", "i", "]", ".", "value", ")", "]", ")", "return", "schema" ]
30.793103
15.827586
def gen_binder_rst(fpath, binder_conf, gallery_conf):
    """Generate the RST + link for the Binder badge.

    Parameters
    ----------
    fpath: str
        The path to the `.py` file for which a Binder badge will be generated.
    binder_conf: dict or None
        If a dictionary it must have the following keys:

        'binderhub_url': The URL of the BinderHub instance that's running a
            Binder service.
        'org': The GitHub organization to which the documentation will be
            pushed.
        'repo': The GitHub repository to which the documentation will be
            pushed.
        'branch': The Git branch on which the documentation exists (e.g.,
            gh-pages).
        'dependencies': A list of paths to dependency files that match the
            Binderspec.

    Returns
    -------
    rst : str
        The reStructuredText for the Binder badge that links to this file.
    """
    binder_conf = check_binder_conf(binder_conf)
    binder_url = gen_binder_url(fpath, binder_conf, gallery_conf)

    rst = (
        "\n"
        "  .. container:: binder-badge\n\n"
        "    .. image:: https://mybinder.org/badge_logo.svg\n"
        "      :target: {}\n"
        "      :width: 150 px\n").format(binder_url)
    return rst
[ "def", "gen_binder_rst", "(", "fpath", ",", "binder_conf", ",", "gallery_conf", ")", ":", "binder_conf", "=", "check_binder_conf", "(", "binder_conf", ")", "binder_url", "=", "gen_binder_url", "(", "fpath", ",", "binder_conf", ",", "gallery_conf", ")", "rst", "=", "(", "\"\\n\"", "\" .. container:: binder-badge\\n\\n\"", "\" .. image:: https://mybinder.org/badge_logo.svg\\n\"", "\" :target: {}\\n\"", "\" :width: 150 px\\n\"", ")", ".", "format", "(", "binder_url", ")", "return", "rst" ]
34.416667
24.166667
def unique_values(func):
    """
    Wrap a function returning an iterable such that the resulting iterable
    only ever yields unique items.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))

    return wrapper
[ "def", "unique_values", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "unique_everseen", "(", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "return", "wrapper" ]
24.181818
18.363636
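A usage sketch for unique_values, assuming unique_everseen comes from more_itertools (the classic itertools-recipes version behaves the same):

from functools import wraps
from more_itertools import unique_everseen

@unique_values
def tags():
    return ['a', 'b', 'a', 'c', 'b']

assert list(tags()) == ['a', 'b', 'c']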
def screeplot(self, type="barplot", **kwargs):
    """
    Produce the scree plot.

    Library ``matplotlib`` is required for this function.

    :param str type: either ``"barplot"`` or ``"lines"``.
    """
    # check for matplotlib. exit if absent.
    is_server = kwargs.pop("server")
    if kwargs:
        raise ValueError("Unknown arguments %s to screeplot()" % ", ".join(kwargs.keys()))
    try:
        import matplotlib
        if is_server:
            matplotlib.use('Agg', warn=False)
        import matplotlib.pyplot as plt
    except ImportError:
        print("matplotlib is required for this function!")
        return

    variances = [s ** 2 for s in self._model_json['output']['importance'].cell_values[0][1:]]
    plt.xlabel('Components')
    plt.ylabel('Variances')
    plt.title('Scree Plot')
    plt.xticks(list(range(1, len(variances) + 1)))
    if type == "barplot":
        plt.bar(list(range(1, len(variances) + 1)), variances)
    elif type == "lines":
        plt.plot(list(range(1, len(variances) + 1)), variances, 'b--')
    if not is_server:
        plt.show()
[ "def", "screeplot", "(", "self", ",", "type", "=", "\"barplot\"", ",", "*", "*", "kwargs", ")", ":", "# check for matplotlib. exit if absent.", "is_server", "=", "kwargs", ".", "pop", "(", "\"server\"", ")", "if", "kwargs", ":", "raise", "ValueError", "(", "\"Unknown arguments %s to screeplot()\"", "%", "\", \"", ".", "join", "(", "kwargs", ".", "keys", "(", ")", ")", ")", "try", ":", "import", "matplotlib", "if", "is_server", ":", "matplotlib", ".", "use", "(", "'Agg'", ",", "warn", "=", "False", ")", "import", "matplotlib", ".", "pyplot", "as", "plt", "except", "ImportError", ":", "print", "(", "\"matplotlib is required for this function!\"", ")", "return", "variances", "=", "[", "s", "**", "2", "for", "s", "in", "self", ".", "_model_json", "[", "'output'", "]", "[", "'importance'", "]", ".", "cell_values", "[", "0", "]", "[", "1", ":", "]", "]", "plt", ".", "xlabel", "(", "'Components'", ")", "plt", ".", "ylabel", "(", "'Variances'", ")", "plt", ".", "title", "(", "'Scree Plot'", ")", "plt", ".", "xticks", "(", "list", "(", "range", "(", "1", ",", "len", "(", "variances", ")", "+", "1", ")", ")", ")", "if", "type", "==", "\"barplot\"", ":", "plt", ".", "bar", "(", "list", "(", "range", "(", "1", ",", "len", "(", "variances", ")", "+", "1", ")", ")", ",", "variances", ")", "elif", "type", "==", "\"lines\"", ":", "plt", ".", "plot", "(", "list", "(", "range", "(", "1", ",", "len", "(", "variances", ")", "+", "1", ")", ")", ",", "variances", ",", "'b--'", ")", "if", "not", "is_server", ":", "plt", ".", "show", "(", ")" ]
38.3
18.7
def deserialize_non_framed_values(stream, header, verifier=None):
    """Deserializes the IV and body length from a non-framed stream.

    :param stream: Source data stream
    :type stream: io.BytesIO
    :param header: Deserialized header
    :type header: aws_encryption_sdk.structures.MessageHeader
    :param verifier: Signature verifier object (optional)
    :type verifier: aws_encryption_sdk.internal.crypto.Verifier
    :returns: IV and Data Length values for body
    :rtype: tuple of bytes and int
    """
    _LOGGER.debug("Starting non-framed body iv/tag deserialization")
    (data_iv, data_length) = unpack_values(">{}sQ".format(header.algorithm.iv_len), stream, verifier)
    return data_iv, data_length
[ "def", "deserialize_non_framed_values", "(", "stream", ",", "header", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting non-framed body iv/tag deserialization\"", ")", "(", "data_iv", ",", "data_length", ")", "=", "unpack_values", "(", "\">{}sQ\"", ".", "format", "(", "header", ".", "algorithm", ".", "iv_len", ")", ",", "stream", ",", "verifier", ")", "return", "data_iv", ",", "data_length" ]
47.066667
17
def script_status(self, script_id):
    """
    Returns the run status of a stored script as well as the
    current values of parameters 0 to 9.

    script_id:= id of stored script.

    The run status may be

    . .
    PI_SCRIPT_INITING
    PI_SCRIPT_HALTED
    PI_SCRIPT_RUNNING
    PI_SCRIPT_WAITING
    PI_SCRIPT_FAILED
    . .

    The return value is a tuple of run status and a list of
    the 10 parameters.  On error the run status will be negative
    and the parameter list will be empty.

    ...
    (s, pars) = pi.script_status(sid)
    ...
    """
    res = yield from self._pigpio_aio_command(_PI_CMD_PROCP, script_id, 0)
    bytes = u2i(res)
    if bytes > 0:
        # Fixme : this should be the same as _rxbuf
        # data = self._rxbuf(bytes)
        data = yield from self._loop.sock_recv(self.s, bytes)
        while len(data) < bytes:
            b = yield from self._loop.sock_recv(self.s, bytes-len(data))
            data.extend(b)
        pars = struct.unpack('11i', _str(data))
        status = pars[0]
        params = pars[1:]
    else:
        status = bytes
        params = ()
    return status, params
[ "def", "script_status", "(", "self", ",", "script_id", ")", ":", "res", "=", "yield", "from", "self", ".", "_pigpio_aio_command", "(", "_PI_CMD_PROCP", ",", "script_id", ",", "0", ")", "bytes", "=", "u2i", "(", "res", ")", "if", "bytes", ">", "0", ":", "# Fixme : this sould be the same a _rxbuf", "# data = self._rxbuf(bytes)", "data", "=", "yield", "from", "self", ".", "_loop", ".", "sock_recv", "(", "self", ".", "s", ",", "bytes", ")", "while", "len", "(", "data", ")", "<", "bytes", ":", "b", "=", "yield", "from", "self", ".", "_loop", ".", "sock_recv", "(", "self", ".", "s", ",", "bytes", "-", "len", "(", "data", ")", ")", "data", ".", "extend", "(", "b", ")", "pars", "=", "struct", ".", "unpack", "(", "'11i'", ",", "_str", "(", "data", ")", ")", "status", "=", "pars", "[", "0", "]", "params", "=", "pars", "[", "1", ":", "]", "else", ":", "status", "=", "bytes", "params", "=", "(", ")", "return", "status", ",", "params" ]
28.159091
19.977273
def _get_spinner(self, spinner): """Extracts spinner value from options and returns value containing spinner frames and interval, defaults to 'dots' spinner. Parameters ---------- spinner : dict, str Contains spinner value or type of spinner to be used Returns ------- dict Contains frames and interval defining spinner """ default_spinner = Spinners['dots'].value if spinner and type(spinner) == dict: return spinner if is_supported(): if all([is_text_type(spinner), spinner in Spinners.__members__]): return Spinners[spinner].value else: return default_spinner else: return Spinners['line'].value
[ "def", "_get_spinner", "(", "self", ",", "spinner", ")", ":", "default_spinner", "=", "Spinners", "[", "'dots'", "]", ".", "value", "if", "spinner", "and", "type", "(", "spinner", ")", "==", "dict", ":", "return", "spinner", "if", "is_supported", "(", ")", ":", "if", "all", "(", "[", "is_text_type", "(", "spinner", ")", ",", "spinner", "in", "Spinners", ".", "__members__", "]", ")", ":", "return", "Spinners", "[", "spinner", "]", ".", "value", "else", ":", "return", "default_spinner", "else", ":", "return", "Spinners", "[", "'line'", "]", ".", "value" ]
32.708333
18.166667
def depricated_name(newmethod):
    """
    Decorator for warning the user of deprecated functions before use.

    Args:
        newmethod (str): Name of method to use instead.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            warnings.simplefilter('always', DeprecationWarning)
            warnings.warn(
                "Function {} is deprecated, please use {} instead.".format(func.__name__, newmethod),
                category=DeprecationWarning, stacklevel=2
            )
            warnings.simplefilter('default', DeprecationWarning)
            return func(*args, **kwargs)
        return wrapper
    return decorator
[ "def", "depricated_name", "(", "newmethod", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "warnings", ".", "simplefilter", "(", "'always'", ",", "DeprecationWarning", ")", "warnings", ".", "warn", "(", "\"Function {} is depricated, please use {} instead.\"", ".", "format", "(", "func", ".", "__name__", ",", "newmethod", ")", ",", "category", "=", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "warnings", ".", "simplefilter", "(", "'default'", ",", "DeprecationWarning", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
34.894737
19.210526
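A self-contained usage sketch of the decorator above; the replacement name 'add_v2' is hypothetical, and the body simply restates the record so the example runs on its own:

import warnings
from functools import wraps

def depricated_name(newmethod):
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            warnings.simplefilter('always', DeprecationWarning)
            warnings.warn(
                "Function {} is deprecated, please use {} instead.".format(func.__name__, newmethod),
                category=DeprecationWarning, stacklevel=2)
            warnings.simplefilter('default', DeprecationWarning)
            return func(*args, **kwargs)
        return wrapper
    return decorator

@depricated_name('add_v2')  # 'add_v2' is a hypothetical replacement name
def add(a, b):
    return a + b

print(add(1, 2))  # 3, with a DeprecationWarning emitted on stderr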
def focus_cb(self, viewer, channel): """ Callback from the reference viewer shell when the focus changes between channels. """ chname = channel.name if self.active != chname: # focus has shifted to a different channel than our idea # of the active one self.active = chname self.set_info("Focus is now in channel '%s'" % ( self.active)) return True
[ "def", "focus_cb", "(", "self", ",", "viewer", ",", "channel", ")", ":", "chname", "=", "channel", ".", "name", "if", "self", ".", "active", "!=", "chname", ":", "# focus has shifted to a different channel than our idea", "# of the active one", "self", ".", "active", "=", "chname", "self", ".", "set_info", "(", "\"Focus is now in channel '%s'\"", "%", "(", "self", ".", "active", ")", ")", "return", "True" ]
32.5
14.642857
def is_async_call(func): '''inspect.iscoroutinefunction that looks through partials.''' while isinstance(func, partial): func = func.func return inspect.iscoroutinefunction(func)
[ "def", "is_async_call", "(", "func", ")", ":", "while", "isinstance", "(", "func", ",", "partial", ")", ":", "func", "=", "func", ".", "func", "return", "inspect", ".", "iscoroutinefunction", "(", "func", ")" ]
38.8
13.2
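A self-contained usage sketch showing why the partial-unwrapping loop matters; `inspect.iscoroutinefunction` alone would return False for a partial:

import inspect
from functools import partial

def is_async_call(func):
    '''inspect.iscoroutinefunction that looks through partials.'''
    while isinstance(func, partial):
        func = func.func
    return inspect.iscoroutinefunction(func)

async def fetch(url, timeout):
    ...

print(is_async_call(partial(fetch, timeout=5)))  # True: the partial is unwrapped
print(is_async_call(print))                      # False: plain function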
def generate_hash_comment(file_path):
    """
    Read the file at the given file_path and return a string of the format

        # SHA1:da39a3ee5e6b4b0d3255bfef95601890afd80709

    which is the hex representation of the SHA1 hash of the file content
    """
    with open(file_path, 'rb') as fp:
        hexdigest = hashlib.sha1(fp.read().strip()).hexdigest()
    return "# SHA1:{0}\n".format(hexdigest)
[ "def", "generate_hash_comment", "(", "file_path", ")", ":", "with", "open", "(", "file_path", ",", "'rb'", ")", "as", "fp", ":", "hexdigest", "=", "hashlib", ".", "sha1", "(", "fp", ".", "read", "(", ")", ".", "strip", "(", ")", ")", ".", "hexdigest", "(", ")", "return", "\"# SHA1:{0}\\n\"", ".", "format", "(", "hexdigest", ")" ]
33.454545
15.090909
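A runnable sketch of the record above; 'requirements.in' is a throwaway file created only for the demo:

import hashlib

def generate_hash_comment(file_path):
    with open(file_path, 'rb') as fp:
        hexdigest = hashlib.sha1(fp.read().strip()).hexdigest()
    return "# SHA1:{0}\n".format(hexdigest)

# hypothetical input file, written here so the example is self-contained
with open('requirements.in', 'wb') as fp:
    fp.write(b'requests==2.31.0\n')
print(generate_hash_comment('requirements.in'), end='')  # "# SHA1:<40 hex chars>"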
def set_y2label(self, s, delay_draw=False):
    "set plot y2label"
    self.conf.relabel(y2label=s, delay_draw=delay_draw)
[ "def", "set_y2label", "(", "self", ",", "s", ",", "delay_draw", "=", "False", ")", ":", "self", ".", "conf", ".", "relabel", "(", "y2label", "=", "s", ",", "delay_draw", "=", "delay_draw", ")" ]
42.333333
12.333333
def _create_intermediate_nodes(self, name): """Create intermediate nodes if hierarchy does not exist.""" hierarchy = self._split_node_name(name, self.root_name) node_tree = [ self.root_name + self._node_separator + self._node_separator.join(hierarchy[: num + 1]) for num in range(len(hierarchy)) ] iobj = [ (child[: child.rfind(self._node_separator)], child) for child in node_tree if child not in self._db ] for parent, child in iobj: self._db[child] = {"parent": parent, "children": [], "data": []} self._db[parent]["children"] = sorted( self._db[parent]["children"] + [child] )
[ "def", "_create_intermediate_nodes", "(", "self", ",", "name", ")", ":", "hierarchy", "=", "self", ".", "_split_node_name", "(", "name", ",", "self", ".", "root_name", ")", "node_tree", "=", "[", "self", ".", "root_name", "+", "self", ".", "_node_separator", "+", "self", ".", "_node_separator", ".", "join", "(", "hierarchy", "[", ":", "num", "+", "1", "]", ")", "for", "num", "in", "range", "(", "len", "(", "hierarchy", ")", ")", "]", "iobj", "=", "[", "(", "child", "[", ":", "child", ".", "rfind", "(", "self", ".", "_node_separator", ")", "]", ",", "child", ")", "for", "child", "in", "node_tree", "if", "child", "not", "in", "self", ".", "_db", "]", "for", "parent", ",", "child", "in", "iobj", ":", "self", ".", "_db", "[", "child", "]", "=", "{", "\"parent\"", ":", "parent", ",", "\"children\"", ":", "[", "]", ",", "\"data\"", ":", "[", "]", "}", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "=", "sorted", "(", "self", ".", "_db", "[", "parent", "]", "[", "\"children\"", "]", "+", "[", "child", "]", ")" ]
39.684211
15.894737
def has_nans(obj):
    """Check if obj has any NaNs

    Compatible with different behavior of np.isnan, which sometimes applies
    over all axes (py35, py36) and sometimes does not (py34).
    """
    nans = np.isnan(obj)
    while np.ndim(nans):
        nans = np.any(nans)
    return bool(nans)
[ "def", "has_nans", "(", "obj", ")", ":", "nans", "=", "np", ".", "isnan", "(", "obj", ")", "while", "np", ".", "ndim", "(", "nans", ")", ":", "nans", "=", "np", ".", "any", "(", "nans", ")", "return", "bool", "(", "nans", ")" ]
28.9
18.2
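A self-contained usage sketch; the while loop reduces the boolean array dimension by dimension until a scalar remains, so both arrays and plain scalars work:

import numpy as np

def has_nans(obj):
    nans = np.isnan(obj)
    while np.ndim(nans):
        nans = np.any(nans)
    return bool(nans)

print(has_nans(np.array([[1.0, 2.0], [np.nan, 4.0]])))  # True
print(has_nans(3.5))                                    # False: scalar input is fine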
def tag_pivot(self, tag_resource):
    """Pivot point on tags for this resource.

    This method will return all *resources* (group, indicators, task,
    victims, etc) for this resource that have the provided tag applied.

    **Example Endpoints URI's**

    +--------------+------------------------------------------------------------+
    | HTTP Method  | API Endpoint URI's                                         |
    +==============+============================================================+
    | GET          | /v2/tags/{resourceId}/groups/{resourceType}                |
    +--------------+------------------------------------------------------------+
    | GET          | /v2/tags/{resourceId}/groups/{resourceType}/{uniqueId}     |
    +--------------+------------------------------------------------------------+
    | GET          | /v2/tags/{resourceId}/indicators/{resourceType}            |
    +--------------+------------------------------------------------------------+
    | GET          | /v2/tags/{resourceId}/indicators/{resourceType}/{uniqueId} |
    +--------------+------------------------------------------------------------+
    | POST         | /v2/tags/{resourceId}/groups/{resourceType}/{uniqueId}     |
    +--------------+------------------------------------------------------------+
    | POST         | /v2/tags/{resourceId}/indicators/{resourceType}/{uniqueId} |
    +--------------+------------------------------------------------------------+

    Args:
        tag_resource: The tag resource to pivot on; its resource id is the tag name.
    """
    resource = self.copy()
    resource._request_uri = '{}/{}'.format(tag_resource.request_uri, resource._request_uri)
    return resource
[ "def", "tag_pivot", "(", "self", ",", "tag_resource", ")", ":", "resource", "=", "self", ".", "copy", "(", ")", "resource", ".", "_request_uri", "=", "'{}/{}'", ".", "format", "(", "tag_resource", ".", "request_uri", ",", "resource", ".", "_request_uri", ")", "return", "resource" ]
59.333333
35
def team_stats(game_id):
    """Return team stats of a game with matching id.

    The additional pitching/batting data is mostly the same stats.
    MLB decided to have two box score files, thus we return the
    data from both.
    """
    # get data from data module
    box_score = mlbgame.data.get_box_score(game_id)
    raw_box_score = mlbgame.data.get_raw_box_score(game_id)
    # parse XML
    box_score_tree = etree.parse(box_score).getroot()
    raw_box_score_tree = etree.parse(raw_box_score).getroot()
    # get pitching and batting info
    pitching = box_score_tree.findall('pitching')
    batting = box_score_tree.findall('batting')
    # dictionary for output
    output = {}
    output = __team_stats_info(pitching, output, 'pitching')
    output = __team_stats_info(batting, output, 'batting')
    output = __raw_team_stats_info(raw_box_score_tree, output)
    return output
[ "def", "team_stats", "(", "game_id", ")", ":", "# get data from data module", "box_score", "=", "mlbgame", ".", "data", ".", "get_box_score", "(", "game_id", ")", "raw_box_score", "=", "mlbgame", ".", "data", ".", "get_raw_box_score", "(", "game_id", ")", "# parse XML", "box_score_tree", "=", "etree", ".", "parse", "(", "box_score", ")", ".", "getroot", "(", ")", "raw_box_score_tree", "=", "etree", ".", "parse", "(", "raw_box_score", ")", ".", "getroot", "(", ")", "# get pitching and batting ingo", "pitching", "=", "box_score_tree", ".", "findall", "(", "'pitching'", ")", "batting", "=", "box_score_tree", ".", "findall", "(", "'batting'", ")", "# dictionary for output", "output", "=", "{", "}", "output", "=", "__team_stats_info", "(", "pitching", ",", "output", ",", "'pitching'", ")", "output", "=", "__team_stats_info", "(", "batting", ",", "output", ",", "'batting'", ")", "output", "=", "__raw_team_stats_info", "(", "raw_box_score_tree", ",", "output", ")", "return", "output" ]
41.095238
16.952381
def _forceInt(x,y,z,dens,b2,c2,i,glx=None,glw=None): """Integral that gives the force in x,y,z""" def integrand(s): t= 1/s**2.-1. return dens(numpy.sqrt(x**2./(1.+t)+y**2./(b2+t)+z**2./(c2+t)))\ *(x/(1.+t)*(i==0)+y/(b2+t)*(i==1)+z/(c2+t)*(i==2))\ /numpy.sqrt((1.+(b2-1.)*s**2.)*(1.+(c2-1.)*s**2.)) if glx is None: return integrate.quad(integrand,0.,1.)[0] else: return numpy.sum(glw*integrand(glx))
[ "def", "_forceInt", "(", "x", ",", "y", ",", "z", ",", "dens", ",", "b2", ",", "c2", ",", "i", ",", "glx", "=", "None", ",", "glw", "=", "None", ")", ":", "def", "integrand", "(", "s", ")", ":", "t", "=", "1", "/", "s", "**", "2.", "-", "1.", "return", "dens", "(", "numpy", ".", "sqrt", "(", "x", "**", "2.", "/", "(", "1.", "+", "t", ")", "+", "y", "**", "2.", "/", "(", "b2", "+", "t", ")", "+", "z", "**", "2.", "/", "(", "c2", "+", "t", ")", ")", ")", "*", "(", "x", "/", "(", "1.", "+", "t", ")", "*", "(", "i", "==", "0", ")", "+", "y", "/", "(", "b2", "+", "t", ")", "*", "(", "i", "==", "1", ")", "+", "z", "/", "(", "c2", "+", "t", ")", "*", "(", "i", "==", "2", ")", ")", "/", "numpy", ".", "sqrt", "(", "(", "1.", "+", "(", "b2", "-", "1.", ")", "*", "s", "**", "2.", ")", "*", "(", "1.", "+", "(", "c2", "-", "1.", ")", "*", "s", "**", "2.", ")", ")", "if", "glx", "is", "None", ":", "return", "integrate", ".", "quad", "(", "integrand", ",", "0.", ",", "1.", ")", "[", "0", "]", "else", ":", "return", "numpy", ".", "sum", "(", "glw", "*", "integrand", "(", "glx", ")", ")" ]
44.545455
20.181818
def import_end_event_to_graph(diagram_graph, process_id, process_attributes, element): """ Adds to graph the new element that represents BPMN end event. End event inherits sequence of eventDefinitionRef from Event type. Separate methods for each event type are required since each of them has different variants (Message, Error, Signal etc.). :param diagram_graph: NetworkX graph representing a BPMN process diagram, :param process_id: string object, representing an ID of process element, :param process_attributes: dictionary that holds attribute values of 'process' element, which is parent of imported flow node, :param element: object representing a BPMN XML 'endEvent' element. """ end_event_definitions = {'messageEventDefinition', 'signalEventDefinition', 'escalationEventDefinition', 'errorEventDefinition', 'compensateEventDefinition', 'terminateEventDefinition'} BpmnDiagramGraphImport.import_flow_node_to_graph(diagram_graph, process_id, process_attributes, element) BpmnDiagramGraphImport.import_event_definition_elements(diagram_graph, element, end_event_definitions)
[ "def", "import_end_event_to_graph", "(", "diagram_graph", ",", "process_id", ",", "process_attributes", ",", "element", ")", ":", "end_event_definitions", "=", "{", "'messageEventDefinition'", ",", "'signalEventDefinition'", ",", "'escalationEventDefinition'", ",", "'errorEventDefinition'", ",", "'compensateEventDefinition'", ",", "'terminateEventDefinition'", "}", "BpmnDiagramGraphImport", ".", "import_flow_node_to_graph", "(", "diagram_graph", ",", "process_id", ",", "process_attributes", ",", "element", ")", "BpmnDiagramGraphImport", ".", "import_event_definition_elements", "(", "diagram_graph", ",", "element", ",", "end_event_definitions", ")" ]
71.470588
40.882353
def maybe_timeout_options(self): """Implements the NailgunProtocol.TimeoutProvider interface.""" if self._exit_timeout_start_time: return NailgunProtocol.TimeoutOptions(self._exit_timeout_start_time, self._exit_timeout) else: return None
[ "def", "maybe_timeout_options", "(", "self", ")", ":", "if", "self", ".", "_exit_timeout_start_time", ":", "return", "NailgunProtocol", ".", "TimeoutOptions", "(", "self", ".", "_exit_timeout_start_time", ",", "self", ".", "_exit_timeout", ")", "else", ":", "return", "None" ]
42.666667
19.833333
def loadProfile(self, profile, inspectorFullName=None): """ Reads the persistent program settings for the current profile. If inspectorFullName is given, a window with this inspector will be created if it wasn't already created in the profile. All windows with this inspector will be raised. """ settings = QtCore.QSettings() logger.info("Reading profile {!r} from: {}".format(profile, settings.fileName())) self._profile = profile profGroupName = self.profileGroupName(profile) # Instantiate windows from groups settings.beginGroup(profGroupName) try: for windowGroupName in settings.childGroups(): if windowGroupName.startswith('window'): settings.beginGroup(windowGroupName) try: self.addNewMainWindow(settings=settings) finally: settings.endGroup() finally: settings.endGroup() if inspectorFullName is not None: windows = [win for win in self._mainWindows if win.inspectorFullName == inspectorFullName] if len(windows) == 0: logger.info("Creating window for inspector: {!r}".format(inspectorFullName)) try: win = self.addNewMainWindow(inspectorFullName=inspectorFullName) except KeyError: logger.warn("No inspector found with ID: {}".format(inspectorFullName)) else: for win in windows: win.raise_() if len(self.mainWindows) == 0: logger.info("No open windows in profile (creating one).") self.addNewMainWindow(inspectorFullName=DEFAULT_INSPECTOR)
[ "def", "loadProfile", "(", "self", ",", "profile", ",", "inspectorFullName", "=", "None", ")", ":", "settings", "=", "QtCore", ".", "QSettings", "(", ")", "logger", ".", "info", "(", "\"Reading profile {!r} from: {}\"", ".", "format", "(", "profile", ",", "settings", ".", "fileName", "(", ")", ")", ")", "self", ".", "_profile", "=", "profile", "profGroupName", "=", "self", ".", "profileGroupName", "(", "profile", ")", "# Instantiate windows from groups", "settings", ".", "beginGroup", "(", "profGroupName", ")", "try", ":", "for", "windowGroupName", "in", "settings", ".", "childGroups", "(", ")", ":", "if", "windowGroupName", ".", "startswith", "(", "'window'", ")", ":", "settings", ".", "beginGroup", "(", "windowGroupName", ")", "try", ":", "self", ".", "addNewMainWindow", "(", "settings", "=", "settings", ")", "finally", ":", "settings", ".", "endGroup", "(", ")", "finally", ":", "settings", ".", "endGroup", "(", ")", "if", "inspectorFullName", "is", "not", "None", ":", "windows", "=", "[", "win", "for", "win", "in", "self", ".", "_mainWindows", "if", "win", ".", "inspectorFullName", "==", "inspectorFullName", "]", "if", "len", "(", "windows", ")", "==", "0", ":", "logger", ".", "info", "(", "\"Creating window for inspector: {!r}\"", ".", "format", "(", "inspectorFullName", ")", ")", "try", ":", "win", "=", "self", ".", "addNewMainWindow", "(", "inspectorFullName", "=", "inspectorFullName", ")", "except", "KeyError", ":", "logger", ".", "warn", "(", "\"No inspector found with ID: {}\"", ".", "format", "(", "inspectorFullName", ")", ")", "else", ":", "for", "win", "in", "windows", ":", "win", ".", "raise_", "(", ")", "if", "len", "(", "self", ".", "mainWindows", ")", "==", "0", ":", "logger", ".", "info", "(", "\"No open windows in profile (creating one).\"", ")", "self", ".", "addNewMainWindow", "(", "inspectorFullName", "=", "DEFAULT_INSPECTOR", ")" ]
43.682927
21.804878
def split_history_item(history): """ Return the log file and optional description for item. """ try: log_file, description = shlex.split(history) except ValueError: log_file = history.strip() description = None return log_file, description
[ "def", "split_history_item", "(", "history", ")", ":", "try", ":", "log_file", ",", "description", "=", "shlex", ".", "split", "(", "history", ")", "except", "ValueError", ":", "log_file", "=", "history", ".", "strip", "(", ")", "description", "=", "None", "return", "log_file", ",", "description" ]
25.272727
14.181818
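A self-contained usage sketch; shlex.split tolerates quoted descriptions, and the ValueError path covers entries with no description:

import shlex

def split_history_item(history):
    try:
        log_file, description = shlex.split(history)
    except ValueError:
        log_file = history.strip()
        description = None
    return log_file, description

print(split_history_item('run1.log "first attempt"'))  # ('run1.log', 'first attempt')
print(split_history_item('run2.log'))                  # ('run2.log', None)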
def to_xml(cls, enum_val): """ Return the XML value of the enumeration value *enum_val*. """ if enum_val not in cls._member_to_xml: raise ValueError( "value '%s' not in enumeration %s" % (enum_val, cls.__name__) ) return cls._member_to_xml[enum_val]
[ "def", "to_xml", "(", "cls", ",", "enum_val", ")", ":", "if", "enum_val", "not", "in", "cls", ".", "_member_to_xml", ":", "raise", "ValueError", "(", "\"value '%s' not in enumeration %s\"", "%", "(", "enum_val", ",", "cls", ".", "__name__", ")", ")", "return", "cls", ".", "_member_to_xml", "[", "enum_val", "]" ]
35.666667
13.666667
def set_default_symbols(self): """Set self.symbols based on self.numbers and the periodic table.""" self.symbols = tuple(periodic[n].symbol for n in self.numbers)
[ "def", "set_default_symbols", "(", "self", ")", ":", "self", ".", "symbols", "=", "tuple", "(", "periodic", "[", "n", "]", ".", "symbol", "for", "n", "in", "self", ".", "numbers", ")" ]
58.666667
13.333333
def _build_http_client(cls, session: AppSession): '''Create the HTTP client. Returns: Client: An instance of :class:`.http.Client`. ''' # TODO: # recorder = self._build_recorder() stream_factory = functools.partial( HTTPStream, ignore_length=session.args.ignore_length, keep_alive=session.args.http_keep_alive) return session.factory.new( 'HTTPClient', connection_pool=session.factory['ConnectionPool'], stream_factory=stream_factory )
[ "def", "_build_http_client", "(", "cls", ",", "session", ":", "AppSession", ")", ":", "# TODO:", "# recorder = self._build_recorder()", "stream_factory", "=", "functools", ".", "partial", "(", "HTTPStream", ",", "ignore_length", "=", "session", ".", "args", ".", "ignore_length", ",", "keep_alive", "=", "session", ".", "args", ".", "http_keep_alive", ")", "return", "session", ".", "factory", ".", "new", "(", "'HTTPClient'", ",", "connection_pool", "=", "session", ".", "factory", "[", "'ConnectionPool'", "]", ",", "stream_factory", "=", "stream_factory", ")" ]
29.947368
18.473684
def assign_parent(node: astroid.node_classes.NodeNG) -> astroid.node_classes.NodeNG: """return the higher parent which is not an AssignName, Tuple or List node """ while node and isinstance(node, (astroid.AssignName, astroid.Tuple, astroid.List)): node = node.parent return node
[ "def", "assign_parent", "(", "node", ":", "astroid", ".", "node_classes", ".", "NodeNG", ")", "->", "astroid", ".", "node_classes", ".", "NodeNG", ":", "while", "node", "and", "isinstance", "(", "node", ",", "(", "astroid", ".", "AssignName", ",", "astroid", ".", "Tuple", ",", "astroid", ".", "List", ")", ")", ":", "node", "=", "node", ".", "parent", "return", "node" ]
49.5
21.666667
def _iget(key, lookup_dict): """ Case-insensitive search for `key` within keys of `lookup_dict`. """ for k, v in lookup_dict.items(): if k.lower() == key.lower(): return v return None
[ "def", "_iget", "(", "key", ",", "lookup_dict", ")", ":", "for", "k", ",", "v", "in", "lookup_dict", ".", "items", "(", ")", ":", "if", "k", ".", "lower", "(", ")", "==", "key", ".", "lower", "(", ")", ":", "return", "v", "return", "None" ]
27
11.5
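A self-contained usage sketch; this linear scan is the usual trick for case-insensitive lookups such as HTTP headers:

def _iget(key, lookup_dict):
    for k, v in lookup_dict.items():
        if k.lower() == key.lower():
            return v
    return None

headers = {'Content-Type': 'text/html', 'X-Request-Id': 'abc123'}
print(_iget('content-type', headers))  # 'text/html'
print(_iget('missing', headers))       # None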
def sas_logical_jbods(self): """ Gets the SAS Logical JBODs API client. Returns: SasLogicalJbod: """ if not self.__sas_logical_jbods: self.__sas_logical_jbods = SasLogicalJbods(self.__connection) return self.__sas_logical_jbods
[ "def", "sas_logical_jbods", "(", "self", ")", ":", "if", "not", "self", ".", "__sas_logical_jbods", ":", "self", ".", "__sas_logical_jbods", "=", "SasLogicalJbods", "(", "self", ".", "__connection", ")", "return", "self", ".", "__sas_logical_jbods" ]
29.1
12.9
def get_cash_balance(self): """ Returns the account cash balance available for investing Returns ------- float The cash balance in your account. """ cash = False try: response = self.session.get('/browse/cashBalanceAj.action') json_response = response.json() if self.session.json_success(json_response): self.__log('Cash available: {0}'.format(json_response['cashBalance'])) cash_value = json_response['cashBalance'] # Convert currency to float value # Match values like $1,000.12 or 1,0000$ cash_match = re.search('^[^0-9]?([0-9\.,]+)[^0-9]?', cash_value) if cash_match: cash_str = cash_match.group(1) cash_str = cash_str.replace(',', '') cash = float(cash_str) else: self.__log('Could not get cash balance: {0}'.format(response.text)) except Exception as e: self.__log('Could not get the cash balance on the account: Error: {0}\nJSON: {1}'.format(str(e), response.text)) raise e return cash
[ "def", "get_cash_balance", "(", "self", ")", ":", "cash", "=", "False", "try", ":", "response", "=", "self", ".", "session", ".", "get", "(", "'/browse/cashBalanceAj.action'", ")", "json_response", "=", "response", ".", "json", "(", ")", "if", "self", ".", "session", ".", "json_success", "(", "json_response", ")", ":", "self", ".", "__log", "(", "'Cash available: {0}'", ".", "format", "(", "json_response", "[", "'cashBalance'", "]", ")", ")", "cash_value", "=", "json_response", "[", "'cashBalance'", "]", "# Convert currency to float value", "# Match values like $1,000.12 or 1,0000$", "cash_match", "=", "re", ".", "search", "(", "'^[^0-9]?([0-9\\.,]+)[^0-9]?'", ",", "cash_value", ")", "if", "cash_match", ":", "cash_str", "=", "cash_match", ".", "group", "(", "1", ")", "cash_str", "=", "cash_str", ".", "replace", "(", "','", ",", "''", ")", "cash", "=", "float", "(", "cash_str", ")", "else", ":", "self", ".", "__log", "(", "'Could not get cash balance: {0}'", ".", "format", "(", "response", ".", "text", ")", ")", "except", "Exception", "as", "e", ":", "self", ".", "__log", "(", "'Could not get the cash balance on the account: Error: {0}\\nJSON: {1}'", ".", "format", "(", "str", "(", "e", ")", ",", "response", ".", "text", ")", ")", "raise", "e", "return", "cash" ]
36.393939
23.787879
def read_sections(): """Read ini files and return list of pairs (name, options)""" config = configparser.ConfigParser() config.read(('requirements.ini', 'setup.cfg', 'tox.ini')) return [ ( name, { key: parse_value(key, config[name][key]) for key in config[name] } ) for name in config.sections() if 'requirements' in name ]
[ "def", "read_sections", "(", ")", ":", "config", "=", "configparser", ".", "ConfigParser", "(", ")", "config", ".", "read", "(", "(", "'requirements.ini'", ",", "'setup.cfg'", ",", "'tox.ini'", ")", ")", "return", "[", "(", "name", ",", "{", "key", ":", "parse_value", "(", "key", ",", "config", "[", "name", "]", "[", "key", "]", ")", "for", "key", "in", "config", "[", "name", "]", "}", ")", "for", "name", "in", "config", ".", "sections", "(", ")", "if", "'requirements'", "in", "name", "]" ]
28.6
18
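A runnable sketch of the record above with two stated assumptions: `parse_value` (not shown in the source) is replaced by an identity stand-in, and the ini content is supplied inline instead of from the three files the function reads:

import configparser

parse_value = lambda key, value: value  # stand-in for the real helper

config = configparser.ConfigParser()
config.read_string("""
[requirements]
packages = requests
""")
sections = [
    (name, {key: parse_value(key, config[name][key]) for key in config[name]})
    for name in config.sections() if 'requirements' in name
]
print(sections)  # [('requirements', {'packages': 'requests'})]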
def enable_collection(f): """Call the wrapped function with a HistogramCollection as argument.""" @wraps(f) def new_f(h: AbstractHistogram1D, **kwargs): from physt.histogram_collection import HistogramCollection if isinstance(h, HistogramCollection): return f(h, **kwargs) else: return f(HistogramCollection(h), **kwargs) return new_f
[ "def", "enable_collection", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "new_f", "(", "h", ":", "AbstractHistogram1D", ",", "*", "*", "kwargs", ")", ":", "from", "physt", ".", "histogram_collection", "import", "HistogramCollection", "if", "isinstance", "(", "h", ",", "HistogramCollection", ")", ":", "return", "f", "(", "h", ",", "*", "*", "kwargs", ")", "else", ":", "return", "f", "(", "HistogramCollection", "(", "h", ")", ",", "*", "*", "kwargs", ")", "return", "new_f" ]
38.9
15.4
def by_organizations(self, field=None):
    """
    Used to segregate the data according to organizations. This method
    pops the latest aggregation from the self.aggregations dict and
    adds it as a nested aggregation under itself

    :param field: the field to create the parent agg (optional)
                  default: author_org_name
    :returns: self, which allows the method to be chainable with the
              other methods
    """
    # this function is currently only for issues and PRs
    agg_field = field if field else "author_org_name"
    agg_key = "terms_" + agg_field
    if agg_key in self.aggregations.keys():
        agg = self.aggregations[agg_key]
    else:
        agg = A("terms", field=agg_field, missing="others", size=self.size)

    child_agg_counter = self.child_agg_counter_dict[agg_key]  # 0 if not present because defaultdict
    child_name, child_agg = self.aggregations.popitem()

    agg.metric(child_agg_counter, child_agg)
    self.aggregations[agg_key] = agg
    self.child_agg_counter_dict[agg_key] += 1
    return self
[ "def", "by_organizations", "(", "self", ",", "field", "=", "None", ")", ":", "# this functions is currently only for issues and PRs", "agg_field", "=", "field", "if", "field", "else", "\"author_org_name\"", "agg_key", "=", "\"terms_\"", "+", "agg_field", "if", "agg_key", "in", "self", ".", "aggregations", ".", "keys", "(", ")", ":", "agg", "=", "self", ".", "aggregations", "[", "agg_key", "]", "else", ":", "agg", "=", "A", "(", "\"terms\"", ",", "field", "=", "agg_field", ",", "missing", "=", "\"others\"", ",", "size", "=", "self", ".", "size", ")", "child_agg_counter", "=", "self", ".", "child_agg_counter_dict", "[", "agg_key", "]", "# 0 if not present because defaultdict", "child_name", ",", "child_agg", "=", "self", ".", "aggregations", ".", "popitem", "(", ")", "agg", ".", "metric", "(", "child_agg_counter", ",", "child_agg", ")", "self", ".", "aggregations", "[", "agg_key", "]", "=", "agg", "self", ".", "child_agg_counter_dict", "[", "agg_key", "]", "+=", "1", "return", "self" ]
42.884615
21.346154
def expand_tamil(start,end): """ expand uyir or mei-letter range etc. i.e. அ-ஔ gets converted to அ,ஆ,இ,ஈ,உ,ஊ,எ,ஏ,ஐ,ஒ,ஓ,ஔ etc. """ # few sequences for seq in [utf8.uyir_letters, utf8.grantha_mei_letters, \ utf8.grantha_agaram_letters]: if is_containing_seq(start,end,seq): return expand_sequence(start,end,seq) # all Tamil letters seq = utf8.grantha_uyirmei_letters if is_containing_seq(start,end,seq): return expand_sequence(start,end,seq) raise Exception("Cannot understand sequence [%s-%s]"%(start,end))
[ "def", "expand_tamil", "(", "start", ",", "end", ")", ":", "# few sequences", "for", "seq", "in", "[", "utf8", ".", "uyir_letters", ",", "utf8", ".", "grantha_mei_letters", ",", "utf8", ".", "grantha_agaram_letters", "]", ":", "if", "is_containing_seq", "(", "start", ",", "end", ",", "seq", ")", ":", "return", "expand_sequence", "(", "start", ",", "end", ",", "seq", ")", "# all Tamil letters", "seq", "=", "utf8", ".", "grantha_uyirmei_letters", "if", "is_containing_seq", "(", "start", ",", "end", ",", "seq", ")", ":", "return", "expand_sequence", "(", "start", ",", "end", ",", "seq", ")", "raise", "Exception", "(", "\"Cannot understand sequence [%s-%s]\"", "%", "(", "start", ",", "end", ")", ")" ]
38.733333
12.4
def UWRatio(s1, s2, full_process=True): """Return a measure of the sequences' similarity between 0 and 100, using different algorithms. Same as WRatio but preserving unicode. """ return WRatio(s1, s2, force_ascii=False, full_process=full_process)
[ "def", "UWRatio", "(", "s1", ",", "s2", ",", "full_process", "=", "True", ")", ":", "return", "WRatio", "(", "s1", ",", "s2", ",", "force_ascii", "=", "False", ",", "full_process", "=", "full_process", ")" ]
51.6
12.4
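A hedged usage sketch: in the fuzzywuzzy package this function is exposed as `fuzz.UWRatio`, behaving like `fuzz.WRatio` but without ASCII folding, so non-Latin text survives preprocessing:

from fuzzywuzzy import fuzz  # assumed home of UWRatio

# identical after unicode-aware lowercasing, so the score should be 100
print(fuzz.UWRatio(u'ΑΒΓ δε', u'αβγ δε'))  # similarity in the range 0..100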
def get_parameter_p_value_too_high_warning( model_type, model_params, parameter, p_value, maximum_p_value ): """ Return an empty list or a single warning wrapped in a list indicating whether model parameter p-value is too high. Parameters ---------- model_type : :any:`str` Model type (e.g., ``'cdd_hdd'``). model_params : :any:`dict` Parameters as stored in :any:`eemeter.CalTRACKUsagePerDayCandidateModel.model_params`. parameter : :any:`str` The name of the parameter, e.g., ``'intercept'``. p_value : :any:`float` The p-value of the parameter. maximum_p_value : :any:`float` The maximum allowable p-value of the parameter. Returns ------- warnings : :any:`list` of :any:`eemeter.EEMeterWarning` Empty list or list of single warning. """ warnings = [] if p_value > maximum_p_value: data = { "{}_p_value".format(parameter): p_value, "{}_maximum_p_value".format(parameter): maximum_p_value, } data.update(model_params) warnings.append( EEMeterWarning( qualified_name=( "eemeter.caltrack_daily.{model_type}.{parameter}_p_value_too_high".format( model_type=model_type, parameter=parameter ) ), description=( "Model fit {parameter} p-value is too high. Candidate model rejected.".format( parameter=parameter ) ), data=data, ) ) return warnings
[ "def", "get_parameter_p_value_too_high_warning", "(", "model_type", ",", "model_params", ",", "parameter", ",", "p_value", ",", "maximum_p_value", ")", ":", "warnings", "=", "[", "]", "if", "p_value", ">", "maximum_p_value", ":", "data", "=", "{", "\"{}_p_value\"", ".", "format", "(", "parameter", ")", ":", "p_value", ",", "\"{}_maximum_p_value\"", ".", "format", "(", "parameter", ")", ":", "maximum_p_value", ",", "}", "data", ".", "update", "(", "model_params", ")", "warnings", ".", "append", "(", "EEMeterWarning", "(", "qualified_name", "=", "(", "\"eemeter.caltrack_daily.{model_type}.{parameter}_p_value_too_high\"", ".", "format", "(", "model_type", "=", "model_type", ",", "parameter", "=", "parameter", ")", ")", ",", "description", "=", "(", "\"Model fit {parameter} p-value is too high. Candidate model rejected.\"", ".", "format", "(", "parameter", "=", "parameter", ")", ")", ",", "data", "=", "data", ",", ")", ")", "return", "warnings" ]
34.234043
19.808511
def compound_crossspec(a_data, tbin, Df=None, pointProcess=False):
    """
    Calculate cross spectra of compound signals.
    a_data is a list of datasets (a_data = [data1,data2,...]).
    For each dataset in a_data, the compound signal is calculated
    and the crossspectra between these compound signals are computed.

    If pointProcess=True, power spectra are normalized by the length T of the
    time series.

    Parameters
    ----------
    a_data : list of numpy.ndarrays
        Array: 1st axis unit, 2nd axis time.
    tbin : float
        Binsize in ms.
    Df : float/None,
        Window width of sliding rectangular filter (smoothing),
        None -> no smoothing.
    pointProcess : bool
        If set to True, crossspectrum is normalized to signal length `T`

    Returns
    -------
    freq : numpy.ndarray
        numpy.ndarray of frequencies.
    CRO : numpy.ndarray
        3 dim numpy.ndarray; 1st axis first compound signal, 2nd axis
        second compound signal, 3rd axis frequency.

    Examples
    --------
    >>> compound_crossspec([np.array([analog_sig1, analog_sig2]),
    ...                     np.array([analog_sig3,analog_sig4])], tbin, Df=Df)
    Out[1]: (freq,CRO)
    >>> CRO.shape
    Out[2]: (2,2,len(analog_sig1))
    """
    a_mdata = []
    for data in a_data:
        a_mdata.append(np.sum(data, axis=0))  # calculate compound signals
    return crossspec(np.array(a_mdata), tbin, Df, units=False,
                     pointProcess=pointProcess)
[ "def", "compound_crossspec", "(", "a_data", ",", "tbin", ",", "Df", "=", "None", ",", "pointProcess", "=", "False", ")", ":", "a_mdata", "=", "[", "]", "for", "data", "in", "a_data", ":", "a_mdata", ".", "append", "(", "np", ".", "sum", "(", "data", ",", "axis", "=", "0", ")", ")", "# calculate compound signals", "return", "crossspec", "(", "np", ".", "array", "(", "a_mdata", ")", ",", "tbin", ",", "Df", ",", "units", "=", "False", ",", "pointProcess", "=", "pointProcess", ")" ]
31.191489
23.404255
def _remote_file_size(url=None, file_name=None, pb_dir=None):
    """
    Get the remote file size in bytes

    Parameters
    ----------
    url : str, optional
        The full url of the file. Use this option to explicitly
        state the full url.
    file_name : str, optional
        The base file name. Use this argument along with pb_dir if you
        want the full url to be constructed.
    pb_dir : str, optional
        The base database directory. Use this argument along with
        file_name if you want the full url to be constructed.

    Returns
    -------
    remote_file_size : int
        Size of the file in bytes

    """
    # Option to construct the url
    if file_name and pb_dir:
        url = posixpath.join(config.db_index_url, pb_dir, file_name)

    response = requests.head(url, headers={'Accept-Encoding': 'identity'})
    # Raise HTTPError if invalid url
    response.raise_for_status()

    # Supposed size of the file
    remote_file_size = int(response.headers['content-length'])

    return remote_file_size
[ "def", "_remote_file_size", "(", "url", "=", "None", ",", "file_name", "=", "None", ",", "pb_dir", "=", "None", ")", ":", "# Option to construct the url", "if", "file_name", "and", "pb_dir", ":", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "pb_dir", ",", "file_name", ")", "response", "=", "requests", ".", "head", "(", "url", ",", "headers", "=", "{", "'Accept-Encoding'", ":", "'identity'", "}", ")", "# Raise HTTPError if invalid url", "response", ".", "raise_for_status", "(", ")", "# Supposed size of the file", "remote_file_size", "=", "int", "(", "response", ".", "headers", "[", "'content-length'", "]", ")", "return", "remote_file_size" ]
28.857143
20.628571
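A standalone equivalent of the record's HEAD trick (the record itself builds the url from config.db_index_url); the url here is hypothetical and the server is assumed to report Content-Length on HEAD responses:

import requests

url = 'https://example.com/records/100/somefile.dat'  # hypothetical URL
# 'identity' disables compression so the reported length is the true file size
response = requests.head(url, headers={'Accept-Encoding': 'identity'})
response.raise_for_status()
print(int(response.headers['content-length']))  # remote size in bytes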
def load(self, filepath): """Load the track file""" with open(filepath, 'rb') as fd: num_keys = struct.unpack(">i", fd.read(4))[0] for i in range(num_keys): row, value, kind = struct.unpack('>ifb', fd.read(9)) self.keys.append(TrackKey(row, value, kind))
[ "def", "load", "(", "self", ",", "filepath", ")", ":", "with", "open", "(", "filepath", ",", "'rb'", ")", "as", "fd", ":", "num_keys", "=", "struct", ".", "unpack", "(", "\">i\"", ",", "fd", ".", "read", "(", "4", ")", ")", "[", "0", "]", "for", "i", "in", "range", "(", "num_keys", ")", ":", "row", ",", "value", ",", "kind", "=", "struct", ".", "unpack", "(", "'>ifb'", ",", "fd", ".", "read", "(", "9", ")", ")", "self", ".", "keys", ".", "append", "(", "TrackKey", "(", "row", ",", "value", ",", "kind", ")", ")" ]
45.714286
11.857143
def apool(self, k_height, k_width, d_height=2, d_width=2, mode="VALID", input_layer=None, num_channels_in=None): """Construct an average pooling layer.""" return self._pool("apool", pooling_layers.average_pooling2d, k_height, k_width, d_height, d_width, mode, input_layer, num_channels_in)
[ "def", "apool", "(", "self", ",", "k_height", ",", "k_width", ",", "d_height", "=", "2", ",", "d_width", "=", "2", ",", "mode", "=", "\"VALID\"", ",", "input_layer", "=", "None", ",", "num_channels_in", "=", "None", ")", ":", "return", "self", ".", "_pool", "(", "\"apool\"", ",", "pooling_layers", ".", "average_pooling2d", ",", "k_height", ",", "k_width", ",", "d_height", ",", "d_width", ",", "mode", ",", "input_layer", ",", "num_channels_in", ")" ]
37
15.75
def html(self) -> str: """Generate a random HTML tag with text inside and some attrs set. :return: HTML. :Examples: '<span class="select" id="careers"> Ports are created with the built-in function open_port. </span>' """ tag_name = self.random.choice(list(HTML_CONTAINER_TAGS)) tag_attributes = list(HTML_CONTAINER_TAGS[tag_name]) # type: ignore k = self.random.randint(1, len(tag_attributes)) selected_attrs = self.random.sample(tag_attributes, k=k) attrs = [] for attr in selected_attrs: attrs.append('{}="{}"'.format( attr, self.html_attribute_value(tag_name, attr))) html_result = '<{tag} {attrs}>{content}</{tag}>' return html_result.format( tag=tag_name, attrs=' '.join(attrs), content=self.__text.sentence(), )
[ "def", "html", "(", "self", ")", "->", "str", ":", "tag_name", "=", "self", ".", "random", ".", "choice", "(", "list", "(", "HTML_CONTAINER_TAGS", ")", ")", "tag_attributes", "=", "list", "(", "HTML_CONTAINER_TAGS", "[", "tag_name", "]", ")", "# type: ignore", "k", "=", "self", ".", "random", ".", "randint", "(", "1", ",", "len", "(", "tag_attributes", ")", ")", "selected_attrs", "=", "self", ".", "random", ".", "sample", "(", "tag_attributes", ",", "k", "=", "k", ")", "attrs", "=", "[", "]", "for", "attr", "in", "selected_attrs", ":", "attrs", ".", "append", "(", "'{}=\"{}\"'", ".", "format", "(", "attr", ",", "self", ".", "html_attribute_value", "(", "tag_name", ",", "attr", ")", ")", ")", "html_result", "=", "'<{tag} {attrs}>{content}</{tag}>'", "return", "html_result", ".", "format", "(", "tag", "=", "tag_name", ",", "attrs", "=", "' '", ".", "join", "(", "attrs", ")", ",", "content", "=", "self", ".", "__text", ".", "sentence", "(", ")", ",", ")" ]
33.37037
20.074074
def tmp_file_path(self): """ :return: :rtype: str """ return os.path.normpath(os.path.join( TMP_DIR, self.filename ))
[ "def", "tmp_file_path", "(", "self", ")", ":", "return", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "TMP_DIR", ",", "self", ".", "filename", ")", ")" ]
20.111111
14.555556
def mergecn(args):
    """
    %prog mergecn FACE.csv

    Compile matrix of GC-corrected copy numbers. List the sample folders in
    the csv file, one per line. Each folder will be scanned, one chromosome
    after another.
    """
    p = OptionParser(mergecn.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    csvfile, = args
    samples = [x.replace("-cn", "").strip().strip("/") for x in open(csvfile)]
    betadir = "beta"
    mkdir(betadir)
    for seqid in allsomes:
        names = [op.join(s + "-cn", "{}.{}.cn".
                 format(op.basename(s), seqid)) for s in samples]
        arrays = [np.fromfile(name, dtype=np.float) for name in names]
        shapes = [x.shape[0] for x in arrays]
        med_shape = np.median(shapes)
        arrays = [x for x in arrays if x.shape[0] == med_shape]
        ploidy = 2 if seqid not in ("chrY", "chrM") else 1
        if seqid in sexsomes:
            chr_med = [np.median([x for x in a if x > 0]) for a in arrays]
            chr_med = np.array(chr_med)
            idx = get_kmeans(chr_med, k=2)
            zero_med = np.median(chr_med[idx == 0])
            one_med = np.median(chr_med[idx == 1])
            logging.debug("K-means with {} c0:{} c1:{}"
                          .format(seqid, zero_med, one_med))
            higher_idx = 1 if one_med > zero_med else 0
            # Use the higher mean coverage component
            arrays = np.array(arrays)[idx == higher_idx]
            arrays = [[x] for x in arrays]
        ar = np.concatenate(arrays)
        print(seqid, ar.shape)
        rows, columns = ar.shape
        beta = []
        std = []
        for j in xrange(columns):
            a = ar[:, j]
            beta.append(np.median(a))
            std.append(np.std(a) / np.mean(a))
        beta = np.array(beta) / ploidy
        betafile = op.join(betadir, "{}.beta".format(seqid))
        beta.tofile(betafile)
        stdfile = op.join(betadir, "{}.std".format(seqid))
        std = np.array(std)
        std.tofile(stdfile)
        logging.debug("Written to `{}`".format(betafile))
        ar.tofile("{}.bin".format(seqid))
[ "def", "mergecn", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "mergecn", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "1", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "csvfile", ",", "=", "args", "samples", "=", "[", "x", ".", "replace", "(", "\"-cn\"", ",", "\"\"", ")", ".", "strip", "(", ")", ".", "strip", "(", "\"/\"", ")", "for", "x", "in", "open", "(", "csvfile", ")", "]", "betadir", "=", "\"beta\"", "mkdir", "(", "betadir", ")", "for", "seqid", "in", "allsomes", ":", "names", "=", "[", "op", ".", "join", "(", "s", "+", "\"-cn\"", ",", "\"{}.{}.cn\"", ".", "format", "(", "op", ".", "basename", "(", "s", ")", ",", "seqid", ")", ")", "for", "s", "in", "samples", "]", "arrays", "=", "[", "np", ".", "fromfile", "(", "name", ",", "dtype", "=", "np", ".", "float", ")", "for", "name", "in", "names", "]", "shapes", "=", "[", "x", ".", "shape", "[", "0", "]", "for", "x", "in", "arrays", "]", "med_shape", "=", "np", ".", "median", "(", "shapes", ")", "arrays", "=", "[", "x", "for", "x", "in", "arrays", "if", "x", ".", "shape", "[", "0", "]", "==", "med_shape", "]", "ploidy", "=", "2", "if", "seqid", "not", "in", "(", "\"chrY\"", ",", "\"chrM\"", ")", "else", "1", "if", "seqid", "in", "sexsomes", ":", "chr_med", "=", "[", "np", ".", "median", "(", "[", "x", "for", "x", "in", "a", "if", "x", ">", "0", "]", ")", "for", "a", "in", "arrays", "]", "chr_med", "=", "np", ".", "array", "(", "chr_med", ")", "idx", "=", "get_kmeans", "(", "chr_med", ",", "k", "=", "2", ")", "zero_med", "=", "np", ".", "median", "(", "chr_med", "[", "idx", "==", "0", "]", ")", "one_med", "=", "np", ".", "median", "(", "chr_med", "[", "idx", "==", "1", "]", ")", "logging", ".", "debug", "(", "\"K-means with {} c0:{} c1:{}\"", ".", "format", "(", "seqid", ",", "zero_med", ",", "one_med", ")", ")", "higher_idx", "=", "1", "if", "one_med", ">", "zero_med", "else", "0", "# Use the higher mean coverage componen", "arrays", "=", "np", ".", "array", "(", "arrays", ")", "[", "idx", "==", "higher_idx", "]", "arrays", "=", "[", "[", "x", "]", "for", "x", "in", "arrays", "]", "ar", "=", "np", ".", "concatenate", "(", "arrays", ")", "print", "(", "seqid", ",", "ar", ".", "shape", ")", "rows", ",", "columns", "=", "ar", ".", "shape", "beta", "=", "[", "]", "std", "=", "[", "]", "for", "j", "in", "xrange", "(", "columns", ")", ":", "a", "=", "ar", "[", ":", ",", "j", "]", "beta", ".", "append", "(", "np", ".", "median", "(", "a", ")", ")", "std", ".", "append", "(", "np", ".", "std", "(", "a", ")", "/", "np", ".", "mean", "(", "a", ")", ")", "beta", "=", "np", ".", "array", "(", "beta", ")", "/", "ploidy", "betafile", "=", "op", ".", "join", "(", "betadir", ",", "\"{}.beta\"", ".", "format", "(", "seqid", ")", ")", "beta", ".", "tofile", "(", "betafile", ")", "stdfile", "=", "op", ".", "join", "(", "betadir", ",", "\"{}.std\"", ".", "format", "(", "seqid", ")", ")", "std", "=", "np", ".", "array", "(", "std", ")", "std", ".", "tofile", "(", "stdfile", ")", "logging", ".", "debug", "(", "\"Written to `{}`\"", ".", "format", "(", "betafile", ")", ")", "ar", ".", "tofile", "(", "\"{}.bin\"", ".", "format", "(", "seqid", ")", ")" ]
38.703704
15.296296
def run_putgist(filename, user, **kwargs): """Passes user inputs to GetGist() and calls put()""" assume_yes = kwargs.get("yes_to_all") private = kwargs.get("private") getgist = GetGist( user=user, filename=filename, assume_yes=assume_yes, create_private=private, allow_none=True, ) getgist.put()
[ "def", "run_putgist", "(", "filename", ",", "user", ",", "*", "*", "kwargs", ")", ":", "assume_yes", "=", "kwargs", ".", "get", "(", "\"yes_to_all\"", ")", "private", "=", "kwargs", ".", "get", "(", "\"private\"", ")", "getgist", "=", "GetGist", "(", "user", "=", "user", ",", "filename", "=", "filename", ",", "assume_yes", "=", "assume_yes", ",", "create_private", "=", "private", ",", "allow_none", "=", "True", ",", ")", "getgist", ".", "put", "(", ")" ]
29
12.916667
def battery_status_encode(self, id, battery_function, type, temperature, voltages, current_battery, current_consumed, energy_consumed, battery_remaining):
    '''
    Battery information

    id                        : Battery ID (uint8_t)
    battery_function          : Function of the battery (uint8_t)
    type                      : Type (chemistry) of the battery (uint8_t)
    temperature               : Temperature of the battery in centi-degrees celsius. INT16_MAX for unknown temperature. (int16_t)
    voltages                  : Battery voltage of cells, in millivolts (1 = 1 millivolt). Cells above the valid cell count for this battery should have the UINT16_MAX value. (uint16_t)
    current_battery           : Battery current, in 10*milliamperes (1 = 10 milliampere), -1: autopilot does not measure the current (int16_t)
    current_consumed          : Consumed charge, in milliampere hours (1 = 1 mAh), -1: autopilot does not provide mAh consumption estimate (int32_t)
    energy_consumed           : Consumed energy, in 100*Joules (integrated U*I*dt) (1 = 100 Joule), -1: autopilot does not provide energy consumption estimate (int32_t)
    battery_remaining         : Remaining battery energy: (0%: 0, 100%: 100), -1: autopilot does not estimate the remaining battery (int8_t)

    '''
    return MAVLink_battery_status_message(id, battery_function, type, temperature, voltages, current_battery, current_consumed, energy_consumed, battery_remaining)
[ "def", "battery_status_encode", "(", "self", ",", "id", ",", "battery_function", ",", "type", ",", "temperature", ",", "voltages", ",", "current_battery", ",", "current_consumed", ",", "energy_consumed", ",", "battery_remaining", ")", ":", "return", "MAVLink_battery_status_message", "(", "id", ",", "battery_function", ",", "type", ",", "temperature", ",", "voltages", ",", "current_battery", ",", "current_consumed", ",", "energy_consumed", ",", "battery_remaining", ")" ]
100.875
76.75
def immediate_postdominators(self, end, target_graph=None):
    """
    Get all immediate postdominators of sub graph from given node upwards.

    :param str end: id of the node to navigate backwards from.
    :param networkx.classes.digraph.DiGraph target_graph: graph to analyse, default is self.graph.

    :return: each node of graph as index values, with element as respective node's immediate postdominator.
    :rtype: dict
    """
    return self._immediate_dominators(end, target_graph=target_graph, reverse_graph=True)
[ "def", "immediate_postdominators", "(", "self", ",", "end", ",", "target_graph", "=", "None", ")", ":", "return", "self", ".", "_immediate_dominators", "(", "end", ",", "target_graph", "=", "target_graph", ",", "reverse_graph", "=", "True", ")" ]
49.818182
33.272727
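The wrapped `_immediate_dominators` helper is not shown in the record; below is a sketch of the same reverse-graph idea using networkx, which it appears to rely on (postdominators of a graph from `end` are the dominators of the reversed graph rooted at `end`):

import networkx as nx

# diamond CFG: entry branches to a/b, both rejoin at exit
G = nx.DiGraph([('entry', 'a'), ('entry', 'b'), ('a', 'exit'), ('b', 'exit')])
ipostdom = nx.immediate_dominators(G.reverse(copy=True), 'exit')
print(ipostdom)  # every node's immediate postdominator here is 'exit'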
def limit_exceed(to_validate, constraint, violation_cfg):
    """
    Compares two values and returns the violation message if the validated value is bigger than the constraint
    :param to_validate:
    :param constraint:
    :param violation_cfg:
    :return:
    """
    if to_validate > constraint:
        violation_cfg[Check.CFG_KEY_VIOLATION_MSG] = violation_cfg[Check.CFG_KEY_VIOLATION_MSG].format(constraint)
        return violation_cfg
    else:
        return None
[ "def", "limit_exceed", "(", "to_validate", ",", "constraint", ",", "violation_cfg", ")", ":", "if", "to_validate", ">", "constraint", ":", "violation_cfg", "[", "Check", ".", "CFG_KEY_VIOLATION_MSG", "]", "=", "violation_cfg", "[", "Check", ".", "CFG_KEY_VIOLATION_MSG", "]", ".", "format", "(", "constraint", ")", "return", "violation_cfg", "else", ":", "return", "None" ]
34.461538
22.615385
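A self-contained illustration; `Check` here is a stand-in for the real constants class, which the record does not show:

class Check:  # stand-in for the real constants class (illustration only)
    CFG_KEY_VIOLATION_MSG = 'violation_msg'

def limit_exceed(to_validate, constraint, violation_cfg):
    if to_validate > constraint:
        violation_cfg[Check.CFG_KEY_VIOLATION_MSG] = \
            violation_cfg[Check.CFG_KEY_VIOLATION_MSG].format(constraint)
        return violation_cfg
    return None

cfg = {Check.CFG_KEY_VIOLATION_MSG: 'value exceeds the limit of {}'}
print(limit_exceed(150, 100, cfg))       # {'violation_msg': 'value exceeds the limit of 100'}
print(limit_exceed(50, 100, dict(cfg)))  # None: within the limit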
def visit_Return(self, node):
    """
    Add an edge from every possible callee to the current function.

    Gather all the function calls that led to the creation of the
    returned expression and add an edge to each of these functions.

    When visiting an expression, one returns a list of frozensets. Each
    element of the list is linked to a possible path, each element of a
    frozenset is linked to a dependency.
    """
    if not node.value:
        # Yielding function can't return values
        return
    for dep_set in self.visit(node.value):
        if dep_set:
            for dep in dep_set:
                self.result.add_edge(dep, self.current_function)
        else:
            self.result.add_edge(TypeDependencies.NoDeps,
                                 self.current_function)
[ "def", "visit_Return", "(", "self", ",", "node", ")", ":", "if", "not", "node", ".", "value", ":", "# Yielding function can't return values", "return", "for", "dep_set", "in", "self", ".", "visit", "(", "node", ".", "value", ")", ":", "if", "dep_set", ":", "for", "dep", "in", "dep_set", ":", "self", ".", "result", ".", "add_edge", "(", "dep", ",", "self", ".", "current_function", ")", "else", ":", "self", ".", "result", ".", "add_edge", "(", "TypeDependencies", ".", "NoDeps", ",", "self", ".", "current_function", ")" ]
40.380952
19.52381
def start_fetching_next_page(self): """ If there are more pages left in the query result, this asynchronously starts fetching the next page. If there are no pages left, :exc:`.QueryExhausted` is raised. Also see :attr:`.has_more_pages`. This should only be called after the first page has been returned. .. versionadded:: 2.0.0 """ if not self._paging_state: raise QueryExhausted() self._make_query_plan() self.message.paging_state = self._paging_state self._event.clear() self._final_result = _NOT_SET self._final_exception = None self._start_timer() self.send_request()
[ "def", "start_fetching_next_page", "(", "self", ")", ":", "if", "not", "self", ".", "_paging_state", ":", "raise", "QueryExhausted", "(", ")", "self", ".", "_make_query_plan", "(", ")", "self", ".", "message", ".", "paging_state", "=", "self", ".", "_paging_state", "self", ".", "_event", ".", "clear", "(", ")", "self", ".", "_final_result", "=", "_NOT_SET", "self", ".", "_final_exception", "=", "None", "self", ".", "_start_timer", "(", ")", "self", ".", "send_request", "(", ")" ]
34.45
17.45
def _root_mean_square_error(y, y_pred, w): """Calculate the root mean square error.""" return np.sqrt(np.average(((y_pred - y) ** 2), weights=w))
[ "def", "_root_mean_square_error", "(", "y", ",", "y_pred", ",", "w", ")", ":", "return", "np", ".", "sqrt", "(", "np", ".", "average", "(", "(", "(", "y_pred", "-", "y", ")", "**", "2", ")", ",", "weights", "=", "w", ")", ")" ]
50.333333
8
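A self-contained usage sketch; the weights steer `np.average` so heavily weighted samples dominate the error:

import numpy as np

def _root_mean_square_error(y, y_pred, w):
    return np.sqrt(np.average(((y_pred - y) ** 2), weights=w))

y = np.array([1.0, 2.0, 3.0])
y_pred = np.array([1.5, 2.0, 2.0])
w = np.array([1.0, 1.0, 2.0])  # the third sample counts double
print(_root_mean_square_error(y, y_pred, w))  # 0.75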
def update_iteration_num_suggestions(self, num_suggestions): """Update iteration's num_suggestions.""" iteration_config = self.experiment_group.iteration_config iteration_config.num_suggestions = num_suggestions self._update_config(iteration_config)
[ "def", "update_iteration_num_suggestions", "(", "self", ",", "num_suggestions", ")", ":", "iteration_config", "=", "self", ".", "experiment_group", ".", "iteration_config", "iteration_config", ".", "num_suggestions", "=", "num_suggestions", "self", ".", "_update_config", "(", "iteration_config", ")" ]
46.166667
18
def visit_classdef(self, node): """visit an astroid.Class node add this class to the class diagram definition """ anc_level, association_level = self._get_levels() self.extract_classes(node, anc_level, association_level)
[ "def", "visit_classdef", "(", "self", ",", "node", ")", ":", "anc_level", ",", "association_level", "=", "self", ".", "_get_levels", "(", ")", "self", ".", "extract_classes", "(", "node", ",", "anc_level", ",", "association_level", ")" ]
36.428571
14.857143
def storeTopAnnotations(self, service_name, annotations): """ Aggregates methods Parameters: - service_name - annotations """ self.send_storeTopAnnotations(service_name, annotations) self.recv_storeTopAnnotations()
[ "def", "storeTopAnnotations", "(", "self", ",", "service_name", ",", "annotations", ")", ":", "self", ".", "send_storeTopAnnotations", "(", "service_name", ",", "annotations", ")", "self", ".", "recv_storeTopAnnotations", "(", ")" ]
24
16.8
def _inputhook_tk(inputhook_context):
    """
    Inputhook for Tk.
    Run the Tk eventloop until prompt-toolkit needs to process the next input.
    """
    # Get the current TK application.
    import _tkinter  # Keep these imports inline!
    from six.moves import tkinter
    root = tkinter._default_root

    def wait_using_filehandler():
        """
        Run the TK eventloop until the file handler that we got from the
        inputhook becomes readable.
        """
        # Add a handler that sets the stop flag when `prompt-toolkit` has input
        # to process.
        stop = [False]
        def done(*a):
            stop[0] = True

        root.createfilehandler(inputhook_context.fileno(), _tkinter.READABLE, done)

        # Run the TK event loop as long as we don't receive input.
        while root.dooneevent(_tkinter.ALL_EVENTS):
            if stop[0]:
                break

        root.deletefilehandler(inputhook_context.fileno())

    def wait_using_polling():
        """
        Windows TK doesn't support 'createfilehandler'.
        So, run the TK eventloop and poll until input is ready.
        """
        while not inputhook_context.input_is_ready():
            while root.dooneevent(_tkinter.ALL_EVENTS | _tkinter.DONT_WAIT):
                pass
            # Sleep to make the CPU idle, but not too long, so that the UI
            # stays responsive.
            time.sleep(.01)

    if root is not None:
        if hasattr(root, 'createfilehandler'):
            wait_using_filehandler()
        else:
            wait_using_polling()
[ "def", "_inputhook_tk", "(", "inputhook_context", ")", ":", "# Get the current TK application.", "import", "_tkinter", "# Keep this imports inline!", "from", "six", ".", "moves", "import", "tkinter", "root", "=", "tkinter", ".", "_default_root", "def", "wait_using_filehandler", "(", ")", ":", "\"\"\"\n Run the TK eventloop until the file handler that we got from the\n inputhook becomes readable.\n \"\"\"", "# Add a handler that sets the stop flag when `prompt-toolkit` has input", "# to process.", "stop", "=", "[", "False", "]", "def", "done", "(", "*", "a", ")", ":", "stop", "[", "0", "]", "=", "True", "root", ".", "createfilehandler", "(", "inputhook_context", ".", "fileno", "(", ")", ",", "_tkinter", ".", "READABLE", ",", "done", ")", "# Run the TK event loop as long as we don't receive input.", "while", "root", ".", "dooneevent", "(", "_tkinter", ".", "ALL_EVENTS", ")", ":", "if", "stop", "[", "0", "]", ":", "break", "root", ".", "deletefilehandler", "(", "inputhook_context", ".", "fileno", "(", ")", ")", "def", "wait_using_polling", "(", ")", ":", "\"\"\"\n Windows TK doesn't support 'createfilehandler'.\n So, run the TK eventloop and poll until input is ready.\n \"\"\"", "while", "not", "inputhook_context", ".", "input_is_ready", "(", ")", ":", "while", "root", ".", "dooneevent", "(", "_tkinter", ".", "ALL_EVENTS", "|", "_tkinter", ".", "DONT_WAIT", ")", ":", "pass", "# Sleep to make the CPU idle, but not too long, so that the UI", "# stays responsive.", "time", ".", "sleep", "(", ".01", ")", "if", "root", "is", "not", "None", ":", "if", "hasattr", "(", "root", ",", "'createfilehandler'", ")", ":", "wait_using_filehandler", "(", ")", "else", ":", "wait_using_polling", "(", ")" ]
32.659574
18.021277
def backend():
    """
    :return:
        A unicode string of the backend being used: "openssl", "osx", "win",
        "winlegacy"
    """

    if _module_values['backend'] is not None:
        return _module_values['backend']

    with _backend_lock:
        if _module_values['backend'] is not None:
            return _module_values['backend']

        if sys.platform == 'win32':
            # Windows XP was major version 5, Vista was 6
            if sys.getwindowsversion()[0] < 6:
                _module_values['backend'] = 'winlegacy'
            else:
                _module_values['backend'] = 'win'
        elif sys.platform == 'darwin':
            _module_values['backend'] = 'osx'
        else:
            _module_values['backend'] = 'openssl'

        return _module_values['backend']
[ "def", "backend", "(", ")", ":", "if", "_module_values", "[", "'backend'", "]", "is", "not", "None", ":", "return", "_module_values", "[", "'backend'", "]", "with", "_backend_lock", ":", "if", "_module_values", "[", "'backend'", "]", "is", "not", "None", ":", "return", "_module_values", "[", "'backend'", "]", "if", "sys", ".", "platform", "==", "'win32'", ":", "# Windows XP was major version 5, Vista was 6", "if", "sys", ".", "getwindowsversion", "(", ")", "[", "0", "]", "<", "6", ":", "_module_values", "[", "'backend'", "]", "=", "'winlegacy'", "else", ":", "_module_values", "[", "'backend'", "]", "=", "'win'", "elif", "sys", ".", "platform", "==", "'darwin'", ":", "_module_values", "[", "'backend'", "]", "=", "'osx'", "else", ":", "_module_values", "[", "'backend'", "]", "=", "'openssl'", "return", "_module_values", "[", "'backend'", "]" ]
30
16.307692
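A small usage sketch dispatching on the returned string; the four values below are exactly the ones the function can set, and the print calls are placeholders for real backend-specific code paths.

b = backend()
if b == 'openssl':
    print('using the OpenSSL-backed code paths')
elif b in ('win', 'winlegacy'):
    print('using the Windows crypto code paths')
else:  # 'osx'
    print('using the macOS Security framework code paths')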
def get_levels_and_coordinates_names(self):
    """
    Get the current level of the high level mean plot and the name of
    the corresponding site, study, etc., as well as the code for the
    current coordinate system.

    Returns
    -------
    (high_level_type, high_level_name, coordinate_system) : tuple object
        containing current high level type, name, and coordinate system
        being analyzed
    """
    if self.COORDINATE_SYSTEM == "geographic":
        dirtype = 'DA-DIR-GEO'
    elif self.COORDINATE_SYSTEM == "tilt-corrected":
        dirtype = 'DA-DIR-TILT'
    else:
        dirtype = 'DA-DIR'

    if self.level_box.GetValue() == 'sample':
        high_level_type = 'samples'
    if self.level_box.GetValue() == 'site':
        high_level_type = 'sites'
    if self.level_box.GetValue() == 'location':
        high_level_type = 'locations'
    if self.level_box.GetValue() == 'study':
        high_level_type = 'study'
    high_level_name = str(self.level_names.GetValue())
    return (high_level_type, high_level_name, dirtype)
[ "def", "get_levels_and_coordinates_names", "(", "self", ")", ":", "if", "self", ".", "COORDINATE_SYSTEM", "==", "\"geographic\"", ":", "dirtype", "=", "'DA-DIR-GEO'", "elif", "self", ".", "COORDINATE_SYSTEM", "==", "\"tilt-corrected\"", ":", "dirtype", "=", "'DA-DIR-TILT'", "else", ":", "dirtype", "=", "'DA-DIR'", "if", "self", ".", "level_box", ".", "GetValue", "(", ")", "==", "'sample'", ":", "high_level_type", "=", "'samples'", "if", "self", ".", "level_box", ".", "GetValue", "(", ")", "==", "'site'", ":", "high_level_type", "=", "'sites'", "if", "self", ".", "level_box", ".", "GetValue", "(", ")", "==", "'location'", ":", "high_level_type", "=", "'locations'", "if", "self", ".", "level_box", ".", "GetValue", "(", ")", "==", "'study'", ":", "high_level_type", "=", "'study'", "high_level_name", "=", "str", "(", "self", ".", "level_names", ".", "GetValue", "(", ")", ")", "return", "(", "high_level_type", ",", "high_level_name", ",", "dirtype", ")" ]
39
15.137931
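A hedged usage sketch from inside the same GUI class; the print statement stands in for whatever plotting code actually consumes the tuple.

high_level_type, high_level_name, dirtype = self.get_levels_and_coordinates_names()
print("plotting %s mean for %s (%s)" % (high_level_type, high_level_name, dirtype))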
def save_waypoints(self, filename):
    '''save waypoints to a file'''
    try:
        #need to remove the leading and trailing quotes in filename
        self.wploader.save(filename.strip('"'))
    except Exception as msg:
        print("Failed to save %s - %s" % (filename, msg))
        return
    print("Saved %u waypoints to %s" % (self.wploader.count(), filename))
[ "def", "save_waypoints", "(", "self", ",", "filename", ")", ":", "try", ":", "#need to remove the leading and trailing quotes in filename", "self", ".", "wploader", ".", "save", "(", "filename", ".", "strip", "(", "'\"'", ")", ")", "except", "Exception", "as", "msg", ":", "print", "(", "\"Failed to save %s - %s\"", "%", "(", "filename", ",", "msg", ")", ")", "return", "print", "(", "\"Saved %u waypoints to %s\"", "%", "(", "self", ".", "wploader", ".", "count", "(", ")", ",", "filename", ")", ")" ]
43.888889
18.333333
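A hedged sketch: self.wploader is presumably a MAVLink waypoint loader with save() and count() methods. Because the filename often arrives quoted from a console command parser, the strip('"') makes both of these calls equivalent.

self.save_waypoints('/tmp/mission.txt')
self.save_waypoints('"/tmp/mission.txt"')  # quotes are stripped before wploader.save()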
def readlines(self, size=None):
    """Reads a file into a list of strings.  It calls :meth:`readline`
    until the file is read to the end.  It supports the optional
    `size` argument if the underlying stream supports it for
    `readline`.
    """
    last_pos = self._pos
    result = []
    if size is not None:
        end = min(self.limit, last_pos + size)
    else:
        end = self.limit
    while 1:
        if size is not None:
            size -= last_pos - self._pos
        if self._pos >= end:
            break
        result.append(self.readline(size))
        if size is not None:
            last_pos = self._pos
    return result
[ "def", "readlines", "(", "self", ",", "size", "=", "None", ")", ":", "last_pos", "=", "self", ".", "_pos", "result", "=", "[", "]", "if", "size", "is", "not", "None", ":", "end", "=", "min", "(", "self", ".", "limit", ",", "last_pos", "+", "size", ")", "else", ":", "end", "=", "self", ".", "limit", "while", "1", ":", "if", "size", "is", "not", "None", ":", "size", "-=", "last_pos", "-", "self", ".", "_pos", "if", "self", ".", "_pos", ">=", "end", ":", "break", "result", ".", "append", "(", "self", ".", "readline", "(", "size", ")", ")", "if", "size", "is", "not", "None", ":", "last_pos", "=", "self", ".", "_pos", "return", "result" ]
34.190476
13.380952
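A usage sketch assuming this is a Werkzeug-style LimitedStream(stream, limit) wrapper, which is what the self.limit and self._pos attributes suggest; the constructor shown is an assumption for illustration.

from io import BytesIO

stream = LimitedStream(BytesIO(b'a\nb\nc\nd\n'), limit=4)  # assumed wrapper
print(stream.readlines())  # [b'a\n', b'b\n'] -- reading stops at the 4-byte limit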
def annotate_tree_properties(comments):
    """
    Iterate through nodes and add some magic properties to each of them,
    representing the opening of a list of children and the closing of it.
    """
    if not comments:
        return

    it = iter(comments)

    # get the first item, this will fail if no items !
    old = next(it)

    # first item starts a new thread
    old.open = True
    last = set()
    for c in it:
        # if this comment has a parent, store its last child for future reference
        if old.last_child_id:
            last.add(old.last_child_id)

        # this is the last child, mark it
        if c.pk in last:
            c.last = True

        # increase the depth
        if c.depth > old.depth:
            c.open = True

        else:  # c.depth <= old.depth
            # close some depths
            old.close = list(range(old.depth - c.depth))

            # new thread
            if old.root_id != c.root_id:
                # close even the top depth
                old.close.append(len(old.close))

                # and start a new thread
                c.open = True

                # empty the last set
                last = set()
        # iterate
        yield old
        old = c

    old.close = range(old.depth)
    yield old
[ "def", "annotate_tree_properties", "(", "comments", ")", ":", "if", "not", "comments", ":", "return", "it", "=", "iter", "(", "comments", ")", "# get the first item, this will fail if no items !", "old", "=", "next", "(", "it", ")", "# first item starts a new thread", "old", ".", "open", "=", "True", "last", "=", "set", "(", ")", "for", "c", "in", "it", ":", "# if this comment has a parent, store its last child for future reference", "if", "old", ".", "last_child_id", ":", "last", ".", "add", "(", "old", ".", "last_child_id", ")", "# this is the last child, mark it", "if", "c", ".", "pk", "in", "last", ":", "c", ".", "last", "=", "True", "# increase the depth", "if", "c", ".", "depth", ">", "old", ".", "depth", ":", "c", ".", "open", "=", "True", "else", ":", "# c.depth <= old.depth", "# close some depths", "old", ".", "close", "=", "list", "(", "range", "(", "old", ".", "depth", "-", "c", ".", "depth", ")", ")", "# new thread", "if", "old", ".", "root_id", "!=", "c", ".", "root_id", ":", "# close even the top depth", "old", ".", "close", ".", "append", "(", "len", "(", "old", ".", "close", ")", ")", "# and start a new thread", "c", ".", "open", "=", "True", "# empty the last set", "last", "=", "set", "(", ")", "# iterate", "yield", "old", "old", "=", "c", "old", ".", "close", "=", "range", "(", "old", ".", "depth", ")", "yield", "old" ]
26.06383
18.06383
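A hedged rendering sketch: each comment object is assumed to carry pk, depth, root_id, and last_child_id, which is all the generator requires; the HTML fragments are illustrative only.

for comment in annotate_tree_properties(comments):
    if getattr(comment, 'open', False):
        print('<ul>')             # a new thread / children list starts here
    print('<li>comment %s</li>' % comment.pk)
    for _ in getattr(comment, 'close', []):
        print('</ul>')            # close one nesting level per item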
def parse_alert(server_handshake_bytes):
    """
    Parses the handshake for protocol alerts

    :param server_handshake_bytes:
        A byte string of the handshake data received from the server

    :return:
        None or a 2-element tuple of integers:
         0: 1 (warning) or 2 (fatal)
         1: The alert description (see https://tools.ietf.org/html/rfc5246#section-7.2)
    """

    for record_type, _, record_data in parse_tls_records(server_handshake_bytes):
        if record_type != b'\x15':
            continue
        if len(record_data) != 2:
            return None
        return (int_from_bytes(record_data[0:1]), int_from_bytes(record_data[1:2]))
    return None
[ "def", "parse_alert", "(", "server_handshake_bytes", ")", ":", "for", "record_type", ",", "_", ",", "record_data", "in", "parse_tls_records", "(", "server_handshake_bytes", ")", ":", "if", "record_type", "!=", "b'\\x15'", ":", "continue", "if", "len", "(", "record_data", ")", "!=", "2", ":", "return", "None", "return", "(", "int_from_bytes", "(", "record_data", "[", "0", ":", "1", "]", ")", ",", "int_from_bytes", "(", "record_data", "[", "1", ":", "2", "]", ")", ")", "return", "None" ]
33.55
20.15
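A usage sketch with a hand-built TLS record: type 0x15 (alert), two version bytes, a two-byte length, then level and description. Assuming parse_tls_records understands standard five-byte record headers, this yields (2, 40), i.e. a fatal handshake_failure.

record = b'\x15\x03\x03\x00\x02' + b'\x02\x28'  # alert record, TLS 1.2 framing
print(parse_alert(record))  # (2, 40) under the framing assumption above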
def delete_image(self, name: str) -> None:
    """
    Deletes a Docker image with a given name.

    Parameters:
        name: the name of the Docker image.
    """
    logger.debug("deleting Docker image: %s", name)
    path = "docker/images/{}".format(name)
    response = self.__api.delete(path)
    if response.status_code != 204:
        try:
            self.__api.handle_erroneous_response(response)
        except Exception:
            logger.exception("failed to delete Docker image: %s", name)
            raise
    else:
        logger.info("deleted Docker image: %s", name)
[ "def", "delete_image", "(", "self", ",", "name", ":", "str", ")", "->", "None", ":", "logger", ".", "debug", "(", "\"deleting Docker image: %s\"", ",", "name", ")", "path", "=", "\"docker/images/{}\"", ".", "format", "(", "name", ")", "response", "=", "self", ".", "__api", ".", "delete", "(", "path", ")", "if", "response", ".", "status_code", "!=", "204", ":", "try", ":", "self", ".", "__api", ".", "handle_erroneous_response", "(", "response", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "\"failed to delete Docker image: %s\"", ",", "name", ")", "raise", "else", ":", "logger", ".", "info", "(", "\"deleted Docker image: %s\"", ",", "name", ")" ]
35.222222
14.333333
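A hedged usage sketch; client stands in for whatever object exposes this method, and the image name is a placeholder.

try:
    client.delete_image('example/image:latest')
except Exception:
    print('deletion failed; the API response was already logged above')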
def submit(self, queue=None, options=[]):
    """Submits the job either locally or to a remote server if it is
    defined.

    Args:
        queue (int, optional): The number of sub-jobs to run. This
            argument will set the num_jobs attribute of this object.
            Defaults to None, meaning the value of num_jobs will be
            used.
        options (list of str, optional): A list of command line options
            for the condor_submit command. For details on valid options
            see:
            http://research.cs.wisc.edu/htcondor/manual/current/condor_submit.html.
            Defaults to an empty list.
    """
    if not self.executable:
        log.error('Job %s was submitted with no executable', self.name)
        raise NoExecutable('You cannot submit a job without an executable')

    self._num_jobs = queue or self.num_jobs

    self._write_job_file()
    args = ['condor_submit']
    args.extend(options)
    args.append(self.job_file)
    log.info('Submitting job %s with options: %s', self.name, args)
    return super(Job, self).submit(args)
[ "def", "submit", "(", "self", ",", "queue", "=", "None", ",", "options", "=", "[", "]", ")", ":", "if", "not", "self", ".", "executable", ":", "log", ".", "error", "(", "'Job %s was submitted with no executable'", ",", "self", ".", "name", ")", "raise", "NoExecutable", "(", "'You cannot submit a job without an executable'", ")", "self", ".", "_num_jobs", "=", "queue", "or", "self", ".", "num_jobs", "self", ".", "_write_job_file", "(", ")", "args", "=", "[", "'condor_submit'", "]", "args", ".", "extend", "(", "options", ")", "args", ".", "append", "(", "self", ".", "job_file", ")", "log", ".", "info", "(", "'Submitting job %s with options: %s'", ",", "self", ".", "name", ",", "args", ")", "return", "super", "(", "Job", ",", "self", ")", ".", "submit", "(", "args", ")" ]
43.6
28.08
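A hedged usage sketch: the Job constructor arguments are assumptions for illustration, but -verbose is a real condor_submit flag, and queue=10 exercises the num_jobs path shown above.

job = Job(name='sweep', executable='/usr/bin/python')  # constructor args assumed
job.submit(queue=10, options=['-verbose'])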