Dataset columns:
    text           string   (lengths 89 to 104k)
    code_tokens    list
    avg_line_len   float64  (7.91 to 980)
    score          float64  (0 to 630)
async def game(
        self, short_name, *, id=None, text=None, parse_mode=(),
        link_preview=True, geo=None, period=60, contact=None,
        game=False, buttons=None):
    """
    Creates a new inline result of game type.

    Args:
        short_name (`str`):
            The short name of the game to use.
    """
    result = types.InputBotInlineResultGame(
        id=id or '',
        short_name=short_name,
        send_message=await self._message(
            text=text, parse_mode=parse_mode, link_preview=link_preview,
            geo=geo, period=period, contact=contact, game=game,
            buttons=buttons
        )
    )
    if id is None:
        result.id = hashlib.sha256(bytes(result)).hexdigest()
    return result
avg_line_len: 31.074074, score: 17.222222
def payments(self, cursor=None, order='asc', limit=10, sse=False):
    """Retrieve the payments JSON from this instance's Horizon server.

    Retrieve the payments JSON response for the account associated with
    this :class:`Address`.

    :param cursor: A paging token, specifying where to start returning
        records from. When streaming this can be set to "now" to stream
        objects created since your request time.
    :type cursor: int, str
    :param str order: The order in which to return rows, "asc" or "desc".
    :param int limit: Maximum number of records to return.
    :param bool sse: Use server side events for streaming responses.
    """
    return self.horizon.account_payments(
        address=self.address, cursor=cursor, order=order, limit=limit,
        sse=sse)
avg_line_len: 53.466667, score: 31.8
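A minimal usage sketch (the Address constructor and its account ID are assumptions for illustration; only payments() itself comes from the snippet above):

    # Hypothetical: assumes Address is built from a Stellar account ID.
    address = Address(address='GABC...')  # placeholder account ID
    recent = address.payments(order='desc', limit=5)  # five most recent payments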
def output(data, **kwargs):  # pylint: disable=unused-argument
    '''
    Read in the dict structure generated by the salt key API methods and
    print the structure.
    '''
    color = salt.utils.color.get_colors(
        __opts__.get('color'),
        __opts__.get('color_theme'))
    strip_colors = __opts__.get('strip_colors', True)
    ident = 0
    if __opts__.get('__multi_key'):
        ident = 4
    if __opts__['transport'] in ('zeromq', 'tcp'):
        acc = 'minions'
        pend = 'minions_pre'
        den = 'minions_denied'
        rej = 'minions_rejected'
        cmap = {pend: color['RED'],
                acc: color['GREEN'],
                den: color['MAGENTA'],
                rej: color['BLUE'],
                'local': color['MAGENTA']}
        trans = {pend: u'{0}{1}Unaccepted Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_RED'], color['ENDC']),
                 acc: u'{0}{1}Accepted Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_GREEN'], color['ENDC']),
                 den: u'{0}{1}Denied Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_MAGENTA'], color['ENDC']),
                 rej: u'{0}{1}Rejected Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_BLUE'], color['ENDC']),
                 'local': u'{0}{1}Local Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_MAGENTA'], color['ENDC'])}
    else:
        acc = 'accepted'
        pend = 'pending'
        rej = 'rejected'
        cmap = {pend: color['RED'],
                acc: color['GREEN'],
                rej: color['BLUE'],
                'local': color['MAGENTA']}
        trans = {pend: u'{0}{1}Unaccepted Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_RED'], color['ENDC']),
                 acc: u'{0}{1}Accepted Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_GREEN'], color['ENDC']),
                 rej: u'{0}{1}Rejected Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_BLUE'], color['ENDC']),
                 'local': u'{0}{1}Local Keys:{2}'.format(
                     ' ' * ident, color['LIGHT_MAGENTA'], color['ENDC'])}
    ret = ''
    for status in sorted(data):
        ret += u'{0}\n'.format(trans[status])
        for key in sorted(data[status]):
            key = salt.utils.data.decode(key)
            skey = salt.output.strip_esc_sequence(key) if strip_colors else key
            if isinstance(data[status], list):
                ret += u'{0}{1}{2}{3}\n'.format(
                    ' ' * ident, cmap[status], skey, color['ENDC'])
            if isinstance(data[status], dict):
                ret += u'{0}{1}{2}: {3}{4}\n'.format(
                    ' ' * ident, cmap[status], skey,
                    data[status][key], color['ENDC'])
    return ret
avg_line_len: 39.978261, score: 13.934783
def get_conn(cls):
    """Return a connection object to the ldap database"""
    conn = cls._conn
    if conn is None or conn.closed:
        conn = ldap3.Connection(
            settings.CAS_LDAP_SERVER,
            settings.CAS_LDAP_USER,
            settings.CAS_LDAP_PASSWORD,
            client_strategy="RESTARTABLE",
            auto_bind=True
        )
        cls._conn = conn
    return conn
avg_line_len: 33.615385, score: 9.538462
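The cache-and-reconnect pattern above is generic; a minimal self-contained sketch of the same idea using a raw socket in place of ldap3 (the host and port are assumptions, purely for illustration):

    import socket

    class ConnectionHolder:
        _conn = None  # class-level cache shared by all callers

        @classmethod
        def get_conn(cls):
            # Reuse the cached connection; rebuild it only when it is
            # missing or has been closed (fileno() is -1 after close()).
            if cls._conn is None or cls._conn.fileno() == -1:
                cls._conn = socket.create_connection(('example.com', 80))
            return cls._conn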
def sortable_title(instance):
    """Uses the default Plone sortable_text index lower-case
    """
    title = plone_sortable_title(instance)
    if safe_callable(title):
        title = title()
    return title.lower()
avg_line_len: 30.428571, score: 8.285714
def _lookup(self, p, directory, fsclass, create=1):
    """
    The generic entry point for Node lookup with user-supplied data.

    This translates arbitrary input into a canonical Node.FS object
    of the specified fsclass.

    The general approach for strings is to turn it into a fully
    normalized absolute path and then call the root directory's
    lookup_abs() method for the heavy lifting.

    If the path name begins with '#', it is unconditionally
    interpreted relative to the top-level directory of this FS.  '#'
    is treated as a synonym for the top-level SConstruct directory,
    much like '~' is treated as a synonym for the user's home
    directory in a UNIX shell.  So both '#foo' and '#/foo' refer to
    the 'foo' subdirectory underneath the top-level SConstruct
    directory.

    If the path name is relative, then the path is looked up relative
    to the specified directory, or the current directory (self._cwd,
    typically the SConscript directory) if the specified directory is
    None.
    """
    if isinstance(p, Base):
        # It's already a Node.FS object.  Make sure it's the right
        # class and return.
        p.must_be_same(fsclass)
        return p
    # str(p) in case it's something like a proxy object
    p = str(p)

    if not os_sep_is_slash:
        p = p.replace(OS_SEP, '/')

    if p[0:1] == '#':
        # There was an initial '#', so we strip it and override
        # whatever directory they may have specified with the
        # top-level SConstruct directory.
        p = p[1:]
        directory = self.Top

        # There might be a drive letter following the '#'.  Although
        # it is not described in the SCons man page, the regression
        # test suite explicitly tests for that syntax.  It seems to
        # mean the following thing:
        #
        #   Assuming that the SCons top dir is in C:/xxx/yyy,
        #   '#X:/toto' means X:/xxx/yyy/toto.
        #
        # i.e. it assumes that the X: drive has a directory
        # structure similar to the one found on drive C:.
        if do_splitdrive:
            drive, p = _my_splitdrive(p)
            if drive:
                root = self.get_root(drive)
            else:
                root = directory.root
        else:
            root = directory.root

        # We can only strip trailing '/' after splitting the drive
        # since the drive might be the UNC '//' prefix.
        p = p.strip('/')

        needs_normpath = needs_normpath_match(p)

        # The path is relative to the top-level SCons directory.
        if p in ('', '.'):
            p = directory.get_labspath()
        else:
            p = directory.get_labspath() + '/' + p
    else:
        if do_splitdrive:
            drive, p = _my_splitdrive(p)
            if drive and not p:
                # This causes a naked drive letter to be treated
                # as a synonym for the root directory on that drive.
                p = '/'
        else:
            drive = ''

        # We can only strip trailing '/' since the drive might be the
        # UNC '//' prefix.
        if p != '/':
            p = p.rstrip('/')

        needs_normpath = needs_normpath_match(p)

        if p[0:1] == '/':
            # Absolute path
            root = self.get_root(drive)
        else:
            # This is a relative lookup, relative to the specified
            # directory or to the current directory (the path name is
            # not absolute).  Add the string to the appropriate
            # directory lookup path, after which the whole thing gets
            # normalized.
            if directory:
                if not isinstance(directory, Dir):
                    directory = self.Dir(directory)
            else:
                directory = self._cwd

            if p in ('', '.'):
                p = directory.get_labspath()
            else:
                p = directory.get_labspath() + '/' + p

            if drive:
                root = self.get_root(drive)
            else:
                root = directory.root

    if needs_normpath is not None:
        # Normalize a pathname.  Will return the same result for
        # equivalent paths.
        #
        # We take advantage of the fact that we have an absolute
        # path here for sure.  In addition, we know that the
        # components of the lookup path are separated by slashes at
        # this point.  Because of this, this code is about 2X faster
        # than calling os.path.normpath() followed by replacing
        # os.sep with '/' again.
        ins = p.split('/')[1:]
        outs = []
        for d in ins:
            if d == '..':
                try:
                    outs.pop()
                except IndexError:
                    pass
            elif d not in ('', '.'):
                outs.append(d)
        p = '/' + '/'.join(outs)

    return root._lookup_abs(p, fsclass, create)
avg_line_len: 38.6, score: 19.459259
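The final normalization loop stands on its own; a minimal sketch of the same '..'-collapsing algorithm over slash-separated absolute paths:

    def normalize_abs(path):
        # Collapse '', '.' and '..' components of an absolute,
        # slash-separated path, mirroring the loop in _lookup().
        outs = []
        for d in path.split('/')[1:]:
            if d == '..':
                if outs:
                    outs.pop()  # '..' above the root is ignored
            elif d not in ('', '.'):
                outs.append(d)
        return '/' + '/'.join(outs)

    assert normalize_abs('/a/b/../c/./d//') == '/a/c/d'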
def _set_meta(self):
    """
    Set the meta fields of the ParameterSet as those that are shared
    by ALL parameters in the ParameterSet.  For any fields that are
    not shared by all parameters, the field is set to None.
    """
    # we want to set meta-fields that are shared by ALL params in the PS
    for field in _meta_fields_twig:
        keys_for_this_field = set([getattr(p, field)
                                   for p in self.to_list()
                                   if getattr(p, field) is not None])
        if len(keys_for_this_field) == 1:
            setattr(self, '_' + field, list(keys_for_this_field)[0])
        else:
            setattr(self, '_' + field, None)
avg_line_len: 45.2, score: 18.8
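The same shared-or-None reduction works on any collection of objects; a small self-contained sketch:

    from types import SimpleNamespace

    def shared_value(items, field):
        # Distinct non-None values of `field`; keep it only if all agree.
        values = {getattr(i, field) for i in items
                  if getattr(i, field) is not None}
        return values.pop() if len(values) == 1 else None

    params = [SimpleNamespace(context='system'),
              SimpleNamespace(context='system')]
    assert shared_value(params, 'context') == 'system'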
def dropEvent(self, event):
    """
    Processes the drag drop event using the filter set by the
    setDragDropFilter

    :param      event | <QDropEvent>
    """
    filt = self.dragDropFilter()
    if not filt:
        super(XTreeWidget, self).dropEvent(event)
        return

    filt(self, event)
avg_line_len: 27.923077, score: 14.384615
def add(x1, x2, output_shape=None, name=None):
    """Binary addition with broadcasting.

    Args:
        x1: a Tensor
        x2: a Tensor
        output_shape: an optional Shape
        name: an optional string

    Returns:
        a Tensor
    """
    output_shape = convert_to_shape(output_shape)
    if not isinstance(x2, Tensor):
        return ScalarAddOperation(x1, x2).outputs[0]
    with tf.name_scope(name, default_name="add"):
        x1, x2 = binary_arguments_to_tensors(x1, x2)
        return AddOperation(
            x1, x2, output_shape=_infer_binary_broadcast_shape(
                x1.shape, x2.shape, output_shape)).outputs[0]
avg_line_len: 30.263158, score: 15.368421
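The inferred output shape follows NumPy-style broadcasting rules; a quick plain-NumPy illustration of what "binary addition with broadcasting" produces (the mesh-tensorflow helper above computes an analogous shape symbolically):

    import numpy as np

    a = np.ones((3, 1))   # shape (3, 1)
    b = np.arange(4)      # shape (4,)
    c = a + b             # broadcasts to shape (3, 4)
    assert c.shape == (3, 4)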
def set(self, handler, attr, name, path, cfg):
    """
    Obtain value for config variable, by prompting the user for input
    and substituting a default value if needed.

    Also does validation on user input
    """
    full_name = ("%s.%s" % (path, name)).strip(".")

    # obtain default value
    if attr.default is None:
        default = None
    else:
        try:
            comp = vodka.component.Component(cfg)
            default = handler.default(name, inst=comp)
            if self.skip_defaults:
                self.echo("%s: %s [default]" % (full_name, default))
                return default
        except Exception:
            raise

    # render explanation
    self.echo("")
    self.echo(attr.help_text)
    if attr.choices:
        self.echo("choices: %s" % ", ".join([str(c) for c in attr.choices]))

    # obtain user input and validate until input is valid
    b = False
    while not b:
        try:
            if type(attr.expected_type) == type:
                r = self.prompt(full_name, default=default,
                                type=attr.expected_type)
                r = attr.expected_type(r)
            else:
                r = self.prompt(full_name, default=default, type=str)
        except ValueError:
            self.echo("Value expected to be of type %s" % attr.expected_type)
        try:
            b = handler.check({name: r}, name, path)
        except Exception as inst:
            if hasattr(inst, "explanation"):
                self.echo(inst.explanation)
            else:
                raise
    return r
avg_line_len: 33.3, score: 19.9
def search_certificate(self, hash):
    """
    Searches for a specific certificate using its hash

    :param hash: certificate hash
    :type hash: str
    :return: dict
    """
    c = CensysCertificates(api_id=self.__uid, api_secret=self.__api_key)
    return c.view(hash)
avg_line_len: 29.9, score: 15.1
def _checkRelatesTo(self, value):
    '''WS-Address From value -- From server returned.
    '''
    if value != self._messageID:
        raise WSActionException(
            'wrong WS-Address RelatesTo(%s), expecting %s'
            % (value, self._messageID))
avg_line_len: 42, score: 20.333333
def p_namedblock(self, p):
    'namedblock : BEGIN COLON ID namedblock_statements END'
    p[0] = Block(p[4], p[3], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
avg_line_len: 44.25, score: 13.25
def get_topic_keyword_dictionary():
    """
    Opens the topic-keyword map resource file and returns the corresponding
    python dictionary.

    - Input:
        - The resource file path is fixed to
          twitter/res/topics/topic_keyword_mapping.txt inside the package.

    - Output:
        - topic_keyword_dictionary: A topic to keyword python dictionary.
    """
    topic_keyword_dictionary = dict()
    file_row_gen = get_file_row_generator(
        get_package_path() + "/twitter/res/topics/topic_keyword_mapping" + ".txt",
        ",", "utf-8")
    for file_row in file_row_gen:
        topic_keyword_dictionary[file_row[0]] = set(
            [keyword for keyword in file_row[1:]])
    return topic_keyword_dictionary
avg_line_len: 43.625, score: 25.25
def create(self, name, incident_preference):
    """
    This API endpoint allows you to create an alert policy

    :type name: str
    :param name: The name of the policy

    :type incident_preference: str
    :param incident_preference: Can be PER_POLICY, PER_CONDITION or
        PER_CONDITION_AND_TARGET

    :rtype: dict
    :return: The JSON response of the API

    ::

        {
            "policy": {
                "created_at": "time",
                "id": "integer",
                "incident_preference": "string",
                "name": "string",
                "updated_at": "time"
            }
        }
    """
    data = {
        "policy": {
            "name": name,
            "incident_preference": incident_preference
        }
    }

    return self._post(
        url='{0}alerts_policies.json'.format(self.URL),
        headers=self.headers,
        data=data
    )
avg_line_len: 24.65, score: 19.65
def qnormal(mu, sigma, q, random_state):
    '''
    mu: float or array_like of floats
    sigma: float or array_like of floats
    q: sample step
    random_state: an object of numpy.random.RandomState
    '''
    return np.round(normal(mu, sigma, random_state) / q) * q
avg_line_len: 33, score: 15.75
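Dividing by q, rounding, and multiplying back snaps a sample onto the grid {..., -q, 0, q, 2q, ...}; a small check with plain NumPy:

    import numpy as np

    q = 0.5
    x = 1.37  # stand-in for a draw from normal(mu, sigma, random_state)
    assert np.round(x / q) * q == 1.5  # nearest multiple of 0.5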
def get_user(request):
    """
    Returns the user model instance associated with the given request
    session.

    If no user is retrieved an instance of `MojAnonymousUser` is returned.
    """
    user = None
    try:
        user_id = request.session[SESSION_KEY]
        token = request.session[AUTH_TOKEN_SESSION_KEY]
        user_data = request.session[USER_DATA_SESSION_KEY]
        backend_path = request.session[BACKEND_SESSION_KEY]
    except KeyError:
        pass
    else:
        if backend_path in settings.AUTHENTICATION_BACKENDS:
            backend = load_backend(backend_path)
            user = backend.get_user(user_id, token, user_data)
            # Verify the session
            if hasattr(user, 'get_session_auth_hash'):
                session_hash = request.session.get(HASH_SESSION_KEY)
                session_hash_verified = session_hash and constant_time_compare(
                    session_hash,
                    user.get_session_auth_hash()
                )
                if not session_hash_verified:
                    request.session.flush()
                    user = None
    return user or MojAnonymousUser()
avg_line_len: 38.862069, score: 17.965517
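The constant_time_compare call matters: comparing session hashes with == leaks timing information. A minimal sketch of such a helper on top of the stdlib's hmac.compare_digest (an assumption about how the imported helper is implemented):

    import hmac

    def constant_time_compare(val1, val2):
        # Runtime is independent of how many leading characters match,
        # so an attacker cannot probe the hash byte by byte.
        return hmac.compare_digest(val1.encode(), val2.encode())

    assert constant_time_compare('abc123', 'abc123')
    assert not constant_time_compare('abc123', 'abc124')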
async def send_packed_command(self, command):
    "Send an already packed command to the Redis server"
    if not self._writer:
        await self.connect()
    try:
        if isinstance(command, str):
            command = [command]
        self._writer.writelines(command)
    except asyncio.futures.TimeoutError:
        self.disconnect()
        raise TimeoutError("Timeout writing to socket")
    except Exception:
        e = sys.exc_info()[1]
        self.disconnect()
        if len(e.args) == 1:
            errno, errmsg = 'UNKNOWN', e.args[0]
        else:
            errno = e.args[0]
            errmsg = e.args[1]
        raise ConnectionError("Error %s while writing to socket. %s." %
                              (errno, errmsg))
    except:
        self.disconnect()
        raise
avg_line_len: 36.208333, score: 12.875
def keys(request):
    """Lists API keys. Compatible with jQuery DataTables."""
    iDisplayStart = parse_int_param(request, 'iDisplayStart')
    iDisplayLength = parse_int_param(request, 'iDisplayLength')
    sEcho = parse_int_param(request, 'sEcho')
    iSortCol_0 = parse_int_param(request, 'iSortCol_0')
    sSortDir_0 = request.GET.get('sSortDir_0', 'asc')
    sSearch = request.GET.get('sSearch')

    columns = ['key', 'email', 'calls', 'latest_call', 'issued_on']

    qry = Key.objects
    if sSearch not in (None, ''):
        qry = qry.filter(Q(key__icontains=sSearch)
                         | Q(email__icontains=sSearch)
                         | Q(name__icontains=sSearch)
                         | Q(org_name__icontains=sSearch)
                         | Q(org_url__icontains=sSearch))
    qry = qry.values('key', 'email', 'issued_on').annotate(
        calls=Sum('reports__calls'), latest_call=Max('reports__date'))
    qry = qry.filter(calls__isnull=False)
    qry = exclude_internal_keys(qry)

    # TODO: Add multi-column sorting
    if iSortCol_0 not in (None, ''):
        sort_col_field = columns[iSortCol_0]
        sort_spec = '{dir}{col}'.format(
            dir='-' if sSortDir_0 == 'desc' else '',
            col=sort_col_field)
        qry = qry.order_by(sort_spec)

    result = {
        'iTotalRecords': Key.objects.count(),
        'iTotalDisplayRecords': qry.count(),
        'sEcho': sEcho,
        'aaData': [[k['key'],
                    '<a href="{0}">{1}</a>'.format(
                        reverse('key_analytics', args=(k['key'], )),
                        k['email']),
                    k['calls'],
                    k['latest_call'].isoformat(),
                    k['issued_on'].date().isoformat()]
                   for k in qry[iDisplayStart:iDisplayStart + iDisplayLength]]
    }
    return HttpResponse(content=json.dumps(result), status=200,
                        content_type='application/json')
avg_line_len: 47.725, score: 19.275
def natsort(string):
    '''Sort strings by their natural-language meaning.

    This replaces sorting strings by raw character-code order.

    Related links:
    http://stackoverflow.com/questions/2545532/python-analog-of-natsort-function-sort-a-list-using-a-natural-order-algorithm
    http://www.codinghorror.com/blog/2007/12/sorting-for-humans-natural-sort-order.html
    '''
    return [int(s) if s.isdigit() else s for s in re.split(r'(\d+)', string)]
avg_line_len: 41.777778, score: 35.333333
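Used as a sort key, the mixed int/str list it returns orders embedded numbers numerically (assumes natsort above and `import re` are in scope):

    names = ['file10.txt', 'file2.txt', 'file1.txt']
    assert sorted(names, key=natsort) == ['file1.txt', 'file2.txt', 'file10.txt']
    # Plain lexicographic order would give ['file1.txt', 'file10.txt', 'file2.txt'].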
def load_pem_private_key(data, password):
    """Load an RSA private key from PEM data."""
    key = _serialization.load_pem_private_key(
        data, password, _backends.default_backend())
    return key
avg_line_len: 37.6, score: 9
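A usage sketch with the cryptography package, which the _serialization and _backends aliases above presumably wrap (the key file name is hypothetical):

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization

    with open('server.key', 'rb') as f:  # hypothetical PEM file
        key = serialization.load_pem_private_key(
            f.read(), password=None, backend=default_backend())
    print(key.key_size)  # e.g. 2048 for a typical RSA key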
def get_ccle_mrna(gene_list, cell_lines):
    """Return a dict of mRNA amounts in given genes and cell lines from CCLE.

    Parameters
    ----------
    gene_list : list[str]
        A list of HGNC gene symbols to get mRNA amounts for.
    cell_lines : list[str]
        A list of CCLE cell line names to get mRNA amounts for.

    Returns
    -------
    mrna_amounts : dict[dict[float]]
        A dict keyed to cell lines containing a dict keyed to genes
        containing float
    """
    gene_list_str = ','.join(gene_list)
    data = {'cmd': 'getProfileData',
            'case_set_id': ccle_study + '_mrna',
            'genetic_profile_id': ccle_study + '_mrna',
            'gene_list': gene_list_str,
            'skiprows': -1}
    df = send_request(**data)
    mrna_amounts = {cl: {g: [] for g in gene_list} for cl in cell_lines}
    for cell_line in cell_lines:
        if cell_line in df.columns:
            for gene in gene_list:
                value_cell = df[cell_line][df['COMMON'] == gene]
                if value_cell.empty:
                    mrna_amounts[cell_line][gene] = None
                elif pandas.isnull(value_cell.values[0]):
                    mrna_amounts[cell_line][gene] = None
                else:
                    value = value_cell.values[0]
                    mrna_amounts[cell_line][gene] = value
        else:
            mrna_amounts[cell_line] = None
    return mrna_amounts
avg_line_len: 36.710526, score: 15.289474
def _init(self):
    """
    Initialize all values based on provided input.

    :return: None
    """
    self.col_count = len(self.col_list)
    # list of lengths of longest entries in columns
    self.col_longest = self.get_all_longest_col_lengths()
    self.data_length = sum(self.col_longest.values())

    if self.terminal_width > 0:
        # Free space is space which should be equally distributed for
        # all columns:
        #   self.terminal_width  -- terminal is our canvas
        #   - self.data_length   -- subtract length of content (the actual data)
        #   - self.col_count + 1 -- table lines are not part of free space,
        #                           their width is (number of columns - 1)
        self.total_free_space = (self.terminal_width - self.data_length) \
            - self.col_count + 1
        if self.total_free_space <= 0:
            self.total_free_space = None
        else:
            self.default_column_space = self.total_free_space // self.col_count
            self.default_column_space_remainder = \
                self.total_free_space % self.col_count
            logger.debug(
                "total free space: %d, column space: %d, remainder: %d, "
                "columns: %d",
                self.total_free_space, self.default_column_space,
                self.default_column_space_remainder, self.col_count)
    else:
        self.total_free_space = None
avg_line_len: 51.071429, score: 25.857143
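A quick worked instance of the free-space arithmetic in the comment above (the numbers are made up):

    terminal_width = 80
    data_length = 59   # sum of the longest entry per column
    col_count = 3

    # 80 - 59 - 3 + 1 = 19 characters of padding to distribute
    total_free_space = (terminal_width - data_length) - col_count + 1
    assert total_free_space == 19
    assert total_free_space // col_count == 6  # default space per column
    assert total_free_space % col_count == 1   # remainder, handed out one by one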
def _init_tools(self, element, callbacks=[]):
    """
    Processes the list of tools to be supplied to the plot.
    """
    tooltips, hover_opts = self._hover_opts(element)
    tooltips = [(ttp.pprint_label, '@{%s}' % util.dimension_sanitizer(ttp.name))
                if isinstance(ttp, Dimension) else ttp for ttp in tooltips]
    if not tooltips:
        tooltips = None

    callbacks = callbacks + self.callbacks
    cb_tools, tool_names = [], []
    hover = False
    for cb in callbacks:
        for handle in cb.models + cb.extra_models:
            if handle and handle in known_tools:
                tool_names.append(handle)
                if handle == 'hover':
                    tool = tools.HoverTool(
                        tooltips=tooltips, tags=['hv_created'], **hover_opts)
                    hover = tool
                else:
                    tool = known_tools[handle]()
                cb_tools.append(tool)
                self.handles[handle] = tool

    tool_list = [
        t for t in cb_tools + self.default_tools + self.tools
        if t not in tool_names]

    copied_tools = []
    for tool in tool_list:
        if isinstance(tool, tools.Tool):
            properties = tool.properties_with_values(include_defaults=False)
            tool = type(tool)(**properties)
        copied_tools.append(tool)

    hover_tools = [t for t in copied_tools if isinstance(t, tools.HoverTool)]
    if 'hover' in copied_tools:
        hover = tools.HoverTool(tooltips=tooltips, tags=['hv_created'],
                                **hover_opts)
        copied_tools[copied_tools.index('hover')] = hover
    elif any(hover_tools):
        hover = hover_tools[0]
    if hover:
        self.handles['hover'] = hover
    return copied_tools
avg_line_len: 40.521739, score: 15.782609
def get_current_traceback(ignore_system_exceptions=False):
    """Get the current exception info as `Traceback` object.

    If `ignore_system_exceptions` is true, system exceptions such as
    generator exit or system exit are re-raised rather than being
    wrapped in a `Traceback`.
    """
    exc_type, exc_value, tb = sys.exc_info()
    if ignore_system_exceptions and exc_type in system_exceptions:
        raise
    return Traceback(exc_type, exc_value, tb)
avg_line_len: 49.9, score: 15.5
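A usage sketch: the function only makes sense inside an active except block, where sys.exc_info() is populated (log_error is a hypothetical consumer):

    try:
        1 / 0
    except Exception:
        # Capture the in-flight exception as a Traceback object
        # instead of letting it propagate.
        traceback = get_current_traceback(ignore_system_exceptions=True)
        log_error(traceback)  # hypothetical logging helper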
def location(self):
    """
    Returns a ``string`` constant to indicate whether the game was played
    at home, away, or in a neutral location.
    """
    if self._location.lower() == 'n':
        return NEUTRAL
    if self._location.lower() == '@':
        return AWAY
    return HOME
avg_line_len: 31.6, score: 12
def send(self, message) :
    "puts a message in the outgoing queue."
    if not isinstance(message, Message) :
        raise TypeError("message must be a Message")
    #end if
    serial = ct.c_uint()
    if not dbus.dbus_connection_send(self._dbobj, message._dbobj,
                                     ct.byref(serial)) :
        raise CallFailed("dbus_connection_send")
    #end if
    return \
        serial.value
avg_line_len: 37.454545, score: 18.727273
def register_error_handler(
        self,
        error: Union[Type[Exception], int],
        func: Callable,
        name: AppOrBlueprintKey=None,
) -> None:
    """Register a function as an error handler.

    This is designed to be used on the application directly. An
    example usage,

    .. code-block:: python

        def error_handler():
            return "Error", 500

        app.register_error_handler(500, error_handler)

    Arguments:
        error: The error code or Exception to handle.
        func: The function to handle the error.
        name: Optional blueprint key name.
    """
    handler = ensure_coroutine(func)
    if isinstance(error, int):
        error = all_http_exceptions[error]
    self.error_handler_spec[name][error] = handler
avg_line_len: 33, score: 19.416667
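Since the int branch translates a status code into the matching HTTPException class, a handler can be registered either way; a hedged sketch (the app object and the custom exception are assumptions):

    class PaymentRequired(Exception):  # hypothetical domain error
        pass

    async def handle_payment_required(error):
        return 'Payment required', 402

    # Both end up in error_handler_spec; 500 is first mapped to its
    # HTTPException class via all_http_exceptions.
    app.register_error_handler(PaymentRequired, handle_payment_required)
    app.register_error_handler(500, handle_payment_required)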
def load_dump(self, name=None):
    """
    Loads the database from disk.  The current database is replaced.

    :name: File name, without extension.
    """
    name = name or "vocabularDump"
    dump_file = os_join(
        self.temp_folder,
        "{0}.json".format(name)
    )
    if not isfile(dump_file):
        raise MarkovTextExcept("File {0!r} not found.".format(dump_file))

    with open(dump_file, "rb") as js_file:
        self.tokens_array = tuple(json.load(js_file))
    self.create_base()
avg_line_len: 34, score: 10
def grant_bonus(self, worker_id, assignment_id, bonus_price, reason):
    """
    Issues a payment of money from your account to a Worker.  To
    be eligible for a bonus, the Worker must have submitted results
    for one of your HITs, and have had those results approved or
    rejected.  This payment happens separately from the reward you
    pay to the Worker when you approve the Worker's assignment.
    The Bonus must be passed in as an instance of the Price object.
    """
    params = bonus_price.get_as_params('BonusAmount', 1)
    params['WorkerId'] = worker_id
    params['AssignmentId'] = assignment_id
    params['Reason'] = reason

    return self._process_request('GrantBonus', params)
avg_line_len: 46.75, score: 18.875
def _ep_pairwise(
        n_items, comparisons, alpha, match_moments, max_iter, initial_state):
    """Compute a distribution of model parameters using the EP algorithm.

    Raises
    ------
    RuntimeError
        If the algorithm does not converge after ``max_iter`` iterations.
    """
    # Static variable that allows to check the # of iterations after the call.
    _ep_pairwise.iterations = 0
    m = len(comparisons)
    prior_inv = alpha * np.eye(n_items)
    if initial_state is None:
        # Initially, mean and covariance come from the prior.
        mean = np.zeros(n_items)
        cov = (1 / alpha) * np.eye(n_items)
        # Initialize the natural params in the function space.
        tau = np.zeros(m)
        nu = np.zeros(m)
        # Initialize the natural params in the space of thetas.
        prec = np.zeros((n_items, n_items))
        xs = np.zeros(n_items)
    else:
        tau, nu = initial_state
        mean, cov, xs, prec = _init_ws(
            n_items, comparisons, prior_inv, tau, nu)
    for _ in range(max_iter):
        _ep_pairwise.iterations += 1
        # Keep a copy of the old parameters for convergence testing.
        tau_old = np.array(tau, copy=True)
        nu_old = np.array(nu, copy=True)
        for i in nprand.permutation(m):
            a, b = comparisons[i]
            # Update mean and variance in function space.
            f_var = cov[a, a] + cov[b, b] - 2 * cov[a, b]
            f_mean = mean[a] - mean[b]
            # Cavity distribution.
            tau_tot = 1.0 / f_var
            nu_tot = tau_tot * f_mean
            tau_cav = tau_tot - tau[i]
            nu_cav = nu_tot - nu[i]
            cov_cav = 1.0 / tau_cav
            mean_cav = cov_cav * nu_cav
            # Moment matching.
            logpart, dlogpart, d2logpart = match_moments(mean_cav, cov_cav)
            # Update factor params in the function space.
            tau[i] = -d2logpart / (1 + d2logpart / tau_cav)
            delta_tau = tau[i] - tau_old[i]
            nu[i] = ((dlogpart - (nu_cav / tau_cav) * d2logpart)
                     / (1 + d2logpart / tau_cav))
            delta_nu = nu[i] - nu_old[i]
            # Update factor params in the weight space.
            prec[(a, a, b, b), (a, b, a, b)] += delta_tau * MAT_ONE_FLAT
            xs[a] += delta_nu
            xs[b] -= delta_nu
            # Update mean and covariance.
            if abs(delta_tau) > 0:
                phi = -1.0 / ((1.0 / delta_tau) + f_var) * MAT_ONE
                upd_mat = cov.take([a, b], axis=0)
                cov = cov + upd_mat.T.dot(phi).dot(upd_mat)
                mean = cov.dot(xs)
        # Recompute the global parameters for stability.
        cov = inv_posdef(prior_inv + prec)
        mean = cov.dot(xs)
        if _converged((tau, nu), (tau_old, nu_old)):
            return mean, cov
    raise RuntimeError(
        "EP did not converge after {} iterations".format(max_iter))
41.594203
13.73913
def draw_legend(data, obj): """Adds legend code. """ texts = [] children_alignment = [] for text in obj.texts: texts.append("{}".format(text.get_text())) children_alignment.append("{}".format(text.get_horizontalalignment())) # Get the location. # http://matplotlib.org/api/legend_api.html loc = obj._loc if obj._loc != 0 else _get_location_from_best(obj) pad = 0.03 position, anchor = { 1: (None, None), # upper right 2: ([pad, 1.0 - pad], "north west"), # upper left 3: ([pad, pad], "south west"), # lower left 4: ([1.0 - pad, pad], "south east"), # lower right 5: ([1.0 - pad, 0.5], "east"), # right 6: ([3 * pad, 0.5], "west"), # center left 7: ([1.0 - 3 * pad, 0.5], "east"), # center right 8: ([0.5, 3 * pad], "south"), # lower center 9: ([0.5, 1.0 - 3 * pad], "north"), # upper center 10: ([0.5, 0.5], "center"), # center }[loc] # In case of given position via bbox_to_anchor parameter the center # of legend is changed as follows: if obj._bbox_to_anchor: bbox_center = obj.get_bbox_to_anchor()._bbox._points[1] position = [bbox_center[0], bbox_center[1]] legend_style = [] if position: ff = data["float format"] legend_style.append( ("at={{(" + ff + "," + ff + ")}}").format(position[0], position[1]) ) if anchor: legend_style.append("anchor={}".format(anchor)) # Get the edgecolor of the box if obj.get_frame_on(): edgecolor = obj.get_frame().get_edgecolor() data, frame_xcolor, _ = mycol.mpl_color2xcolor(data, edgecolor) if frame_xcolor != "black": # black is default legend_style.append("draw={}".format(frame_xcolor)) else: legend_style.append("draw=none") # Get the facecolor of the box facecolor = obj.get_frame().get_facecolor() data, fill_xcolor, _ = mycol.mpl_color2xcolor(data, facecolor) if fill_xcolor != "white": # white is default legend_style.append("fill={}".format(fill_xcolor)) # Get the horizontal alignment try: alignment = children_alignment[0] except IndexError: alignment = None for child_alignment in children_alignment: if alignment != child_alignment: warnings.warn("Varying horizontal alignments in the legend. Using default.") alignment = None break if alignment: data["current axes"].axis_options.append( "legend cell align={{{}}}".format(alignment) ) if obj._ncol != 1: data["current axes"].axis_options.append("legend columns={}".format(obj._ncol)) # Write styles to data if legend_style: style = "legend style={{{}}}".format(", ".join(legend_style)) data["current axes"].axis_options.append(style) return data
[ "def", "draw_legend", "(", "data", ",", "obj", ")", ":", "texts", "=", "[", "]", "children_alignment", "=", "[", "]", "for", "text", "in", "obj", ".", "texts", ":", "texts", ".", "append", "(", "\"{}\"", ".", "format", "(", "text", ".", "get_text", "(", ")", ")", ")", "children_alignment", ".", "append", "(", "\"{}\"", ".", "format", "(", "text", ".", "get_horizontalalignment", "(", ")", ")", ")", "# Get the location.", "# http://matplotlib.org/api/legend_api.html", "loc", "=", "obj", ".", "_loc", "if", "obj", ".", "_loc", "!=", "0", "else", "_get_location_from_best", "(", "obj", ")", "pad", "=", "0.03", "position", ",", "anchor", "=", "{", "1", ":", "(", "None", ",", "None", ")", ",", "# upper right", "2", ":", "(", "[", "pad", ",", "1.0", "-", "pad", "]", ",", "\"north west\"", ")", ",", "# upper left", "3", ":", "(", "[", "pad", ",", "pad", "]", ",", "\"south west\"", ")", ",", "# lower left", "4", ":", "(", "[", "1.0", "-", "pad", ",", "pad", "]", ",", "\"south east\"", ")", ",", "# lower right", "5", ":", "(", "[", "1.0", "-", "pad", ",", "0.5", "]", ",", "\"east\"", ")", ",", "# right", "6", ":", "(", "[", "3", "*", "pad", ",", "0.5", "]", ",", "\"west\"", ")", ",", "# center left", "7", ":", "(", "[", "1.0", "-", "3", "*", "pad", ",", "0.5", "]", ",", "\"east\"", ")", ",", "# center right", "8", ":", "(", "[", "0.5", ",", "3", "*", "pad", "]", ",", "\"south\"", ")", ",", "# lower center", "9", ":", "(", "[", "0.5", ",", "1.0", "-", "3", "*", "pad", "]", ",", "\"north\"", ")", ",", "# upper center", "10", ":", "(", "[", "0.5", ",", "0.5", "]", ",", "\"center\"", ")", ",", "# center", "}", "[", "loc", "]", "# In case of given position via bbox_to_anchor parameter the center", "# of legend is changed as follows:", "if", "obj", ".", "_bbox_to_anchor", ":", "bbox_center", "=", "obj", ".", "get_bbox_to_anchor", "(", ")", ".", "_bbox", ".", "_points", "[", "1", "]", "position", "=", "[", "bbox_center", "[", "0", "]", ",", "bbox_center", "[", "1", "]", "]", "legend_style", "=", "[", "]", "if", "position", ":", "ff", "=", "data", "[", "\"float format\"", "]", "legend_style", ".", "append", "(", "(", "\"at={{(\"", "+", "ff", "+", "\",\"", "+", "ff", "+", "\")}}\"", ")", ".", "format", "(", "position", "[", "0", "]", ",", "position", "[", "1", "]", ")", ")", "if", "anchor", ":", "legend_style", ".", "append", "(", "\"anchor={}\"", ".", "format", "(", "anchor", ")", ")", "# Get the edgecolor of the box", "if", "obj", ".", "get_frame_on", "(", ")", ":", "edgecolor", "=", "obj", ".", "get_frame", "(", ")", ".", "get_edgecolor", "(", ")", "data", ",", "frame_xcolor", ",", "_", "=", "mycol", ".", "mpl_color2xcolor", "(", "data", ",", "edgecolor", ")", "if", "frame_xcolor", "!=", "\"black\"", ":", "# black is default", "legend_style", ".", "append", "(", "\"draw={}\"", ".", "format", "(", "frame_xcolor", ")", ")", "else", ":", "legend_style", ".", "append", "(", "\"draw=none\"", ")", "# Get the facecolor of the box", "facecolor", "=", "obj", ".", "get_frame", "(", ")", ".", "get_facecolor", "(", ")", "data", ",", "fill_xcolor", ",", "_", "=", "mycol", ".", "mpl_color2xcolor", "(", "data", ",", "facecolor", ")", "if", "fill_xcolor", "!=", "\"white\"", ":", "# white is default", "legend_style", ".", "append", "(", "\"fill={}\"", ".", "format", "(", "fill_xcolor", ")", ")", "# Get the horizontal alignment", "try", ":", "alignment", "=", "children_alignment", "[", "0", "]", "except", "IndexError", ":", "alignment", "=", "None", "for", "child_alignment", "in", "children_alignment", ":", "if", 
"alignment", "!=", "child_alignment", ":", "warnings", ".", "warn", "(", "\"Varying horizontal alignments in the legend. Using default.\"", ")", "alignment", "=", "None", "break", "if", "alignment", ":", "data", "[", "\"current axes\"", "]", ".", "axis_options", ".", "append", "(", "\"legend cell align={{{}}}\"", ".", "format", "(", "alignment", ")", ")", "if", "obj", ".", "_ncol", "!=", "1", ":", "data", "[", "\"current axes\"", "]", ".", "axis_options", ".", "append", "(", "\"legend columns={}\"", ".", "format", "(", "obj", ".", "_ncol", ")", ")", "# Write styles to data", "if", "legend_style", ":", "style", "=", "\"legend style={{{}}}\"", ".", "format", "(", "\", \"", ".", "join", "(", "legend_style", ")", ")", "data", "[", "\"current axes\"", "]", ".", "axis_options", ".", "append", "(", "style", ")", "return", "data" ]
34.670732
20.268293
def get_market_deep(symbols=None, **kwargs): """ MOVED to iexfinance.iexdata.get_deep """ import warnings warnings.warn(WNG_MSG % ("get_market_deep", "iexdata.get_deep")) return DEEP(symbols, **kwargs).fetch()
[ "def", "get_market_deep", "(", "symbols", "=", "None", ",", "*", "*", "kwargs", ")", ":", "import", "warnings", "warnings", ".", "warn", "(", "WNG_MSG", "%", "(", "\"get_market_deep\"", ",", "\"iexdata.get_deep\"", ")", ")", "return", "DEEP", "(", "symbols", ",", "*", "*", "kwargs", ")", ".", "fetch", "(", ")" ]
33.285714
8.142857
def _AssertIsLocal(path):
    '''
    Checks if a given path is local, raising an exception if not.

    This is used in filesystem functions that do not support remote operations yet.

    :param unicode path:

    :raises NotImplementedForRemotePathError:
        If the given path is not local
    '''
    from six.moves.urllib.parse import urlparse
    if not _UrlIsLocal(urlparse(path)):
        from ._exceptions import NotImplementedForRemotePathError
        raise NotImplementedForRemotePathError
[ "def", "_AssertIsLocal", "(", "path", ")", ":", "from", "six", ".", "moves", ".", "urllib", ".", "parse", "import", "urlparse", "if", "not", "_UrlIsLocal", "(", "urlparse", "(", "path", ")", ")", ":", "from", ".", "_exceptions", "import", "NotImplementedForRemotePathError", "raise", "NotImplementedForRemotePathError" ]
32.6
21.933333
def time(self, intervals=1, *args, _show_progress=True, _print=True, _collect_garbage=True, _quiet=True, **kwargs): """ Measures the execution time of :prop:_callable for @intervals @intervals: #int number of intervals to measure the execution time of the function for @*args: arguments to pass to the callable being timed @**kwargs: arguments to pass to the callable being timed @_show_progress: #bool whether or not to print a progress bar @_print: #bool whether or not to print the results of the timing @_collect_garbage: #bool whether or not to garbage collect while timing @_quiet: #bool whether or not to disable the print() function's ability to output to terminal during the timing -> :class:collections.OrderedDict of stats about the timing """ self.reset() args = list(args) + list(self._callableargs[0]) _kwargs = self._callableargs[1] _kwargs.update(kwargs) kwargs = _kwargs if not _collect_garbage: gc.disable() # Garbage collection setting gc.collect() self.allocated_memory = 0 for x in self.progress(intervals): if _quiet: # Quiets print()s in the tested function sys.stdout = NullIO() try: self.start() # Starts the timer self._callable(*args, **kwargs) self.stop() # Stops the timer except Exception as e: if _quiet: # Unquiets prints() sys.stdout = sys.__stdout__ raise e if _quiet: # Unquiets prints() sys.stdout = sys.__stdout__ if not _collect_garbage: gc.enable() # Garbage collection setting if _print: self.info()
[ "def", "time", "(", "self", ",", "intervals", "=", "1", ",", "*", "args", ",", "_show_progress", "=", "True", ",", "_print", "=", "True", ",", "_collect_garbage", "=", "True", ",", "_quiet", "=", "True", ",", "*", "*", "kwargs", ")", ":", "self", ".", "reset", "(", ")", "args", "=", "list", "(", "args", ")", "+", "list", "(", "self", ".", "_callableargs", "[", "0", "]", ")", "_kwargs", "=", "self", ".", "_callableargs", "[", "1", "]", "_kwargs", ".", "update", "(", "kwargs", ")", "kwargs", "=", "_kwargs", "if", "not", "_collect_garbage", ":", "gc", ".", "disable", "(", ")", "# Garbage collection setting", "gc", ".", "collect", "(", ")", "self", ".", "allocated_memory", "=", "0", "for", "x", "in", "self", ".", "progress", "(", "intervals", ")", ":", "if", "_quiet", ":", "# Quiets print()s in the tested function", "sys", ".", "stdout", "=", "NullIO", "(", ")", "try", ":", "self", ".", "start", "(", ")", "# Starts the timer", "self", ".", "_callable", "(", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "stop", "(", ")", "# Stops the timer", "except", "Exception", "as", "e", ":", "if", "_quiet", ":", "# Unquiets prints()", "sys", ".", "stdout", "=", "sys", ".", "__stdout__", "raise", "e", "if", "_quiet", ":", "# Unquiets prints()", "sys", ".", "stdout", "=", "sys", ".", "__stdout__", "if", "not", "_collect_garbage", ":", "gc", ".", "enable", "(", ")", "# Garbage collection setting", "if", "_print", ":", "self", ".", "info", "(", ")" ]
43.790698
16.534884
def get_guild_member_by_id(self, guild_id: int, member_id: int) -> Dict[str, Any]: """Get a guild member by their id Args: guild_id: snowflake id of the guild member_id: snowflake id of the member Returns: Dictionary data for the guild member. Example: { "id": "41771983423143937", "name": "Discord Developers", "icon": "SEkgTU9NIElUUyBBTkRSRUkhISEhISEh", "splash": null, "owner_id": "80351110224678912", "region": "us-east", "afk_channel_id": "42072017402331136", "afk_timeout": 300, "embed_enabled": true, "embed_channel_id": "41771983444115456", "verification_level": 1, "roles": [ "41771983423143936", "41771983423143937", "41771983423143938" ], "emojis": [], "features": ["INVITE_SPLASH"], "unavailable": false } """ return self._query(f'guilds/{guild_id}/members/{member_id}', 'GET')
[ "def", "get_guild_member_by_id", "(", "self", ",", "guild_id", ":", "int", ",", "member_id", ":", "int", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "return", "self", ".", "_query", "(", "f'guilds/{guild_id}/members/{member_id}'", ",", "'GET'", ")" ]
37.382353
14.558824
def merge_lists(dest, source, extend_lists=False):
    """Recursively merge two lists.

    :keyword extend_lists: if true, just extends lists instead of merging
        them.

    This applies merge_dictionary if any of the entries are dicts.
    Note: This updates dest and returns it.
    """
    if not source:
        return dest
    if not extend_lists:
        # Make them the same size
        left = dest
        right = source[:]
        if len(dest) > len(source):
            right.extend([None] * (len(dest) - len(source)))
        elif len(dest) < len(source):
            left.extend([None] * (len(source) - len(dest)))
        # Merge lists
        for index, value in enumerate(left):
            if value is None and right[index] is not None:
                dest[index] = right[index]
            elif isinstance(value, dict) and isinstance(right[index], dict):
                merge_dictionary(dest[index], source[index],
                                 extend_lists=extend_lists)
            elif isinstance(value, list):
                merge_lists(value, right[index])
            elif right[index] is not None:
                dest[index] = right[index]
    else:
        dest.extend([src for src in source
                     if src not in dest])
    return dest
[ "def", "merge_lists", "(", "dest", ",", "source", ",", "extend_lists", "=", "False", ")", ":", "if", "not", "source", ":", "return", "if", "not", "extend_lists", ":", "# Make them the same size", "left", "=", "dest", "right", "=", "source", "[", ":", "]", "if", "len", "(", "dest", ")", ">", "len", "(", "source", ")", ":", "right", ".", "extend", "(", "[", "None", "]", "*", "(", "len", "(", "dest", ")", "-", "len", "(", "source", ")", ")", ")", "elif", "len", "(", "dest", ")", "<", "len", "(", "source", ")", ":", "left", ".", "extend", "(", "[", "None", "]", "*", "(", "len", "(", "source", ")", "-", "len", "(", "dest", ")", ")", ")", "# Merge lists", "for", "index", ",", "value", "in", "enumerate", "(", "left", ")", ":", "if", "value", "is", "None", "and", "right", "[", "index", "]", "is", "not", "None", ":", "dest", "[", "index", "]", "=", "right", "[", "index", "]", "elif", "isinstance", "(", "value", ",", "dict", ")", "and", "isinstance", "(", "right", "[", "index", "]", ",", "dict", ")", ":", "merge_dictionary", "(", "dest", "[", "index", "]", ",", "source", "[", "index", "]", ",", "extend_lists", "=", "extend_lists", ")", "elif", "isinstance", "(", "value", ",", "list", ")", ":", "merge_lists", "(", "value", ",", "right", "[", "index", "]", ")", "elif", "right", "[", "index", "]", "is", "not", "None", ":", "dest", "[", "index", "]", "=", "right", "[", "index", "]", "else", ":", "dest", ".", "extend", "(", "[", "src", "for", "src", "in", "source", "if", "src", "not", "in", "dest", "]", ")", "return", "dest" ]
38.25
16.3125
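A minimal usage sketch for merge_lists above, assuming it is importable from its module (entries that are dicts additionally require the companion merge_dictionary helper, which is not shown here):

    >>> merge_lists([1, None, 3], [None, 2, None])
    [1, 2, 3]
    >>> merge_lists([1, 2], [2, 3], extend_lists=True)  # extend, skipping duplicates
    [1, 2, 3]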
def recv_multipart(self, *args, **kwargs): """wrap recv_multipart to prevent state_changed on each partial recv""" self.__in_recv_multipart = True try: msg = super(GreenSocket, self).recv_multipart(*args, **kwargs) finally: self.__in_recv_multipart = False self.__state_changed() return msg
[ "def", "recv_multipart", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "__in_recv_multipart", "=", "True", "try", ":", "msg", "=", "super", "(", "GreenSocket", ",", "self", ")", ".", "recv_multipart", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "self", ".", "__in_recv_multipart", "=", "False", "self", ".", "__state_changed", "(", ")", "return", "msg" ]
39.777778
13.444444
def get_parentparser(parser, description=None, help=True): """ :param parser: :class:`argparse.ArgumentParser` instance or None :param description: string used to build a new parser if parser is None :param help: flag used to build a new parser if parser is None :returns: if parser is None the new parser; otherwise the `.parentparser` attribute (if set) or the parser itself (if not set) """ if parser is None: return argparse.ArgumentParser( description=description, add_help=help) elif hasattr(parser, 'parentparser'): return parser.parentparser else: return parser
[ "def", "get_parentparser", "(", "parser", ",", "description", "=", "None", ",", "help", "=", "True", ")", ":", "if", "parser", "is", "None", ":", "return", "argparse", ".", "ArgumentParser", "(", "description", "=", "description", ",", "add_help", "=", "help", ")", "elif", "hasattr", "(", "parser", ",", "'parentparser'", ")", ":", "return", "parser", ".", "parentparser", "else", ":", "return", "parser" ]
42.733333
17.133333
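A short usage sketch for get_parentparser above; it only needs the stdlib argparse import the function already relies on:

    >>> p = get_parentparser(None, description='demo tool')  # builds a fresh parser
    >>> get_parentparser(p) is p  # no .parentparser attribute set, so returned as-is
    True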
def to_python(self, value): """ Convert our string value to JSON after we load it from the DB """ if not value: return {} elif isinstance(value, six.string_types): res = loads(value) assert isinstance(res, dict) return JSONDict(**res) else: return value
[ "def", "to_python", "(", "self", ",", "value", ")", ":", "if", "not", "value", ":", "return", "{", "}", "elif", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "res", "=", "loads", "(", "value", ")", "assert", "isinstance", "(", "res", ",", "dict", ")", "return", "JSONDict", "(", "*", "*", "res", ")", "else", ":", "return", "value" ]
33.6
11.9
def tomorrow(hour=None, minute=None):
    """
    Gives the ``datetime.datetime`` object corresponding to tomorrow. The
    default value for optional parameters is the current value of hour and
    minute. I.e., when called without specifying values for parameters, the
    resulting object will refer to the time = now + 24 hours; when called with
    only hour specified, the resulting object will refer to tomorrow at the
    specified hour and at the current minute.

    :param hour: the hour for tomorrow, in the format *0-23* (defaults to
        ``None``)
    :type hour: int
    :param minute: the minute for tomorrow, in the format *0-59* (defaults to
        ``None``)
    :type minute: int
    :returns: a ``datetime.datetime`` object
    :raises: *ValueError* when hour or minute have bad values

    """
    if hour is None:
        hour = datetime.now().hour
    if minute is None:
        minute = datetime.now().minute
    tomorrow_date = date.today() + timedelta(days=1)
    return datetime(tomorrow_date.year, tomorrow_date.month, tomorrow_date.day,
                    hour, minute, 0)
[ "def", "tomorrow", "(", "hour", "=", "None", ",", "minute", "=", "None", ")", ":", "if", "hour", "is", "None", ":", "hour", "=", "datetime", ".", "now", "(", ")", ".", "hour", "if", "minute", "is", "None", ":", "minute", "=", "datetime", ".", "now", "(", ")", ".", "minute", "tomorrow_date", "=", "date", ".", "today", "(", ")", "+", "timedelta", "(", "days", "=", "1", ")", "return", "datetime", "(", "tomorrow_date", ".", "year", ",", "tomorrow_date", ".", "month", ",", "tomorrow_date", ".", "day", ",", "hour", ",", "minute", ",", "0", ")" ]
41.538462
20.923077
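Usage sketch for tomorrow above, assuming the defining module's `from datetime import datetime, date, timedelta` imports:

    >>> tomorrow(hour=9, minute=30)  # -> datetime for tomorrow at 09:30
    >>> tomorrow()                   # -> tomorrow at the current hour and minute
    >>> tomorrow(hour=24)            # -> raises ValueError (bad hour)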
def get_array(self, variable_name, period):
        """
        Return the value of ``variable_name`` for ``period``, if this value is already in the cache (if it has been set as an input or previously calculated).

        Unlike :any:`calculate`, this method *does not* trigger calculations and *does not* use any formula.
        """
        if period is not None and not isinstance(period, periods.Period):
            period = periods.period(period)
        return self.get_holder(variable_name).get_array(period)
[ "def", "get_array", "(", "self", ",", "variable_name", ",", "period", ")", ":", "if", "period", "is", "not", "None", "and", "not", "isinstance", "(", "period", ",", "periods", ".", "Period", ")", ":", "period", "=", "periods", ".", "period", "(", "period", ")", "return", "self", ".", "get_holder", "(", "variable_name", ")", ".", "get_array", "(", "period", ")" ]
57.444444
32.777778
def package( files, tag, metadata, extra_options, branch, template_dir, plugins_dir, static, install, spatialite, version_note, **extra_metadata ): "Package specified SQLite files into a new datasette Docker container" if not shutil.which("docker"): click.secho( ' The package command requires "docker" to be installed and configured ', bg="red", fg="white", bold=True, err=True, ) sys.exit(1) with temporary_docker_directory( files, "datasette", metadata, extra_options, branch, template_dir, plugins_dir, static, install, spatialite, version_note, extra_metadata, ): args = ["docker", "build"] if tag: args.append("-t") args.append(tag) args.append(".") call(args)
[ "def", "package", "(", "files", ",", "tag", ",", "metadata", ",", "extra_options", ",", "branch", ",", "template_dir", ",", "plugins_dir", ",", "static", ",", "install", ",", "spatialite", ",", "version_note", ",", "*", "*", "extra_metadata", ")", ":", "if", "not", "shutil", ".", "which", "(", "\"docker\"", ")", ":", "click", ".", "secho", "(", "' The package command requires \"docker\" to be installed and configured '", ",", "bg", "=", "\"red\"", ",", "fg", "=", "\"white\"", ",", "bold", "=", "True", ",", "err", "=", "True", ",", ")", "sys", ".", "exit", "(", "1", ")", "with", "temporary_docker_directory", "(", "files", ",", "\"datasette\"", ",", "metadata", ",", "extra_options", ",", "branch", ",", "template_dir", ",", "plugins_dir", ",", "static", ",", "install", ",", "spatialite", ",", "version_note", ",", "extra_metadata", ",", ")", ":", "args", "=", "[", "\"docker\"", ",", "\"build\"", "]", "if", "tag", ":", "args", ".", "append", "(", "\"-t\"", ")", "args", ".", "append", "(", "tag", ")", "args", ".", "append", "(", "\".\"", ")", "call", "(", "args", ")" ]
20.977273
22.613636
def trunc(self, model, prompt, **fields): """ Truncates fields values for the given model. Prompts for a new value if truncation occurs. """ for field_name, value in fields.items(): field = model._meta.get_field(field_name) max_length = getattr(field, "max_length", None) if not max_length: continue elif not prompt: fields[field_name] = value[:max_length] continue while len(value) > max_length: encoded_value = value.encode("utf-8") new_value = input("The value for the field %s.%s exceeds " "its maximum length of %s chars: %s\n\nEnter a new value " "for it, or press return to have it truncated: " % (model.__name__, field_name, max_length, encoded_value)) value = new_value if new_value else value[:max_length] fields[field_name] = value return fields
[ "def", "trunc", "(", "self", ",", "model", ",", "prompt", ",", "*", "*", "fields", ")", ":", "for", "field_name", ",", "value", "in", "fields", ".", "items", "(", ")", ":", "field", "=", "model", ".", "_meta", ".", "get_field", "(", "field_name", ")", "max_length", "=", "getattr", "(", "field", ",", "\"max_length\"", ",", "None", ")", "if", "not", "max_length", ":", "continue", "elif", "not", "prompt", ":", "fields", "[", "field_name", "]", "=", "value", "[", ":", "max_length", "]", "continue", "while", "len", "(", "value", ")", ">", "max_length", ":", "encoded_value", "=", "value", ".", "encode", "(", "\"utf-8\"", ")", "new_value", "=", "input", "(", "\"The value for the field %s.%s exceeds \"", "\"its maximum length of %s chars: %s\\n\\nEnter a new value \"", "\"for it, or press return to have it truncated: \"", "%", "(", "model", ".", "__name__", ",", "field_name", ",", "max_length", ",", "encoded_value", ")", ")", "value", "=", "new_value", "if", "new_value", "else", "value", "[", ":", "max_length", "]", "fields", "[", "field_name", "]", "=", "value", "return", "fields" ]
45.954545
15.863636
def get_current_block_hash(self, is_full: bool = False) -> str:
        """
        This interface is used to get the hexadecimal hash value of the highest block in the current network.

        Return:
            the hexadecimal hash value of the highest block in the current network.
        """
        payload = self.generate_json_rpc_payload(RpcMethod.GET_CURRENT_BLOCK_HASH)
        response = self.__post(self.__url, payload)
        if is_full:
            return response
        return response['result']
[ "def", "get_current_block_hash", "(", "self", ",", "is_full", ":", "bool", "=", "False", ")", "->", "str", ":", "payload", "=", "self", ".", "generate_json_rpc_payload", "(", "RpcMethod", ".", "GET_CURRENT_BLOCK_HASH", ")", "response", "=", "self", ".", "__post", "(", "self", ".", "__url", ",", "payload", ")", "if", "is_full", ":", "return", "response", "return", "response", "[", "'result'", "]" ]
41.333333
23.833333
def publish_workflow_submission(self, user_id, workflow_id_or_name, parameters): """Publish workflow submission parameters.""" msg = { "user": user_id, "workflow_id_or_name": workflow_id_or_name, "parameters": parameters } self._publish(msg)
[ "def", "publish_workflow_submission", "(", "self", ",", "user_id", ",", "workflow_id_or_name", ",", "parameters", ")", ":", "msg", "=", "{", "\"user\"", ":", "user_id", ",", "\"workflow_id_or_name\"", ":", "workflow_id_or_name", ",", "\"parameters\"", ":", "parameters", "}", "self", ".", "_publish", "(", "msg", ")" ]
37
12
def make_request_fn(): """Returns a request function.""" if FLAGS.cloud_mlengine_model_name: request_fn = serving_utils.make_cloud_mlengine_request_fn( credentials=GoogleCredentials.get_application_default(), model_name=FLAGS.cloud_mlengine_model_name, version=FLAGS.cloud_mlengine_model_version) else: request_fn = serving_utils.make_grpc_request_fn( servable_name=FLAGS.servable_name, server=FLAGS.server, timeout_secs=FLAGS.timeout_secs) return request_fn
[ "def", "make_request_fn", "(", ")", ":", "if", "FLAGS", ".", "cloud_mlengine_model_name", ":", "request_fn", "=", "serving_utils", ".", "make_cloud_mlengine_request_fn", "(", "credentials", "=", "GoogleCredentials", ".", "get_application_default", "(", ")", ",", "model_name", "=", "FLAGS", ".", "cloud_mlengine_model_name", ",", "version", "=", "FLAGS", ".", "cloud_mlengine_model_version", ")", "else", ":", "request_fn", "=", "serving_utils", ".", "make_grpc_request_fn", "(", "servable_name", "=", "FLAGS", ".", "servable_name", ",", "server", "=", "FLAGS", ".", "server", ",", "timeout_secs", "=", "FLAGS", ".", "timeout_secs", ")", "return", "request_fn" ]
36.428571
14.928571
def format(self, formatter, subset=None): """ Format the text display value of cells. .. versionadded:: 0.18.0 Parameters ---------- formatter : str, callable, or dict subset : IndexSlice An argument to ``DataFrame.loc`` that restricts which elements ``formatter`` is applied to. Returns ------- self : Styler Notes ----- ``formatter`` is either an ``a`` or a dict ``{column name: a}`` where ``a`` is one of - str: this will be wrapped in: ``a.format(x)`` - callable: called with the value of an individual cell The default display value for numeric values is the "general" (``g``) format with ``pd.options.display.precision`` precision. Examples -------- >>> df = pd.DataFrame(np.random.randn(4, 2), columns=['a', 'b']) >>> df.style.format("{:.2%}") >>> df['c'] = ['a', 'b', 'c', 'd'] >>> df.style.format({'c': str.upper}) """ if subset is None: row_locs = range(len(self.data)) col_locs = range(len(self.data.columns)) else: subset = _non_reducing_slice(subset) if len(subset) == 1: subset = subset, self.data.columns sub_df = self.data.loc[subset] row_locs = self.data.index.get_indexer_for(sub_df.index) col_locs = self.data.columns.get_indexer_for(sub_df.columns) if is_dict_like(formatter): for col, col_formatter in formatter.items(): # formatter must be callable, so '{}' are converted to lambdas col_formatter = _maybe_wrap_formatter(col_formatter) col_num = self.data.columns.get_indexer_for([col])[0] for row_num in row_locs: self._display_funcs[(row_num, col_num)] = col_formatter else: # single scalar to format all cells with locs = product(*(row_locs, col_locs)) for i, j in locs: formatter = _maybe_wrap_formatter(formatter) self._display_funcs[(i, j)] = formatter return self
[ "def", "format", "(", "self", ",", "formatter", ",", "subset", "=", "None", ")", ":", "if", "subset", "is", "None", ":", "row_locs", "=", "range", "(", "len", "(", "self", ".", "data", ")", ")", "col_locs", "=", "range", "(", "len", "(", "self", ".", "data", ".", "columns", ")", ")", "else", ":", "subset", "=", "_non_reducing_slice", "(", "subset", ")", "if", "len", "(", "subset", ")", "==", "1", ":", "subset", "=", "subset", ",", "self", ".", "data", ".", "columns", "sub_df", "=", "self", ".", "data", ".", "loc", "[", "subset", "]", "row_locs", "=", "self", ".", "data", ".", "index", ".", "get_indexer_for", "(", "sub_df", ".", "index", ")", "col_locs", "=", "self", ".", "data", ".", "columns", ".", "get_indexer_for", "(", "sub_df", ".", "columns", ")", "if", "is_dict_like", "(", "formatter", ")", ":", "for", "col", ",", "col_formatter", "in", "formatter", ".", "items", "(", ")", ":", "# formatter must be callable, so '{}' are converted to lambdas", "col_formatter", "=", "_maybe_wrap_formatter", "(", "col_formatter", ")", "col_num", "=", "self", ".", "data", ".", "columns", ".", "get_indexer_for", "(", "[", "col", "]", ")", "[", "0", "]", "for", "row_num", "in", "row_locs", ":", "self", ".", "_display_funcs", "[", "(", "row_num", ",", "col_num", ")", "]", "=", "col_formatter", "else", ":", "# single scalar to format all cells with", "locs", "=", "product", "(", "*", "(", "row_locs", ",", "col_locs", ")", ")", "for", "i", ",", "j", "in", "locs", ":", "formatter", "=", "_maybe_wrap_formatter", "(", "formatter", ")", "self", ".", "_display_funcs", "[", "(", "i", ",", "j", ")", "]", "=", "formatter", "return", "self" ]
33.890625
21.328125
def stop(self): """Stop this gateway agent.""" if self._disconnector: self._disconnector.stop() self.client.disconnect()
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "_disconnector", ":", "self", ".", "_disconnector", ".", "stop", "(", ")", "self", ".", "client", ".", "disconnect", "(", ")" ]
21.714286
18
def _make_parser(**kwargs):
    """
    :return: (keyword args to be used, parser object)
    """
    # Optional arguments for configparser.SafeConfigParser{,readfp}
    kwargs_0 = filter_options(("defaults", "dict_type", "allow_no_value"),
                              kwargs)
    kwargs_1 = filter_options(("filename", ), kwargs)

    try:
        parser = configparser.SafeConfigParser(**kwargs_0)
    except TypeError:
        # .. note::
        # It seems ConfigParser.*ConfigParser in python 2.6 does not support
        # 'allow_no_value' option parameter, and TypeError will be thrown.
        kwargs_0 = filter_options(("defaults", "dict_type"), kwargs)
        parser = configparser.SafeConfigParser(**kwargs_0)

    return (kwargs_1, parser)
[ "def", "_make_parser", "(", "*", "*", "kwargs", ")", ":", "# Optional arguements for configparser.SafeConfigParser{,readfp}", "kwargs_0", "=", "filter_options", "(", "(", "\"defaults\"", ",", "\"dict_type\"", ",", "\"allow_no_value\"", ")", ",", "kwargs", ")", "kwargs_1", "=", "filter_options", "(", "(", "\"filename\"", ",", ")", ",", "kwargs", ")", "try", ":", "parser", "=", "configparser", ".", "SafeConfigParser", "(", "*", "*", "kwargs_0", ")", "except", "TypeError", ":", "# .. note::", "# It seems ConfigParser.*ConfigParser in python 2.6 does not support", "# 'allow_no_value' option parameter, and TypeError will be thrown.", "kwargs_0", "=", "filter_options", "(", "(", "\"defaults\"", ",", "\"dict_type\"", ")", ",", "kwargs", ")", "parser", "=", "configparser", ".", "SafeConfigParser", "(", "*", "*", "kwargs_0", ")", "return", "(", "kwargs_1", ",", "parser", ")" ]
39.105263
21.421053
def as_ul(self, show_leaf=True, current_linkable=False, class_current="active_link"):
        """
            Return the breadcrumb as an unordered list (ul)
        """
        return self.__do_menu("as_ul", show_leaf, current_linkable, class_current)
[ "def", "as_ul", "(", "self", ",", "show_leaf", "=", "True", ",", "current_linkable", "=", "False", ",", "class_current", "=", "\"active_link\"", ")", ":", "return", "self", ".", "__do_menu", "(", "\"as_ul\"", ",", "show_leaf", ",", "current_linkable", ",", "class_current", ")" ]
44.8
18.4
def expected_error_messages(*error_messages):
    """
    Decorator expecting defined error messages at the end of test method.
    As parameters, use the values that
    :py:meth:`~.WebdriverWrapperErrorMixin.get_error_messages` returns.

    .. versionadded:: 2.0
        Before this decorator was called ``ShouldBeError``.
    """
    def wrapper(func):
        setattr(func, EXPECTED_ERROR_MESSAGES, error_messages)
        return func
    return wrapper
[ "def", "expected_error_messages", "(", "*", "error_messages", ")", ":", "def", "wrapper", "(", "func", ")", ":", "setattr", "(", "func", ",", "EXPECTED_ERROR_MESSAGES", ",", "error_messages", ")", "return", "func", "return", "wrapper" ]
30.857143
19.285714
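A hedged usage sketch for the decorator above; the test method and message text are hypothetical:

    >>> @expected_error_messages('Invalid e-mail address.')
    ... def test_submit_bad_email(self):
    ...     self.click('submit')  # hypothetical test body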
def calculate_diagram_ranges(data): """ Given a numpy array calculate what the ranges of the H-R diagram should be. """ data = round_arr_teff_luminosity(data) temps = data['temp'] x_range = [1.05 * np.amax(temps), .95 * np.amin(temps)] lums = data['lum'] y_range = [.50 * np.amin(lums), 2 * np.amax(lums)] return (x_range, y_range)
[ "def", "calculate_diagram_ranges", "(", "data", ")", ":", "data", "=", "round_arr_teff_luminosity", "(", "data", ")", "temps", "=", "data", "[", "'temp'", "]", "x_range", "=", "[", "1.05", "*", "np", ".", "amax", "(", "temps", ")", ",", ".95", "*", "np", ".", "amin", "(", "temps", ")", "]", "lums", "=", "data", "[", "'lum'", "]", "y_range", "=", "[", ".50", "*", "np", ".", "amin", "(", "lums", ")", ",", "2", "*", "np", ".", "amax", "(", "lums", ")", "]", "return", "(", "x_range", ",", "y_range", ")" ]
32.818182
11.181818
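A minimal calling sketch for calculate_diagram_ranges above; it assumes numpy plus the module's round_arr_teff_luminosity helper, and a structured array with 'temp' and 'lum' fields:

    >>> import numpy as np
    >>> data = np.array([(5800.0, 1.0), (3500.0, 0.05)],
    ...                 dtype=[('temp', 'f8'), ('lum', 'f8')])
    >>> calculate_diagram_ranges(data)  # x decreasing (hot stars on the left), y spans the luminosities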
def all_api_methods(cls): """ Return a list of all the TradingAlgorithm API methods. """ return [ fn for fn in itervalues(vars(cls)) if getattr(fn, 'is_api_method', False) ]
[ "def", "all_api_methods", "(", "cls", ")", ":", "return", "[", "fn", "for", "fn", "in", "itervalues", "(", "vars", "(", "cls", ")", ")", "if", "getattr", "(", "fn", ",", "'is_api_method'", ",", "False", ")", "]" ]
28.75
13.5
def list_product_versions_for_build_configuration(id=None, name=None, page_size=200, page_index=0, sort="", q=""): """ List all ProductVersions associated with a BuildConfiguration """ data = list_product_versions_for_build_configuration_raw(id, name, page_size, page_index, sort, q) if data: return utils.format_json_list(data)
[ "def", "list_product_versions_for_build_configuration", "(", "id", "=", "None", ",", "name", "=", "None", ",", "page_size", "=", "200", ",", "page_index", "=", "0", ",", "sort", "=", "\"\"", ",", "q", "=", "\"\"", ")", ":", "data", "=", "list_product_versions_for_build_configuration_raw", "(", "id", ",", "name", ",", "page_size", ",", "page_index", ",", "sort", ",", "q", ")", "if", "data", ":", "return", "utils", ".", "format_json_list", "(", "data", ")" ]
50
27.428571
def create(cls, messageType, extended, hopsleft=3, hopsmax=3):
        """Create message flags.

        messageType: integer 0 to 7:
            MESSAGE_TYPE_DIRECT_MESSAGE = 0
            MESSAGE_TYPE_DIRECT_MESSAGE_ACK = 1
            MESSAGE_TYPE_ALL_LINK_CLEANUP = 2
            MESSAGE_TYPE_ALL_LINK_CLEANUP_ACK = 3
            MESSAGE_TYPE_BROADCAST_MESSAGE = 4
            MESSAGE_TYPE_DIRECT_MESSAGE_NAK = 5
            MESSAGE_TYPE_ALL_LINK_BROADCAST = 6
            MESSAGE_TYPE_ALL_LINK_CLEANUP_NAK = 7
        extended: 1 for extended, 0 for standard
        hopsleft: int 0 - 3
        hopsmax: int 0 - 3
        """
        flags = MessageFlags(None)
        if messageType < 8:
            flags._messageType = messageType
        else:
            flags._messageType = messageType >> 5
        if extended in [0, 1, True, False]:
            if extended:
                flags._extended = 1
            else:
                flags._extended = 0
        else:
            flags._extended = extended >> 4
        flags._hopsLeft = hopsleft
        flags._hopsMax = hopsmax
        return flags
[ "def", "create", "(", "cls", ",", "messageType", ",", "extended", ",", "hopsleft", "=", "3", ",", "hopsmax", "=", "3", ")", ":", "flags", "=", "MessageFlags", "(", "None", ")", "if", "messageType", "<", "8", ":", "flags", ".", "_messageType", "=", "messageType", "else", ":", "flags", ".", "_messageType", "=", "messageType", ">>", "5", "if", "extended", "in", "[", "0", ",", "1", ",", "True", ",", "False", "]", ":", "if", "extended", ":", "flags", ".", "_extended", "=", "1", "else", ":", "flags", ".", "_extended", "=", "0", "else", ":", "flags", ".", "_extended", "=", "extended", ">>", "4", "flags", ".", "_hopsLeft", "=", "hopsleft", "flags", ".", "_hopsMax", "=", "hopsmax", "return", "flags" ]
34.903226
10.483871
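Hedged usage sketch, assuming the enclosing MessageFlags class is importable; the message-type codes follow the docstring above:

    >>> flags = MessageFlags.create(0, extended=False)          # direct standard message
    >>> bcast = MessageFlags.create(4, extended=1, hopsleft=2)  # broadcast, extended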
def react(self, **kwargs):
        """
            Bounce off of a shoreline
            feature = LineString of two points, being the line segment
            the particle hit.
            angle = decimal degrees from 0 (x-axis), counter-clockwise (math style)
        """
        if self._type == "bounce":
            print("This shoreline type is NOT SUPPORTED and is broken")
            return self.__bounce(**kwargs)
        elif self._type == "reverse":
            return self.__reverse(**kwargs)
        else:
            print("Not reacting to shoreline (sticky with infinite concentration)")
            return kwargs.get('hit_point')
[ "def", "react", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_type", "==", "\"bounce\"", ":", "print", "\"This shoreline type is NOT SUPPORTED and is broken\"", "return", "self", ".", "__bounce", "(", "*", "*", "kwargs", ")", "elif", "self", ".", "_type", "==", "\"reverse\"", ":", "return", "self", ".", "__reverse", "(", "*", "*", "kwargs", ")", "else", ":", "return", "kwargs", ".", "get", "(", "'hit_point'", ")", "print", "\"Not reacting to shoreline (sticky with inifinite concentration)\"" ]
44.214286
15.928571
def get_sftp_conn(config): """Make a SFTP connection, returns sftp client and connection objects""" remote = config.get('remote_location') parts = urlparse(remote) if ':' in parts.netloc: hostname, port = parts.netloc.split(':') else: hostname = parts.netloc port = 22 port = int(port) username = config.get('remote_username') or getuser() luser = get_local_user(username) sshdir = get_ssh_dir(config, luser) hostkey = get_host_keys(hostname, sshdir) try: sftp = None keys = get_ssh_keys(sshdir) transport = Transport((hostname, port)) while not keys.empty(): try: key = PKey.from_private_key_file(keys.get()) transport.connect( hostkey=hostkey, username=username, password=None, pkey=key) sftp = SFTPClient.from_transport(transport) break except (PasswordRequiredException, SSHException): pass if sftp is None: raise SaChannelUpdateTransportError("SFTP connection failed") return sftp, transport except BaseException as msg: raise SaChannelUpdateTransportError(msg)
[ "def", "get_sftp_conn", "(", "config", ")", ":", "remote", "=", "config", ".", "get", "(", "'remote_location'", ")", "parts", "=", "urlparse", "(", "remote", ")", "if", "':'", "in", "parts", ".", "netloc", ":", "hostname", ",", "port", "=", "parts", ".", "netloc", ".", "split", "(", "':'", ")", "else", ":", "hostname", "=", "parts", ".", "netloc", "port", "=", "22", "port", "=", "int", "(", "port", ")", "username", "=", "config", ".", "get", "(", "'remote_username'", ")", "or", "getuser", "(", ")", "luser", "=", "get_local_user", "(", "username", ")", "sshdir", "=", "get_ssh_dir", "(", "config", ",", "luser", ")", "hostkey", "=", "get_host_keys", "(", "hostname", ",", "sshdir", ")", "try", ":", "sftp", "=", "None", "keys", "=", "get_ssh_keys", "(", "sshdir", ")", "transport", "=", "Transport", "(", "(", "hostname", ",", "port", ")", ")", "while", "not", "keys", ".", "empty", "(", ")", ":", "try", ":", "key", "=", "PKey", ".", "from_private_key_file", "(", "keys", ".", "get", "(", ")", ")", "transport", ".", "connect", "(", "hostkey", "=", "hostkey", ",", "username", "=", "username", ",", "password", "=", "None", ",", "pkey", "=", "key", ")", "sftp", "=", "SFTPClient", ".", "from_transport", "(", "transport", ")", "break", "except", "(", "PasswordRequiredException", ",", "SSHException", ")", ":", "pass", "if", "sftp", "is", "None", ":", "raise", "SaChannelUpdateTransportError", "(", "\"SFTP connection failed\"", ")", "return", "sftp", ",", "transport", "except", "BaseException", "as", "msg", ":", "raise", "SaChannelUpdateTransportError", "(", "msg", ")" ]
33.052632
15.263158
def is_python3_identifier(possible_identifier): """ Returns `True` if the given `possible_identifier` can be used as an identifier in Python 3. """ possible_identifier = unicodedata.normalize('NFKC', possible_identifier) return ( bool(possible_identifier) and _is_in_id_start(possible_identifier[0]) and all(map(_is_in_id_continue, possible_identifier[1:])) ) and not iskeyword(possible_identifier)
[ "def", "is_python3_identifier", "(", "possible_identifier", ")", ":", "possible_identifier", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "possible_identifier", ")", "return", "(", "bool", "(", "possible_identifier", ")", "and", "_is_in_id_start", "(", "possible_identifier", "[", "0", "]", ")", "and", "all", "(", "map", "(", "_is_in_id_continue", ",", "possible_identifier", "[", "1", ":", "]", ")", ")", ")", "and", "not", "iskeyword", "(", "possible_identifier", ")" ]
40
14
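Behavioral sketch for is_python3_identifier above; it relies on the module-private _is_in_id_start/_is_in_id_continue predicates and `from keyword import iskeyword`:

    >>> is_python3_identifier('valid_name')
    True
    >>> is_python3_identifier('class')  # keyword
    False
    >>> is_python3_identifier('1abc')   # starts with a digit
    False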
def auto_delete_files_on_instance_delete(instance: Any, fieldnames: Iterable[str]) -> None: """ Deletes files from filesystem when object is deleted. """ for fieldname in fieldnames: filefield = getattr(instance, fieldname, None) if filefield: if os.path.isfile(filefield.path): os.remove(filefield.path)
[ "def", "auto_delete_files_on_instance_delete", "(", "instance", ":", "Any", ",", "fieldnames", ":", "Iterable", "[", "str", "]", ")", "->", "None", ":", "for", "fieldname", "in", "fieldnames", ":", "filefield", "=", "getattr", "(", "instance", ",", "fieldname", ",", "None", ")", "if", "filefield", ":", "if", "os", ".", "path", ".", "isfile", "(", "filefield", ".", "path", ")", ":", "os", ".", "remove", "(", "filefield", ".", "path", ")" ]
39.6
11.6
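One way this helper is commonly wired up — a hedged sketch using a Django post_delete receiver; the Document model and 'attachment' field are hypothetical:

    >>> from django.db.models.signals import post_delete
    >>> from django.dispatch import receiver
    >>> @receiver(post_delete, sender=Document)  # hypothetical model
    ... def cleanup_files(sender, instance, **kwargs):
    ...     auto_delete_files_on_instance_delete(instance, ['attachment'])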
def get_ipv4(hostname):
    """Get the list of IPv4 addresses for a hostname
    """
    addrinfo = socket.getaddrinfo(hostname, None, socket.AF_INET,
                                  socket.SOCK_STREAM)
    return [addrinfo[x][4][0] for x in range(len(addrinfo))]
[ "def", "get_ipv4", "(", "hostname", ")", ":", "addrinfo", "=", "socket", ".", "getaddrinfo", "(", "hostname", ",", "None", ",", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "return", "[", "addrinfo", "[", "x", "]", "[", "4", "]", "[", "0", "]", "for", "x", "in", "range", "(", "len", "(", "addrinfo", ")", ")", "]" ]
36.285714
16.428571
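Usage sketch for get_ipv4 above (stdlib socket only); the resolved addresses depend on the local resolver:

    >>> get_ipv4('localhost')  # typically ['127.0.0.1']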
def is_seq_of(seq, expected_type, seq_type=None): """Check whether it is a sequence of some type. Args: seq (Sequence): The sequence to be checked. expected_type (type): Expected type of sequence items. seq_type (type, optional): Expected sequence type. Returns: bool: Whether the sequence is valid. """ if seq_type is None: exp_seq_type = collections_abc.Sequence else: assert isinstance(seq_type, type) exp_seq_type = seq_type if not isinstance(seq, exp_seq_type): return False for item in seq: if not isinstance(item, expected_type): return False return True
[ "def", "is_seq_of", "(", "seq", ",", "expected_type", ",", "seq_type", "=", "None", ")", ":", "if", "seq_type", "is", "None", ":", "exp_seq_type", "=", "collections_abc", ".", "Sequence", "else", ":", "assert", "isinstance", "(", "seq_type", ",", "type", ")", "exp_seq_type", "=", "seq_type", "if", "not", "isinstance", "(", "seq", ",", "exp_seq_type", ")", ":", "return", "False", "for", "item", "in", "seq", ":", "if", "not", "isinstance", "(", "item", ",", "expected_type", ")", ":", "return", "False", "return", "True" ]
30.090909
16.181818
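Doctest-style sketch for is_seq_of above, assuming the module's `collections_abc` alias for collections.abc:

    >>> is_seq_of([1, 2, 3], int)
    True
    >>> is_seq_of((1, 'a'), int)
    False
    >>> is_seq_of([[1], [2]], list, seq_type=list)
    True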
def vqa_attention_base():
  """VQA attention baseline hparams."""
  hparams = common_hparams.basic_params1()
  hparams.batch_size = 128
  hparams.use_fixed_batch_size = True
  hparams.optimizer = "adam"
  hparams.optimizer_adam_beta1 = 0.9
  hparams.optimizer_adam_beta2 = 0.999
  hparams.optimizer_adam_epsilon = 1e-8
  hparams.weight_decay = 0.
  hparams.clip_grad_norm = 0.
  hparams.initializer = "xavier"
  hparams.learning_rate = 0.5
  hparams.learning_rate_schedule = "legacy"
  hparams.learning_rate_warmup_steps = 0
  hparams.learning_rate_decay_scheme = "exp"
  hparams.learning_rate_decay_rate = 0.5
  hparams.learning_rate_decay_steps = 50000
  hparams.dropout = 0.5
  hparams.summarize_grads = True
  hparams.summarize_vars = True

  # not used hparams
  hparams.label_smoothing = 0.
  hparams.multiply_embedding_mode = ""

  # add new hparams
  # preprocess
  hparams.add_hparam("resize_side", 512)
  hparams.add_hparam("height", 448)
  hparams.add_hparam("width", 448)
  hparams.add_hparam("distort", True)

  hparams.add_hparam("train_resnet", False)
  hparams.add_hparam("rnn_type", "lstm")
  hparams.add_hparam("num_rnn_layers", 1)
  hparams.add_hparam("max_question_length", 15)
  # lstm hidden size
  hparams.hidden_size = 512

  hparams.add_hparam("attn_dim", 512)
  hparams.add_hparam("num_glimps", 2)

  hparams.add_hparam("num_mlp_layers", 1)
  hparams.add_hparam("mlp_dim", 1024)

  hparams.add_hparam("image_input_type", "image")
  hparams.add_hparam("image_model_fn", "resnet_v1_152")
  hparams.add_hparam("image_feat_size", 0)

  # self attention parts
  hparams.norm_type = "layer"
  hparams.layer_preprocess_sequence = "n"
  hparams.layer_postprocess_sequence = "da"
  hparams.layer_prepostprocess_dropout = 0.3
  hparams.attention_dropout = 0.1
  hparams.relu_dropout = 0.1
  hparams.image_hidden_size = 2048
  hparams.add_hparam("num_encoder_layers", 1)
  # Attention-related flags.
  hparams.add_hparam("num_heads", 8)
  hparams.add_hparam("attention_key_channels", 0)
  hparams.add_hparam("attention_value_channels", 0)
  hparams.add_hparam("image_filter_size", 1024)
  hparams.add_hparam("self_attention_type", "dot_product")
  hparams.add_hparam("scale_dotproduct", True)

  return hparams
[ "def", "vqa_attention_base", "(", ")", ":", "hparams", "=", "common_hparams", ".", "basic_params1", "(", ")", "hparams", ".", "batch_size", "=", "128", "hparams", ".", "use_fixed_batch_size", "=", "True", ",", "hparams", ".", "optimizer", "=", "\"adam\"", "hparams", ".", "optimizer_adam_beta1", "=", "0.9", "hparams", ".", "optimizer_adam_beta2", "=", "0.999", "hparams", ".", "optimizer_adam_epsilon", "=", "1e-8", "hparams", ".", "weight_decay", "=", "0.", "hparams", ".", "clip_grad_norm", "=", "0.", "hparams", ".", "initializer", "=", "\"xavier\"", "hparams", ".", "learning_rate", "=", "0.5", "hparams", ".", "learning_rate_schedule", "=", "\"legacy\"", "hparams", ".", "learning_rate_warmup_steps", "=", "0", "hparams", ".", "learning_rate_decay_scheme", "=", "\"exp\"", "hparams", ".", "learning_rate_decay_rate", "=", "0.5", "hparams", ".", "learning_rate_decay_steps", "=", "50000", "hparams", ".", "dropout", "=", "0.5", "hparams", ".", "summarize_grads", "=", "True", "hparams", ".", "summarize_vars", "=", "True", "# not used hparams", "hparams", ".", "label_smoothing", "=", "0.", "hparams", ".", "multiply_embedding_mode", "=", "\"\"", "# add new hparams", "# preprocess", "hparams", ".", "add_hparam", "(", "\"resize_side\"", ",", "512", ")", "hparams", ".", "add_hparam", "(", "\"height\"", ",", "448", ")", "hparams", ".", "add_hparam", "(", "\"width\"", ",", "448", ")", "hparams", ".", "add_hparam", "(", "\"distort\"", ",", "True", ")", "hparams", ".", "add_hparam", "(", "\"train_resnet\"", ",", "False", ")", "hparams", ".", "add_hparam", "(", "\"rnn_type\"", ",", "\"lstm\"", ")", "hparams", ".", "add_hparam", "(", "\"num_rnn_layers\"", ",", "1", ")", "hparams", ".", "add_hparam", "(", "\"max_question_length\"", ",", "15", ")", "# lstm hidden size", "hparams", ".", "hidden_size", "=", "512", "hparams", ".", "add_hparam", "(", "\"attn_dim\"", ",", "512", ")", "hparams", ".", "add_hparam", "(", "\"num_glimps\"", ",", "2", ")", "hparams", ".", "add_hparam", "(", "\"num_mlp_layers\"", ",", "1", ")", "hparams", ".", "add_hparam", "(", "\"mlp_dim\"", ",", "1024", ")", "hparams", ".", "add_hparam", "(", "\"image_input_type\"", ",", "\"image\"", ")", "hparams", ".", "add_hparam", "(", "\"image_model_fn\"", ",", "\"resnet_v1_152\"", ")", "hparams", ".", "add_hparam", "(", "\"image_feat_size\"", ",", "0", ")", "# self attention parts", "hparams", ".", "norm_type", "=", "\"layer\"", "hparams", ".", "layer_preprocess_sequence", "=", "\"n\"", "hparams", ".", "layer_postprocess_sequence", "=", "\"da\"", "hparams", ".", "layer_prepostprocess_dropout", "=", "0.3", "hparams", ".", "attention_dropout", "=", "0.1", "hparams", ".", "relu_dropout", "=", "0.1", "hparams", ".", "image_hidden_size", "=", "2048", "hparams", ".", "add_hparam", "(", "\"num_encoder_layers\"", ",", "1", ")", "# Attention-related flags.", "hparams", ".", "add_hparam", "(", "\"num_heads\"", ",", "8", ")", "hparams", ".", "add_hparam", "(", "\"attention_key_channels\"", ",", "0", ")", "hparams", ".", "add_hparam", "(", "\"attention_value_channels\"", ",", "0", ")", "hparams", ".", "add_hparam", "(", "\"image_filter_size\"", ",", "1024", ")", "hparams", ".", "add_hparam", "(", "\"self_attention_type\"", ",", "\"dot_product\"", ")", "hparams", ".", "add_hparam", "(", "\"scale_dotproduct\"", ",", "True", ")", "return", "hparams" ]
31.75
11.588235
def MultiAppend(self, value_timestamp_pairs): """Adds multiple value<->timestamp pairs. Args: value_timestamp_pairs: Tuples of (value, timestamp). """ for value, timestamp in value_timestamp_pairs: self.Append(value, timestamp)
[ "def", "MultiAppend", "(", "self", ",", "value_timestamp_pairs", ")", ":", "for", "value", ",", "timestamp", "in", "value_timestamp_pairs", ":", "self", ".", "Append", "(", "value", ",", "timestamp", ")" ]
31.125
13.625
def action(self, column=None, value=None, **kwargs): """ The underlying GICS table provides codes and descriptions identifying the current status or disposition of a grant project. >>> GICS().action('action_code', 'A') """ return self._resolve_call('GIC_ACTION', column, value, **kwargs)
[ "def", "action", "(", "self", ",", "column", "=", "None", ",", "value", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_resolve_call", "(", "'GIC_ACTION'", ",", "column", ",", "value", ",", "*", "*", "kwargs", ")" ]
41.125
18.375
def create_or_update(cls, course_video, file_name=None, image_data=None, generated_images=None):
        """
        Create a VideoImage object for a CourseVideo.

        NOTE: If `image_data` is None then `file_name` value will be used as it is, otherwise
        a new file name is constructed based on uuid and extension from `file_name` value.
        `image_data` will be None in case of course re-run and export. The `generated_images`
        list contains names of images auto-generated by VEDA. If an image is not already set then
        the first image name from the `generated_images` list will be used.

        Arguments:
            course_video (CourseVideo): CourseVideo instance
            file_name (str): File name of the image
            image_data (InMemoryUploadedFile): Image data to be saved.
            generated_images (list): auto generated image names

        Returns:
            Returns a tuple of (video_image, created).
        """
        video_image, created = cls.objects.get_or_create(course_video=course_video)
        if image_data:
            # Delete the existing image only if this image is not used by anyone else. This is necessary because
            # after a course re-run, a video in original course and the new course points to same image, So when
            # we update an image in new course and delete the existing image. This will delete the image from
            # original course as well, thus leaving video with having no image.
            if not created and VideoImage.objects.filter(image=video_image.image).count() == 1:
                video_image.image.delete()

            with closing(image_data) as image_file:
                file_name = '{uuid}{ext}'.format(uuid=uuid4().hex, ext=os.path.splitext(file_name)[1])
                try:
                    video_image.image.save(file_name, image_file)
                except Exception:  # pylint: disable=broad-except
                    logger.exception(
                        'VAL: Video Image save failed to storage for course_id [%s] and video_id [%s]',
                        course_video.course_id,
                        course_video.video.edx_video_id
                    )
                    raise
        else:
            if generated_images:
                video_image.generated_images = generated_images
                if not video_image.image.name:
                    file_name = generated_images[0]
                    video_image.image.name = file_name

        video_image.save()
        return video_image, created
[ "def", "create_or_update", "(", "cls", ",", "course_video", ",", "file_name", "=", "None", ",", "image_data", "=", "None", ",", "generated_images", "=", "None", ")", ":", "video_image", ",", "created", "=", "cls", ".", "objects", ".", "get_or_create", "(", "course_video", "=", "course_video", ")", "if", "image_data", ":", "# Delete the existing image only if this image is not used by anyone else. This is necessary because", "# after a course re-run, a video in original course and the new course points to same image, So when", "# we update an image in new course and delete the existing image. This will delete the image from", "# original course as well, thus leaving video with having no image.", "if", "not", "created", "and", "VideoImage", ".", "objects", ".", "filter", "(", "image", "=", "video_image", ".", "image", ")", ".", "count", "(", ")", "==", "1", ":", "video_image", ".", "image", ".", "delete", "(", ")", "with", "closing", "(", "image_data", ")", "as", "image_file", ":", "file_name", "=", "'{uuid}{ext}'", ".", "format", "(", "uuid", "=", "uuid4", "(", ")", ".", "hex", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file_name", ")", "[", "1", "]", ")", "try", ":", "video_image", ".", "image", ".", "save", "(", "file_name", ",", "image_file", ")", "except", "Exception", ":", "# pylint: disable=broad-except", "logger", ".", "exception", "(", "'VAL: Video Image save failed to storage for course_id [%s] and video_id [%s]'", ",", "course_video", ".", "course_id", ",", "course_video", ".", "video", ".", "edx_video_id", ")", "raise", "else", ":", "if", "generated_images", ":", "video_image", ".", "generated_images", "=", "generated_images", "if", "not", "video_image", ".", "image", ".", "name", ":", "file_name", "=", "generated_images", "[", "0", "]", "video_image", ".", "image", ".", "name", "=", "file_name", "video_image", ".", "save", "(", ")", "return", "video_image", ",", "created" ]
50.938776
29.061224
def concatenate_fastas(output_fna_clustered, output_fna_failures, output_concat_filepath): """ Concatenates two input fastas, writes to output_concat_filepath output_fna_clustered: fasta of successful ref clusters output_fna_failures: de novo fasta of cluster failures output_concat_filepath: path to write combined fastas to """ output_fp = open(output_concat_filepath, "w") for label, seq in parse_fasta(open(output_fna_clustered, "U")): output_fp.write(">%s\n%s\n" % (label, seq)) for label, seq in parse_fasta(open(output_fna_failures, "U")): output_fp.write(">%s\n%s\n" % (label, seq)) return output_concat_filepath
[ "def", "concatenate_fastas", "(", "output_fna_clustered", ",", "output_fna_failures", ",", "output_concat_filepath", ")", ":", "output_fp", "=", "open", "(", "output_concat_filepath", ",", "\"w\"", ")", "for", "label", ",", "seq", "in", "parse_fasta", "(", "open", "(", "output_fna_clustered", ",", "\"U\"", ")", ")", ":", "output_fp", ".", "write", "(", "\">%s\\n%s\\n\"", "%", "(", "label", ",", "seq", ")", ")", "for", "label", ",", "seq", "in", "parse_fasta", "(", "open", "(", "output_fna_failures", ",", "\"U\"", ")", ")", ":", "output_fp", ".", "write", "(", "\">%s\\n%s\\n\"", "%", "(", "label", ",", "seq", ")", ")", "return", "output_concat_filepath" ]
39.166667
17.833333
def set_roi(location): """ Send MAV_CMD_DO_SET_ROI message to point camera gimbal at a specified region of interest (LocationGlobal). The vehicle may also turn to face the ROI. For more information see: http://copter.ardupilot.com/common-mavlink-mission-command-messages-mav_cmd/#mav_cmd_do_set_roi """ # create the MAV_CMD_DO_SET_ROI command msg = vehicle.message_factory.command_long_encode( 0, 0, # target system, target component mavutil.mavlink.MAV_CMD_DO_SET_ROI, #command 0, #confirmation 0, 0, 0, 0, #params 1-4 location.lat, location.lon, location.alt ) # send command to vehicle vehicle.send_mavlink(msg)
[ "def", "set_roi", "(", "location", ")", ":", "# create the MAV_CMD_DO_SET_ROI command", "msg", "=", "vehicle", ".", "message_factory", ".", "command_long_encode", "(", "0", ",", "0", ",", "# target system, target component", "mavutil", ".", "mavlink", ".", "MAV_CMD_DO_SET_ROI", ",", "#command", "0", ",", "#confirmation", "0", ",", "0", ",", "0", ",", "0", ",", "#params 1-4", "location", ".", "lat", ",", "location", ".", "lon", ",", "location", ".", "alt", ")", "# send command to vehicle", "vehicle", ".", "send_mavlink", "(", "msg", ")" ]
33.714286
16.285714
def setmonitor(self, enable=True): """Alias for setmode('monitor') or setmode('managed') Only available with Npcap""" # We must reset the monitor cache if enable: res = self.setmode('monitor') else: res = self.setmode('managed') if not res: log_runtime.error("Npcap WlanHelper returned with an error code !") self.cache_mode = None tmp = self.cache_mode = self.ismonitor() return tmp if enable else (not tmp)
[ "def", "setmonitor", "(", "self", ",", "enable", "=", "True", ")", ":", "# We must reset the monitor cache", "if", "enable", ":", "res", "=", "self", ".", "setmode", "(", "'monitor'", ")", "else", ":", "res", "=", "self", ".", "setmode", "(", "'managed'", ")", "if", "not", "res", ":", "log_runtime", ".", "error", "(", "\"Npcap WlanHelper returned with an error code !\"", ")", "self", ".", "cache_mode", "=", "None", "tmp", "=", "self", ".", "cache_mode", "=", "self", ".", "ismonitor", "(", ")", "return", "tmp", "if", "enable", "else", "(", "not", "tmp", ")" ]
38.769231
10.692308
def missing_values(self): ''' Computes the values that must be missing from each player's hand, based on when they have passed. :return: a list of sets, each one containing the values that must be missing from the corresponding player's hand ''' missing = [set() for _ in self.hands] # replay the game from the beginning board = dominoes.SkinnyBoard() player = self.starting_player for move in self.moves: if move is None: # pass - update the missing values missing[player].update([board.left_end(), board.right_end()]) else: # not a pass - update the board board.add(*move) # move on to the next player player = next_player(player) return missing
[ "def", "missing_values", "(", "self", ")", ":", "missing", "=", "[", "set", "(", ")", "for", "_", "in", "self", ".", "hands", "]", "# replay the game from the beginning", "board", "=", "dominoes", ".", "SkinnyBoard", "(", ")", "player", "=", "self", ".", "starting_player", "for", "move", "in", "self", ".", "moves", ":", "if", "move", "is", "None", ":", "# pass - update the missing values", "missing", "[", "player", "]", ".", "update", "(", "[", "board", ".", "left_end", "(", ")", ",", "board", ".", "right_end", "(", ")", "]", ")", "else", ":", "# not a pass - update the board", "board", ".", "add", "(", "*", "move", ")", "# move on to the next player", "player", "=", "next_player", "(", "player", ")", "return", "missing" ]
33.076923
16.769231
def load(items, default_section=_DEFAULT_SECTION):
    """
    Load configuration from a mixed-type list of sources.

    :param items: list of config sources; ini-style paths are parsed as
        ini files, anything else is loaded by name
    :param default_section: section used when reading ini files
    :return: the merged settings
    """
    settings = []
    assert isinstance(items, list), 'items must be a list'
    logger.debug(items)
    for item in items:
        if _is_conf(item):
            settings.append(load_from_ini(item, default_section))
        else:
            settings.append(load_from_name(item))
    logger.debug(settings)
    return merge(settings)
[ "def", "load", "(", "items", ",", "default_section", "=", "_DEFAULT_SECTION", ")", ":", "settings", "=", "[", "]", "assert", "isinstance", "(", "items", ",", "list", ")", ",", "'items必须为list'", "logger", ".", "debug", "(", "items", ")", "for", "item", "in", "items", ":", "if", "_is_conf", "(", "item", ")", ":", "settings", ".", "append", "(", "load_from_ini", "(", "item", ",", "default_section", ")", ")", "else", ":", "settings", ".", "append", "(", "load_from_name", "(", "item", ")", ")", "logger", ".", "debug", "(", "settings", ")", "return", "merge", "(", "settings", ")" ]
23.789474
18.421053
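A small usage sketch, assuming load, load_from_ini, load_from_name, _is_conf, and merge come from this module; the item name and the .ini path are hypothetical.

# Mixed items: a named settings source plus an .ini file (both hypothetical).
settings = load(['defaults', '/etc/myapp/config.ini'])
print(settings)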
def should_run(self): """ Returns true if the feature should run """ should_run = True config = self.target or self.source if config.has('systems'): should_run = False valid_systems = [s.lower() for s in config.get('systems').split(",")] for system_type, param in [('is_osx', 'osx'), ('is_debian', 'debian')]: if param in valid_systems and getattr(system, system_type)(): should_run = True return should_run
[ "def", "should_run", "(", "self", ")", ":", "should_run", "=", "True", "config", "=", "self", ".", "target", "or", "self", ".", "source", "if", "config", ".", "has", "(", "'systems'", ")", ":", "should_run", "=", "False", "valid_systems", "=", "[", "s", ".", "lower", "(", ")", "for", "s", "in", "config", ".", "get", "(", "'systems'", ")", ".", "split", "(", "\",\"", ")", "]", "for", "system_type", ",", "param", "in", "[", "(", "'is_osx'", ",", "'osx'", ")", ",", "(", "'is_debian'", ",", "'debian'", ")", "]", ":", "if", "param", "in", "valid_systems", "and", "getattr", "(", "system", ",", "system_type", ")", "(", ")", ":", "should_run", "=", "True", "return", "should_run" ]
45.583333
15.916667
def stringify(metrics_headers=()): """Convert the provided metrics headers to a string. Iterate over the metrics headers (a dictionary, usually ordered) and return a properly-formatted space-separated string (e.g. foo/1.2.3 bar/3.14.159). """ metrics_headers = collections.OrderedDict(metrics_headers) return ' '.join(['%s/%s' % (k, v) for k, v in metrics_headers.items()])
[ "def", "stringify", "(", "metrics_headers", "=", "(", ")", ")", ":", "metrics_headers", "=", "collections", ".", "OrderedDict", "(", "metrics_headers", ")", "return", "' '", ".", "join", "(", "[", "'%s/%s'", "%", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "metrics_headers", ".", "items", "(", ")", "]", ")" ]
43.777778
17.222222
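Because stringify is a pure function over a mapping-like input, it is easy to exercise directly; this sketch only assumes the function is importable from this module.

headers = [('gl-python', '3.6.1'), ('gax', '0.15.0')]
print(stringify(headers))  # -> 'gl-python/3.6.1 gax/0.15.0'
print(stringify())         # empty input -> ''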
def prop_power(self, propulsion_eff=0.7, sea_margin=0.2):
    """
    Total propulsion power of the ship.

    :param propulsion_eff: Shaft efficiency of the ship
    :param sea_margin: Sea margin that accounts for the interaction between
        the ship and the sea, e.g. waves
    :return: shaft propulsion power of the ship in watts
    """
    PP = (1 + sea_margin) * self.resistance() * self.speed / propulsion_eff
    return PP
[ "def", "prop_power", "(", "self", ",", "propulsion_eff", "=", "0.7", ",", "sea_margin", "=", "0.2", ")", ":", "PP", "=", "(", "1", "+", "sea_margin", ")", "*", "self", ".", "resistance", "(", ")", "*", "self", ".", "speed", "/", "propulsion_eff", "return", "PP" ]
43.3
21.7
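A worked sketch of the formula PP = (1 + sea_margin) * resistance * speed / propulsion_eff, using made-up numbers instead of a real Ship instance.

# Worked example of PP = (1 + sea_margin) * resistance * speed / propulsion_eff,
# with hypothetical values instead of a real Ship object.
resistance = 2.0e5   # N (hypothetical calm-water resistance)
speed = 7.5          # m/s (hypothetical service speed)
sea_margin = 0.2
propulsion_eff = 0.7

PP = (1 + sea_margin) * resistance * speed / propulsion_eff
print("shaft power: %.0f W" % PP)  # 1.2 * 2e5 * 7.5 / 0.7 = ~2.57e6 W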
def firmware_manifest_retrieve(self, manifest_id, **kwargs): # noqa: E501 """Get a manifest # noqa: E501 Retrieve a firmware manifest. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.firmware_manifest_retrieve(manifest_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str manifest_id: The firmware manifest ID (required) :return: FirmwareManifest If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.firmware_manifest_retrieve_with_http_info(manifest_id, **kwargs) # noqa: E501 else: (data) = self.firmware_manifest_retrieve_with_http_info(manifest_id, **kwargs) # noqa: E501 return data
[ "def", "firmware_manifest_retrieve", "(", "self", ",", "manifest_id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "firmware_manifest_retrieve_with_http_info", "(", "manifest_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "firmware_manifest_retrieve_with_http_info", "(", "manifest_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
47.095238
22.619048
def value(self, extra=None):
    """The value used for processing, with optional extra bits.
    Can be a tuple.
    """
    if isinstance(self.code, WithExtra):
        # Guard against a missing extra value before the range check.
        if extra is None or not 0 <= extra < 1 << self.extraBits():
            raise ValueError("value: extra value doesn't fit in extraBits")
        return self.code.value(self.index, extra)
    if extra is not None:
        raise ValueError('value: no extra bits for this code')
    return self.code.value(self.index)
[ "def", "value", "(", "self", ",", "extra", "=", "None", ")", ":", "if", "isinstance", "(", "self", ".", "code", ",", "WithExtra", ")", ":", "if", "not", "0", "<=", "extra", "<", "1", "<<", "self", ".", "extraBits", "(", ")", ":", "raise", "ValueError", "(", "\"value: extra value doesn't fit in extraBits\"", ")", "return", "self", ".", "code", ".", "value", "(", "self", ".", "index", ",", "extra", ")", "if", "extra", "is", "not", "None", ":", "raise", "ValueError", "(", "'value: no extra bits for this code'", ")", "return", "self", ".", "code", ".", "value", "(", "self", ".", "index", ")" ]
44.454545
11.181818
def create(logger_name, logfile='gromacs.log'):
    """Create a top level logger.
    - The file logger logs everything (including DEBUG).
    - The console logger only logs INFO and above.

    Logging to a file and the console.

    See http://docs.python.org/library/logging.html?#logging-to-multiple-destinations

    The top level logger of the library is named 'gromacs'.  Note that
    we are configuring this logger with console output. If the root
    logger also does this then we will get two output lines to the
    console. We'll live with this because this is a simple
    convenience library...
    """
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)

    # file handler (renamed so it no longer shadows the ``logfile`` argument)
    logfile_handler = logging.FileHandler(logfile)
    logfile_formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
    logfile_handler.setFormatter(logfile_formatter)
    logger.addHandler(logfile_handler)

    # define a Handler which writes INFO messages or higher to the sys.stderr
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    # set a format which is simpler for console use
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)

    return logger
[ "def", "create", "(", "logger_name", ",", "logfile", "=", "'gromacs.log'", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "logger_name", ")", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "logfile", "=", "logging", ".", "FileHandler", "(", "logfile", ")", "logfile_formatter", "=", "logging", ".", "Formatter", "(", "'%(asctime)s %(name)-12s %(levelname)-8s %(message)s'", ")", "logfile", ".", "setFormatter", "(", "logfile_formatter", ")", "logger", ".", "addHandler", "(", "logfile", ")", "# define a Handler which writes INFO messages or higher to the sys.stderr", "console", "=", "logging", ".", "StreamHandler", "(", ")", "console", ".", "setLevel", "(", "logging", ".", "INFO", ")", "# set a format which is simpler for console use", "formatter", "=", "logging", ".", "Formatter", "(", "'%(name)-12s: %(levelname)-8s %(message)s'", ")", "console", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "console", ")", "return", "logger" ]
34.916667
22.194444
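Typical use of the factory above; only the logger name and log-file path are placeholders.

logger = create('gromacs.example', logfile='example.log')
logger.info('visible on the console and in example.log')
logger.debug('written to example.log only (console shows INFO and above)')

# Tear down handlers when done so repeated calls do not duplicate output.
for handler in list(logger.handlers):
    logger.removeHandler(handler)
    handler.close()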
def reset_syslog_config(host, username, password, protocol=None, port=None, syslog_config=None, esxi_hosts=None, credstore=None): ''' Reset the syslog service to its default settings. Valid syslog_config values are ``logdir``, ``loghost``, ``logdir-unique``, ``default-rotate``, ``default-size``, ``default-timeout``, or ``all`` for all of these. host The location of the host. username The username used to login to the host, such as ``root``. password The password used to login to the host. protocol Optionally set to alternate protocol if the host is not using the default protocol. Default protocol is ``https``. port Optionally set to alternate port if the host is not using the default port. Default port is ``443``. syslog_config List of parameters to reset, provided as a comma-delimited string, or 'all' to reset all syslog configuration parameters. Required. esxi_hosts If ``host`` is a vCenter host, then use esxi_hosts to execute this function on a list of one or more ESXi machines. credstore Optionally set to path to the credential store file. :return: Dictionary with a top-level key of 'success' which indicates if all the parameters were reset, and individual keys for each parameter indicating which succeeded or failed, per host. CLI Example: ``syslog_config`` can be passed as a quoted, comma-separated string, e.g. .. code-block:: bash # Used for ESXi host connection information salt '*' vsphere.reset_syslog_config my.esxi.host root bad-password \ syslog_config='logdir,loghost' # Used for connecting to a vCenter Server salt '*' vsphere.reset_syslog_config my.vcenter.location root bad-password \ syslog_config='logdir,loghost' esxi_hosts='[esxi-1.host.com, esxi-2.host.com]' ''' if not syslog_config: raise CommandExecutionError('The \'reset_syslog_config\' function requires a ' '\'syslog_config\' setting.') valid_resets = ['logdir', 'loghost', 'default-rotate', 'default-size', 'default-timeout', 'logdir-unique'] cmd = 'system syslog config set --reset=' if ',' in syslog_config: resets = [ind_reset.strip() for ind_reset in syslog_config.split(',')] elif syslog_config == 'all': resets = valid_resets else: resets = [syslog_config] ret = {} if esxi_hosts: if not isinstance(esxi_hosts, list): raise CommandExecutionError('\'esxi_hosts\' must be a list.') for esxi_host in esxi_hosts: response_dict = _reset_syslog_config_params(host, username, password, cmd, resets, valid_resets, protocol=protocol, port=port, esxi_host=esxi_host, credstore=credstore) ret.update({esxi_host: response_dict}) else: # Handles a single host or a vCenter connection when no esxi_hosts are provided. response_dict = _reset_syslog_config_params(host, username, password, cmd, resets, valid_resets, protocol=protocol, port=port, credstore=credstore) ret.update({host: response_dict}) return ret
[ "def", "reset_syslog_config", "(", "host", ",", "username", ",", "password", ",", "protocol", "=", "None", ",", "port", "=", "None", ",", "syslog_config", "=", "None", ",", "esxi_hosts", "=", "None", ",", "credstore", "=", "None", ")", ":", "if", "not", "syslog_config", ":", "raise", "CommandExecutionError", "(", "'The \\'reset_syslog_config\\' function requires a '", "'\\'syslog_config\\' setting.'", ")", "valid_resets", "=", "[", "'logdir'", ",", "'loghost'", ",", "'default-rotate'", ",", "'default-size'", ",", "'default-timeout'", ",", "'logdir-unique'", "]", "cmd", "=", "'system syslog config set --reset='", "if", "','", "in", "syslog_config", ":", "resets", "=", "[", "ind_reset", ".", "strip", "(", ")", "for", "ind_reset", "in", "syslog_config", ".", "split", "(", "','", ")", "]", "elif", "syslog_config", "==", "'all'", ":", "resets", "=", "valid_resets", "else", ":", "resets", "=", "[", "syslog_config", "]", "ret", "=", "{", "}", "if", "esxi_hosts", ":", "if", "not", "isinstance", "(", "esxi_hosts", ",", "list", ")", ":", "raise", "CommandExecutionError", "(", "'\\'esxi_hosts\\' must be a list.'", ")", "for", "esxi_host", "in", "esxi_hosts", ":", "response_dict", "=", "_reset_syslog_config_params", "(", "host", ",", "username", ",", "password", ",", "cmd", ",", "resets", ",", "valid_resets", ",", "protocol", "=", "protocol", ",", "port", "=", "port", ",", "esxi_host", "=", "esxi_host", ",", "credstore", "=", "credstore", ")", "ret", ".", "update", "(", "{", "esxi_host", ":", "response_dict", "}", ")", "else", ":", "# Handles a single host or a vCenter connection when no esxi_hosts are provided.", "response_dict", "=", "_reset_syslog_config_params", "(", "host", ",", "username", ",", "password", ",", "cmd", ",", "resets", ",", "valid_resets", ",", "protocol", "=", "protocol", ",", "port", "=", "port", ",", "credstore", "=", "credstore", ")", "ret", ".", "update", "(", "{", "host", ":", "response_dict", "}", ")", "return", "ret" ]
38.673684
26.8
def make_bindings_type(filenames,color_input,colorkey,file_dictionary,sidebar,bounds): # instantiating string the main string block for the javascript block of html code string = '' ''' # logic for instantiating variable colorkey input if not colorkeyfields == False: colorkey = 'selectedText' ''' # iterating through each geojson filename count = 0 for row in filenames: color_input = '' colorkeyfields = False count += 1 filename = row zoomrange = ['',''] # reading in geojson file into memory with open(filename) as data_file: data = json.load(data_file) #pprint(data) # getting the featuretype which will later dictate what javascript splices are needed data = data['features'] data = data[0] featuretype = data['geometry'] featuretype = featuretype['type'] data = data['properties'] # logic for overwriting colorkey fields if it exists for the filename # in the file dictionary try: colorkeyfields = file_dictionary[filename][str('colorkeyfields')] except KeyError: colorkeyfields = False except TypeError: colorkeyfields = False if not colorkeyfields == False: if len(colorkeyfields) == 1: colorkey = colorkeyfields[0] colorkeyfields = False try: zoomrange = file_dictionary[filename][str('zooms')] except KeyError: zoomrange = ['',''] except TypeError: zoomrange = ['',''] # code for if the file_dictionary input isn't false #(i.e. getting the color inputs out of dictionary variable) if file_dictionary==False and colorkey == False: # logic for getting the colorline for different feature types # the point feature requires a different line of code if featuretype == 'Point': colorline = get_colorline_marker(color_input) else: colorline = get_colorline_marker2(color_input) # setting minzoom and maxzoom to be sent into js parsing minzoom,maxzoom = zoomrange # getting filter file dictionary if filter_dictonary exists if not file_dictionary == False: filter_file_dictionary = file_dictionary[filename] else: filter_file_dictionary = False # checking to see if a chart_dictionary exists try: chart_dictionary = filter_file_dictionary['chart_dictionary'] except KeyError: chart_dictionary = False except TypeError: chart_dictionary = False # sending min and max zoom into the function that makes the zoom block zoomblock = make_zoom_block(minzoom,maxzoom,count,colorkeyfields,bounds,filter_file_dictionary) # logic for if a color key is given # HINT look here for rgb raw color integration in a color line if not colorkey == '': if row == filenames[0]: if colorkey == 'selectedText': colorkey = """feature.properties[%s]""" % colorkey else: colorkey = """feature.properties['%s']""" % colorkey if featuretype == 'Point': colorline = get_colorline_marker(str(colorkey)) else: colorline = get_colorline_marker2(str(colorkey)) # this may be able to be deleted # test later # im not sure what the fuck its here for if file_dictionary == False and colorkey == '': if featuretype == 'Point': colorline = get_colorline_marker(color_input) else: colorline = get_colorline_marker2(color_input) if colorkey == '' and colorkeyfields == False: if featuretype == 'Point': colorline = get_colorline_marker(color_input) else: colorline = get_colorline_marker2(color_input) # iterating through each header headers = [] for row in data: headers.append(str(row)) # logic for getting sidebar string that will be added in make_blockstr() if sidebar == True: sidebarstring = make_sidebar_string(headers,chart_dictionary) else: sidebarstring = '' # section of javascript code dedicated to the adding the data layer if count == 1: blocky = """ function add%s() { 
\n\tfunction addDataToMap%s(data, map) { \t\tvar dataLayer = L.geoJson(data); \t\tvar map = L.mapbox.map('map', 'mapbox.streets',{ \t\t\tzoom: 5 \t\t\t}).fitBounds(dataLayer.getBounds()); \t\tdataLayer.addTo(map) \t}\n""" % (count,count) else: blocky = """ function add%s() { \n\tfunction addDataToMap%s(data, map) { \t\tvar dataLayer = L.geoJson(data); \t\tdataLayer.addTo(map) \t}\n""" % (count,count) # making the string section that locally links the geojson file to the html document ''' if not time == '': preloc='\tfunction add%s() {\n' % (str(count)) loc = """\t$.getJSON('http://localhost:8000/%s',function(data) { addDataToMap%s(data,map); });""" % (filename,count) loc = preloc + loc else: ''' loc = """\t$.getJSON('http://localhost:8000/%s',function(data) { addDataToMap%s(data,map); });""" % (filename,count) # creating block to be added to the total or constituent string block if featuretype == 'Point': bindings = make_bindings(headers,count,colorline,featuretype,zoomblock,filename,sidebarstring,colorkeyfields)+'\n' stringblock = blocky + loc + bindings else: bindings = make_bindings(headers,count,colorline,featuretype,zoomblock,filename,sidebarstring,colorkeyfields)+'\n' stringblock = blocky + loc + bindings # adding the stringblock (one geojson file javascript block) to the total string block string += stringblock # adding async function to end of string block string = string + async_function_call(count) return string
[ "def", "make_bindings_type", "(", "filenames", ",", "color_input", ",", "colorkey", ",", "file_dictionary", ",", "sidebar", ",", "bounds", ")", ":", "# instantiating string the main string block for the javascript block of html code", "string", "=", "''", "# iterating through each geojson filename", "count", "=", "0", "for", "row", "in", "filenames", ":", "color_input", "=", "''", "colorkeyfields", "=", "False", "count", "+=", "1", "filename", "=", "row", "zoomrange", "=", "[", "''", ",", "''", "]", "# reading in geojson file into memory", "with", "open", "(", "filename", ")", "as", "data_file", ":", "data", "=", "json", ".", "load", "(", "data_file", ")", "#pprint(data)", "# getting the featuretype which will later dictate what javascript splices are needed", "data", "=", "data", "[", "'features'", "]", "data", "=", "data", "[", "0", "]", "featuretype", "=", "data", "[", "'geometry'", "]", "featuretype", "=", "featuretype", "[", "'type'", "]", "data", "=", "data", "[", "'properties'", "]", "# logic for overwriting colorkey fields if it exists for the filename ", "# in the file dictionary", "try", ":", "colorkeyfields", "=", "file_dictionary", "[", "filename", "]", "[", "str", "(", "'colorkeyfields'", ")", "]", "except", "KeyError", ":", "colorkeyfields", "=", "False", "except", "TypeError", ":", "colorkeyfields", "=", "False", "if", "not", "colorkeyfields", "==", "False", ":", "if", "len", "(", "colorkeyfields", ")", "==", "1", ":", "colorkey", "=", "colorkeyfields", "[", "0", "]", "colorkeyfields", "=", "False", "try", ":", "zoomrange", "=", "file_dictionary", "[", "filename", "]", "[", "str", "(", "'zooms'", ")", "]", "except", "KeyError", ":", "zoomrange", "=", "[", "''", ",", "''", "]", "except", "TypeError", ":", "zoomrange", "=", "[", "''", ",", "''", "]", "# code for if the file_dictionary input isn't false ", "#(i.e. 
getting the color inputs out of dictionary variable)", "if", "file_dictionary", "==", "False", "and", "colorkey", "==", "False", ":", "# logic for getting the colorline for different feature types", "# the point feature requires a different line of code", "if", "featuretype", "==", "'Point'", ":", "colorline", "=", "get_colorline_marker", "(", "color_input", ")", "else", ":", "colorline", "=", "get_colorline_marker2", "(", "color_input", ")", "# setting minzoom and maxzoom to be sent into js parsing ", "minzoom", ",", "maxzoom", "=", "zoomrange", "# getting filter file dictionary if filter_dictonary exists", "if", "not", "file_dictionary", "==", "False", ":", "filter_file_dictionary", "=", "file_dictionary", "[", "filename", "]", "else", ":", "filter_file_dictionary", "=", "False", "# checking to see if a chart_dictionary exists", "try", ":", "chart_dictionary", "=", "filter_file_dictionary", "[", "'chart_dictionary'", "]", "except", "KeyError", ":", "chart_dictionary", "=", "False", "except", "TypeError", ":", "chart_dictionary", "=", "False", "# sending min and max zoom into the function that makes the zoom block", "zoomblock", "=", "make_zoom_block", "(", "minzoom", ",", "maxzoom", ",", "count", ",", "colorkeyfields", ",", "bounds", ",", "filter_file_dictionary", ")", "# logic for if a color key is given ", "# HINT look here for rgb raw color integration in a color line", "if", "not", "colorkey", "==", "''", ":", "if", "row", "==", "filenames", "[", "0", "]", ":", "if", "colorkey", "==", "'selectedText'", ":", "colorkey", "=", "\"\"\"feature.properties[%s]\"\"\"", "%", "colorkey", "else", ":", "colorkey", "=", "\"\"\"feature.properties['%s']\"\"\"", "%", "colorkey", "if", "featuretype", "==", "'Point'", ":", "colorline", "=", "get_colorline_marker", "(", "str", "(", "colorkey", ")", ")", "else", ":", "colorline", "=", "get_colorline_marker2", "(", "str", "(", "colorkey", ")", ")", "# this may be able to be deleted ", "# test later ", "# im not sure what the fuck its here for ", "if", "file_dictionary", "==", "False", "and", "colorkey", "==", "''", ":", "if", "featuretype", "==", "'Point'", ":", "colorline", "=", "get_colorline_marker", "(", "color_input", ")", "else", ":", "colorline", "=", "get_colorline_marker2", "(", "color_input", ")", "if", "colorkey", "==", "''", "and", "colorkeyfields", "==", "False", ":", "if", "featuretype", "==", "'Point'", ":", "colorline", "=", "get_colorline_marker", "(", "color_input", ")", "else", ":", "colorline", "=", "get_colorline_marker2", "(", "color_input", ")", "# iterating through each header ", "headers", "=", "[", "]", "for", "row", "in", "data", ":", "headers", ".", "append", "(", "str", "(", "row", ")", ")", "# logic for getting sidebar string that will be added in make_blockstr()", "if", "sidebar", "==", "True", ":", "sidebarstring", "=", "make_sidebar_string", "(", "headers", ",", "chart_dictionary", ")", "else", ":", "sidebarstring", "=", "''", "# section of javascript code dedicated to the adding the data layer ", "if", "count", "==", "1", ":", "blocky", "=", "\"\"\"\n\tfunction add%s() { \n\t\\n\\tfunction addDataToMap%s(data, map) {\n\t\\t\\tvar dataLayer = L.geoJson(data);\n\t\\t\\tvar map = L.mapbox.map('map', 'mapbox.streets',{\n\t\\t\\t\\tzoom: 5\n\t\\t\\t\\t}).fitBounds(dataLayer.getBounds());\n\t\\t\\tdataLayer.addTo(map)\n\t\\t}\\n\"\"\"", "%", "(", "count", ",", "count", ")", "else", ":", "blocky", "=", "\"\"\"\n\tfunction add%s() { \n\t\\n\\tfunction addDataToMap%s(data, map) {\n\t\\t\\tvar dataLayer = 
L.geoJson(data);\n\t\\t\\tdataLayer.addTo(map)\n\t\\t}\\n\"\"\"", "%", "(", "count", ",", "count", ")", "# making the string section that locally links the geojson file to the html document", "'''\n\t\tif not time == '':\n\t\t\tpreloc='\\tfunction add%s() {\\n' % (str(count))\n\t\t\tloc = \"\"\"\\t$.getJSON('http://localhost:8000/%s',function(data) { addDataToMap%s(data,map); });\"\"\" % (filename,count)\n\t\t\tloc = preloc + loc\n\t\telse: \n\t\t'''", "loc", "=", "\"\"\"\\t$.getJSON('http://localhost:8000/%s',function(data) { addDataToMap%s(data,map); });\"\"\"", "%", "(", "filename", ",", "count", ")", "# creating block to be added to the total or constituent string block", "if", "featuretype", "==", "'Point'", ":", "bindings", "=", "make_bindings", "(", "headers", ",", "count", ",", "colorline", ",", "featuretype", ",", "zoomblock", ",", "filename", ",", "sidebarstring", ",", "colorkeyfields", ")", "+", "'\\n'", "stringblock", "=", "blocky", "+", "loc", "+", "bindings", "else", ":", "bindings", "=", "make_bindings", "(", "headers", ",", "count", ",", "colorline", ",", "featuretype", ",", "zoomblock", ",", "filename", ",", "sidebarstring", ",", "colorkeyfields", ")", "+", "'\\n'", "stringblock", "=", "blocky", "+", "loc", "+", "bindings", "# adding the stringblock (one geojson file javascript block) to the total string block", "string", "+=", "stringblock", "# adding async function to end of string block", "string", "=", "string", "+", "async_function_call", "(", "count", ")", "return", "string" ]
31.538462
21.650888
def make_us_postal_code(s, allowed_lengths=(), allowed_digits=()):
    """
    >>> make_us_postal_code(1234)
    '01234'
    >>> make_us_postal_code(507.6009)
    '507'
    >>> make_us_postal_code(90210.0)
    '90210'
    >>> make_us_postal_code(39567.7226)
    '39567-7226'
    """
    allowed_lengths = allowed_lengths or tuple(N if N < 6 else N + 1 for N in allowed_digits)
    allowed_lengths = allowed_lengths or (2, 3, 5, 10)
    ints = int_pair(s)
    z = str(ints[0]) if ints[0] else ''
    z4 = '-' + str(ints[1]) if ints[1] else ''
    if len(z) == 4:
        z = '0' + z
    if len(z + z4) in allowed_lengths:
        return z + z4
    elif len(z) in (min(n, 5) for n in allowed_lengths):
        return z
    return ''
[ "def", "make_us_postal_code", "(", "s", ",", "allowed_lengths", "=", "(", ")", ",", "allowed_digits", "=", "(", ")", ")", ":", "allowed_lengths", "=", "allowed_lengths", "or", "tuple", "(", "N", "if", "N", "<", "6", "else", "N", "+", "1", "for", "N", "in", "allowed_digits", ")", "allowed_lengths", "=", "allowed_lengths", "or", "(", "2", ",", "3", ",", "5", ",", "10", ")", "ints", "=", "int_pair", "(", "s", ")", "z", "=", "str", "(", "ints", "[", "0", "]", ")", "if", "ints", "[", "0", "]", "else", "''", "z4", "=", "'-'", "+", "str", "(", "ints", "[", "1", "]", ")", "if", "ints", "[", "1", "]", "else", "''", "if", "len", "(", "z", ")", "==", "4", ":", "z", "=", "'0'", "+", "z", "if", "len", "(", "z", "+", "z4", ")", "in", "allowed_lengths", ":", "return", "z", "+", "z4", "elif", "len", "(", "z", ")", "in", "(", "min", "(", "l", ",", "5", ")", "for", "l", "in", "allowed_lengths", ")", ":", "return", "z", "return", "''" ]
30.52
16.04
def on_make_toast(self, ref):
    """ Toast.makeToast returns asynchronously, so the widget has to be
    initialized later, once the reference comes back.

    """
    d = self.declaration
    self.toast = Toast(__id__=ref)
    self.init_widget()
[ "def", "on_make_toast", "(", "self", ",", "ref", ")", ":", "d", "=", "self", ".", "declaration", "self", ".", "toast", "=", "Toast", "(", "__id__", "=", "ref", ")", "self", ".", "init_widget", "(", ")" ]
28.875
12
def convert_args(test_fcn, *test_args): """ Decorator to be using in formulas to convert ``test_args`` depending on the ``test_fcn``. :param test_fcn: A test function that converts arguments. :type test_fcn: function :param test_args: Names of args to convert using ``test_fcn``. :type test_args: str The following test functions are available. * :func:`dimensionless_to_index` Example: Convert ``dawn_idx`` and ``eve_idx`` to indices:: @convert_args(dimensionless_to_index, 'dawn_idx', 'eve_idx') def f_max_T(Tcell24, dawn_idx, eve_idx): idx = dawn_idx + np.argmax(Tcell24[dawn_idx:eve_idx]) return Tcell24[idx], idx """ def wrapper(origfcn): @functools.wraps(origfcn) def newfcn(*args, **kwargs): argspec = getargspec(origfcn) # use ``inspect`` to get arg names kwargs.update(zip(argspec.args, args)) # convert args to kw # loop over test args for a in test_args: # convert a if it's in args if a in argspec.args: kwargs[a] = test_fcn(kwargs[a]) # update kwargs # call original function with converted args return origfcn(**kwargs) # return wrapped function return newfcn # return the wrapper function that consumes the original function return wrapper
[ "def", "convert_args", "(", "test_fcn", ",", "*", "test_args", ")", ":", "def", "wrapper", "(", "origfcn", ")", ":", "@", "functools", ".", "wraps", "(", "origfcn", ")", "def", "newfcn", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "argspec", "=", "getargspec", "(", "origfcn", ")", "# use ``inspect`` to get arg names", "kwargs", ".", "update", "(", "zip", "(", "argspec", ".", "args", ",", "args", ")", ")", "# convert args to kw", "# loop over test args", "for", "a", "in", "test_args", ":", "# convert a if it's in args", "if", "a", "in", "argspec", ".", "args", ":", "kwargs", "[", "a", "]", "=", "test_fcn", "(", "kwargs", "[", "a", "]", ")", "# update kwargs", "# call original function with converted args", "return", "origfcn", "(", "*", "*", "kwargs", ")", "# return wrapped function", "return", "newfcn", "# return the wrapper function that consumes the original function", "return", "wrapper" ]
38.277778
17.5
def get_synset_1000(self):
    """
    Returns:
        dict: {cls_number: synset_id}
    """
    fname = os.path.join(self.dir, 'synsets.txt')
    assert os.path.isfile(fname)
    with open(fname) as f:
        lines = [x.strip() for x in f.readlines()]
    return dict(enumerate(lines))
[ "def", "get_synset_1000", "(", "self", ")", ":", "fname", "=", "os", ".", "path", ".", "join", "(", "self", ".", "dir", ",", "'synsets.txt'", ")", "assert", "os", ".", "path", ".", "isfile", "(", "fname", ")", "lines", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "open", "(", "fname", ")", ".", "readlines", "(", ")", "]", "return", "dict", "(", "enumerate", "(", "lines", ")", ")" ]
32.333333
8.777778
def create_table_from_json( self, json_source, table_name="", primary_key=None, add_primary_key_column=False, index_attrs=None, ): """ Create a table from a JSON file/text. :param str json_source: Path to the JSON file or JSON text. :param str table_name: Table name to create. :param str primary_key: |primary_key| :param tuple index_attrs: |index_attrs| :Dependency Packages: - `pytablereader <https://github.com/thombashi/pytablereader>`__ :Examples: :ref:`example-create-table-from-json` .. seealso:: :py:meth:`.pytablereader.JsonTableFileLoader.load` :py:meth:`.pytablereader.JsonTableTextLoader.load` """ import pytablereader as ptr loader = ptr.JsonTableFileLoader(json_source) if typepy.is_not_null_string(table_name): loader.table_name = table_name try: for table_data in loader.load(): self.__create_table_from_tabledata( table_data, primary_key, add_primary_key_column, index_attrs ) return except (ptr.InvalidFilePathError, IOError): pass loader = ptr.JsonTableTextLoader(json_source) if typepy.is_not_null_string(table_name): loader.table_name = table_name for table_data in loader.load(): self.__create_table_from_tabledata( table_data, primary_key, add_primary_key_column, index_attrs )
[ "def", "create_table_from_json", "(", "self", ",", "json_source", ",", "table_name", "=", "\"\"", ",", "primary_key", "=", "None", ",", "add_primary_key_column", "=", "False", ",", "index_attrs", "=", "None", ",", ")", ":", "import", "pytablereader", "as", "ptr", "loader", "=", "ptr", ".", "JsonTableFileLoader", "(", "json_source", ")", "if", "typepy", ".", "is_not_null_string", "(", "table_name", ")", ":", "loader", ".", "table_name", "=", "table_name", "try", ":", "for", "table_data", "in", "loader", ".", "load", "(", ")", ":", "self", ".", "__create_table_from_tabledata", "(", "table_data", ",", "primary_key", ",", "add_primary_key_column", ",", "index_attrs", ")", "return", "except", "(", "ptr", ".", "InvalidFilePathError", ",", "IOError", ")", ":", "pass", "loader", "=", "ptr", ".", "JsonTableTextLoader", "(", "json_source", ")", "if", "typepy", ".", "is_not_null_string", "(", "table_name", ")", ":", "loader", ".", "table_name", "=", "table_name", "for", "table_data", "in", "loader", ".", "load", "(", ")", ":", "self", ".", "__create_table_from_tabledata", "(", "table_data", ",", "primary_key", ",", "add_primary_key_column", ",", "index_attrs", ")" ]
32.4375
18.9375
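A hedged usage sketch: the method reads like SimpleSQLite's create_table_from_json, so the import below assumes that package; treat the class import as an assumption if this method lives elsewhere.

# Assumption: the enclosing class behaves like simplesqlite.SimpleSQLite.
from simplesqlite import SimpleSQLite

con = SimpleSQLite('example.sqlite', 'w')
json_text = '[{"attr_a": 1, "attr_b": 4.0}, {"attr_a": 2, "attr_b": 2.1}]'
con.create_table_from_json(json_text, table_name='sample_table')
print(con.fetch_table_names())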
def clean_dataframe(df, is_slugify=True, threshold=50, rename_cols=None): """ This method is used to: - slugify the column names (if slugify is set to True) - convert columns to 'category' (if len(unique) < threshold) or 'int' - clean the dataframe and rename if necessary """ if is_slugify: df = df.rename(columns=slugify) df = df.dropna(axis=1, how='all') for column in get_category_cols(df, threshold=threshold): df[column] = df[column].astype('category') for column in get_int_cols(df): df[column] = df[column].astype(int) if rename_cols is not None: df = df.rename(columns=rename_cols) return df
[ "def", "clean_dataframe", "(", "df", ",", "is_slugify", "=", "True", ",", "threshold", "=", "50", ",", "rename_cols", "=", "None", ")", ":", "if", "is_slugify", ":", "df", "=", "df", ".", "rename", "(", "columns", "=", "slugify", ")", "df", "=", "df", ".", "dropna", "(", "axis", "=", "1", ",", "how", "=", "'all'", ")", "for", "column", "in", "get_category_cols", "(", "df", ",", "threshold", "=", "threshold", ")", ":", "df", "[", "column", "]", "=", "df", "[", "column", "]", ".", "astype", "(", "'category'", ")", "for", "column", "in", "get_int_cols", "(", "df", ")", ":", "df", "[", "column", "]", "=", "df", "[", "column", "]", ".", "astype", "(", "int", ")", "if", "rename_cols", "is", "not", "None", ":", "df", "=", "df", ".", "rename", "(", "columns", "=", "rename_cols", ")", "return", "df" ]
33.2
16.5
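A small pandas sketch, assuming clean_dataframe and its helpers (slugify, get_category_cols, get_int_cols) are importable from this module; the exact slugified column names and type conversions depend on those helpers, so the rename key below is an assumption.

import pandas as pd

df = pd.DataFrame({
    'Country Name': ['France', 'France', 'Spain'],  # low cardinality: likely 'category'
    'Amount': [1.0, 2.0, 3.0],                      # whole floats: likely cast to int
    'Empty': [None, None, None],                    # all-NaN column is dropped
})
# 'country-name' assumes slugify's output format; adjust to the real helper.
cleaned = clean_dataframe(df, threshold=50, rename_cols={'country-name': 'country'})
print(cleaned.dtypes)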
def load(stream): """Parse the LHA document and produce the corresponding Python object. Accepts a string or a file-like object.""" if isinstance(stream, str): string = stream else: string = stream.read() tokens = tokenize(string) return parse(tokens)
[ "def", "load", "(", "stream", ")", ":", "if", "isinstance", "(", "stream", ",", "str", ")", ":", "string", "=", "stream", "else", ":", "string", "=", "stream", ".", "read", "(", ")", "tokens", "=", "tokenize", "(", "string", ")", "return", "parse", "(", "tokens", ")" ]
31.444444
13
def list_commands_audit(self, from_sec=None, to_sec=None, scope_filter=None, command_filter=None, limit=100, offset=0, metrics=None):
    '''**Description**
        List the commands audit.

    **Arguments**
        - from_sec: the start of the time range for which to get commands audit.
        - to_sec: the end of the time range for which to get commands audit.
        - scope_filter: this is a SysdigMonitor-like filter (e.g. 'container.image=ubuntu'). When provided, commands are filtered by their scope, so only a subset will be returned (e.g. 'container.image=ubuntu' will provide only commands that have happened on an ubuntu container).
        - command_filter: this is a SysdigMonitor-like filter (e.g. command.comm="touch"). When provided, commands are filtered by some of their properties. Currently the supported set of filters is command.comm, command.cwd, command.pid, command.ppid, command.uid, command.loginshell.id, command.loginshell.distance
        - limit: Maximum number of commands in the response.
        - offset: Number of commands to skip (pagination offset).
        - metrics: A list of metric values to include in the return.

    **Success Return Value**
        A JSON representation of the commands audit.
    '''
    # Use None instead of a mutable default argument for ``metrics``.
    if metrics is None:
        metrics = []
    if to_sec is None:
        to_sec = time.time()
    if from_sec is None:
        from_sec = to_sec - (24 * 60 * 60)  # 1 day

    url = "{url}/api/commands?from={frm}&to={to}&offset={offset}&limit={limit}{scope}{commandFilter}{metrics}".format(
        url=self.url,
        offset=offset,
        limit=limit,
        frm=int(from_sec * 10**6),
        to=int(to_sec * 10**6),
        scope="&scopeFilter=" + scope_filter if scope_filter else "",
        commandFilter="&commandFilter=" + command_filter if command_filter else "",
        metrics="&metrics=" + json.dumps(metrics) if metrics else "")
    res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify)
    return self._request_result(res)
[ "def", "list_commands_audit", "(", "self", ",", "from_sec", "=", "None", ",", "to_sec", "=", "None", ",", "scope_filter", "=", "None", ",", "command_filter", "=", "None", ",", "limit", "=", "100", ",", "offset", "=", "0", ",", "metrics", "=", "[", "]", ")", ":", "if", "to_sec", "is", "None", ":", "to_sec", "=", "time", ".", "time", "(", ")", "if", "from_sec", "is", "None", ":", "from_sec", "=", "to_sec", "-", "(", "24", "*", "60", "*", "60", ")", "# 1 day", "url", "=", "\"{url}/api/commands?from={frm}&to={to}&offset={offset}&limit={limit}{scope}{commandFilter}{metrics}\"", ".", "format", "(", "url", "=", "self", ".", "url", ",", "offset", "=", "offset", ",", "limit", "=", "limit", ",", "frm", "=", "int", "(", "from_sec", "*", "10", "**", "6", ")", ",", "to", "=", "int", "(", "to_sec", "*", "10", "**", "6", ")", ",", "scope", "=", "\"&scopeFilter=\"", "+", "scope_filter", "if", "scope_filter", "else", "\"\"", ",", "commandFilter", "=", "\"&commandFilter=\"", "+", "command_filter", "if", "command_filter", "else", "\"\"", ",", "metrics", "=", "\"&metrics=\"", "+", "json", ".", "dumps", "(", "metrics", ")", "if", "metrics", "else", "\"\"", ")", "res", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "self", ".", "hdrs", ",", "verify", "=", "self", ".", "ssl_verify", ")", "return", "self", ".", "_request_result", "(", "res", ")" ]
63.677419
41.741935
def iter_code_frequency(self, number=-1, etag=None): """Iterate over the code frequency per week. Returns a weekly aggregate of the number of additions and deletions pushed to this repository. :param int number: (optional), number of weeks to return. Default: -1 returns all weeks :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of lists ``[seconds_from_epoch, additions, deletions]`` .. note:: All statistics methods may return a 202. On those occasions, you will not receive any objects. You should store your iterator and check the new ``last_status`` attribute. If it is a 202 you should wait before re-requesting. .. versionadded:: 0.7 """ url = self._build_url('stats', 'code_frequency', base_url=self._api) return self._iter(int(number), url, list, etag=etag)
[ "def", "iter_code_frequency", "(", "self", ",", "number", "=", "-", "1", ",", "etag", "=", "None", ")", ":", "url", "=", "self", ".", "_build_url", "(", "'stats'", ",", "'code_frequency'", ",", "base_url", "=", "self", ".", "_api", ")", "return", "self", ".", "_iter", "(", "int", "(", "number", ")", ",", "url", ",", "list", ",", "etag", "=", "etag", ")" ]
42.521739
26.217391
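A hedged sketch against the older github3.py API this method comes from; the token and repository names are placeholders. Each yielded item is a three-element list, so it unpacks directly.

import github3

gh = github3.login(token='<token>')            # placeholder token
repo = gh.repository('octocat', 'hello-world')  # placeholder repository

for week, additions, deletions in repo.iter_code_frequency(number=4):
    print(week, additions, deletions)
# If the iterator's last_status is 202, GitHub is still computing the
# statistics; wait and re-request.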
def replace_component(self, name, callable, provides=None, depends=None): """Changes an existing component with a given name, invalidating all the values computed by the previous component and its successors.""" self.remove_component(name) self.add_component(name, callable, provides, depends)
[ "def", "replace_component", "(", "self", ",", "name", ",", "callable", ",", "provides", "=", "None", ",", "depends", "=", "None", ")", ":", "self", ".", "remove_component", "(", "name", ")", "self", ".", "add_component", "(", "name", ",", "callable", ",", "provides", ",", "depends", ")" ]
64.2
11.8
def jsonify(py_data, default=None, indent=4, sort_keys=True):
    """
    Converts the inputted Python data to JSON format.

    :param      py_data | <variant>
    """
    # Fall back to py2json when no serializer override is given, so the
    # ``default`` argument is no longer silently ignored.
    return json.dumps(py_data, default=default or py2json, indent=indent,
                      sort_keys=sort_keys)
[ "def", "jsonify", "(", "py_data", ",", "default", "=", "None", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ":", "return", "json", ".", "dumps", "(", "py_data", ",", "default", "=", "py2json", ",", "indent", "=", "indent", ",", "sort_keys", "=", "sort_keys", ")" ]
35.714286
16.857143
def du(path):
    '''
    Return the human-readable disk usage of ``path`` (size plus unit),
    or the error message if the size could not be calculated.
    '''
    size, err = calc(path)
    if err:
        return err
    else:
        hr, unit = convert(size)
        hr = str(hr)
        result = hr + " " + unit
        return result
[ "def", "du", "(", "path", ")", ":", "size", ",", "err", "=", "calc", "(", "path", ")", "if", "err", ":", "return", "err", "else", ":", "hr", ",", "unit", "=", "convert", "(", "size", ")", "hr", "=", "str", "(", "hr", ")", "result", "=", "hr", "+", "\" \"", "+", "unit", "return", "result" ]
18.333333
21.666667
def read_list(self, request): """ :param request: an apiv2 request object :return: request if successful with entities set on request """ request_filters = request.context_params.get( self.request_filters_property, {}) request_filters.update(**self.get_limit_and_offset(request_filters)) entities = self.get_entity_list(request, **request_filters) request.context_params[self.list_property_name] = entities # offset and limit don't make sense to get aggregates count_request_filters = request_filters.copy() count_request_filters.pop('offset', None) count_request_filters.pop('limit', None) count_request_filters.pop('order_by', None) total_count = self.get_entity_list_total_count(request, **count_request_filters) request.context_params[self.entity_list_total_count_property_name] = \ total_count return request
[ "def", "read_list", "(", "self", ",", "request", ")", ":", "request_filters", "=", "request", ".", "context_params", ".", "get", "(", "self", ".", "request_filters_property", ",", "{", "}", ")", "request_filters", ".", "update", "(", "*", "*", "self", ".", "get_limit_and_offset", "(", "request_filters", ")", ")", "entities", "=", "self", ".", "get_entity_list", "(", "request", ",", "*", "*", "request_filters", ")", "request", ".", "context_params", "[", "self", ".", "list_property_name", "]", "=", "entities", "# offset and limit don't make sense to get aggregates", "count_request_filters", "=", "request_filters", ".", "copy", "(", ")", "count_request_filters", ".", "pop", "(", "'offset'", ",", "None", ")", "count_request_filters", ".", "pop", "(", "'limit'", ",", "None", ")", "count_request_filters", ".", "pop", "(", "'order_by'", ",", "None", ")", "total_count", "=", "self", ".", "get_entity_list_total_count", "(", "request", ",", "*", "*", "count_request_filters", ")", "request", ".", "context_params", "[", "self", ".", "entity_list_total_count_property_name", "]", "=", "total_count", "return", "request" ]
45.5
19.590909
def parse_raw_list_data(data, proxy_type='http', proxy_userpwd=None): """Iterate over proxy servers found in the raw data""" if not isinstance(data, six.text_type): data = data.decode('utf-8') for orig_line in data.splitlines(): line = orig_line.strip().replace(' ', '') if line and not line.startswith('#'): try: host, port, username, password = parse_proxy_line(line) except InvalidProxyLine as ex: logger.error(ex) else: if username is None and proxy_userpwd is not None: username, password = proxy_userpwd.split(':') yield Proxy(host, port, username, password, proxy_type)
[ "def", "parse_raw_list_data", "(", "data", ",", "proxy_type", "=", "'http'", ",", "proxy_userpwd", "=", "None", ")", ":", "if", "not", "isinstance", "(", "data", ",", "six", ".", "text_type", ")", ":", "data", "=", "data", ".", "decode", "(", "'utf-8'", ")", "for", "orig_line", "in", "data", ".", "splitlines", "(", ")", ":", "line", "=", "orig_line", ".", "strip", "(", ")", ".", "replace", "(", "' '", ",", "''", ")", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", ":", "try", ":", "host", ",", "port", ",", "username", ",", "password", "=", "parse_proxy_line", "(", "line", ")", "except", "InvalidProxyLine", "as", "ex", ":", "logger", ".", "error", "(", "ex", ")", "else", ":", "if", "username", "is", "None", "and", "proxy_userpwd", "is", "not", "None", ":", "username", ",", "password", "=", "proxy_userpwd", ".", "split", "(", "':'", ")", "yield", "Proxy", "(", "host", ",", "port", ",", "username", ",", "password", ",", "proxy_type", ")" ]
47.866667
14.8
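A quick sketch of feeding raw text through the generator; it assumes parse_raw_list_data and a Proxy type with fields matching the constructor call (host, port, username, password, proxy_type) come from this module, and that parse_proxy_line accepts the usual host:port[:user:pass] format.

# Blank lines and '#' comments are skipped by the generator.
raw = """
# comment lines and blanks are skipped
1.2.3.4:8080
5.6.7.8:3128:user:secret
"""

for proxy in parse_raw_list_data(raw, proxy_type='socks5',
                                 proxy_userpwd='fallback:pw'):
    # Attribute names inferred from the Proxy(...) constructor call above.
    print(proxy.host, proxy.port, proxy.username, proxy.proxy_type)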
def gcd(*numbers): """ Returns the greatest common divisor for a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers. """ n = numbers[0] for i in numbers: n = pygcd(n, i) return n
[ "def", "gcd", "(", "*", "numbers", ")", ":", "n", "=", "numbers", "[", "0", "]", "for", "i", "in", "numbers", ":", "n", "=", "pygcd", "(", "n", ",", "i", ")", "return", "n" ]
20.071429
20.214286
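Since gcd just folds the pairwise pygcd helper over the inputs, a standalone sketch with math.gcd standing in for pygcd shows the same behaviour.

from functools import reduce
from math import gcd as pygcd  # stand-in for the module's pygcd helper

def gcd_all(*numbers):
    """Greatest common divisor of a sequence, folding pairwise."""
    return reduce(pygcd, numbers)

print(gcd_all(12, 18, 24))  # 6
print(gcd_all(7, 13))       # 1 (coprime)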
def get_weekly_charts(self, chart_kind, from_date=None, to_date=None): """ Returns the weekly charts for the week starting from the from_date value to the to_date value. chart_kind should be one of "album", "artist" or "track" """ method = ".getWeekly" + chart_kind.title() + "Chart" chart_type = eval(chart_kind.title()) # string to type params = self._get_params() if from_date and to_date: params["from"] = from_date params["to"] = to_date doc = self._request(self.ws_prefix + method, True, params) seq = [] for node in doc.getElementsByTagName(chart_kind.lower()): if chart_kind == "artist": item = chart_type(_extract(node, "name"), self.network) else: item = chart_type( _extract(node, "artist"), _extract(node, "name"), self.network ) weight = _number(_extract(node, "playcount")) seq.append(TopItem(item, weight)) return seq
[ "def", "get_weekly_charts", "(", "self", ",", "chart_kind", ",", "from_date", "=", "None", ",", "to_date", "=", "None", ")", ":", "method", "=", "\".getWeekly\"", "+", "chart_kind", ".", "title", "(", ")", "+", "\"Chart\"", "chart_type", "=", "eval", "(", "chart_kind", ".", "title", "(", ")", ")", "# string to type", "params", "=", "self", ".", "_get_params", "(", ")", "if", "from_date", "and", "to_date", ":", "params", "[", "\"from\"", "]", "=", "from_date", "params", "[", "\"to\"", "]", "=", "to_date", "doc", "=", "self", ".", "_request", "(", "self", ".", "ws_prefix", "+", "method", ",", "True", ",", "params", ")", "seq", "=", "[", "]", "for", "node", "in", "doc", ".", "getElementsByTagName", "(", "chart_kind", ".", "lower", "(", ")", ")", ":", "if", "chart_kind", "==", "\"artist\"", ":", "item", "=", "chart_type", "(", "_extract", "(", "node", ",", "\"name\"", ")", ",", "self", ".", "network", ")", "else", ":", "item", "=", "chart_type", "(", "_extract", "(", "node", ",", "\"artist\"", ")", ",", "_extract", "(", "node", ",", "\"name\"", ")", ",", "self", ".", "network", ")", "weight", "=", "_number", "(", "_extract", "(", "node", ",", "\"playcount\"", ")", ")", "seq", ".", "append", "(", "TopItem", "(", "item", ",", "weight", ")", ")", "return", "seq" ]
37.642857
19.714286
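A hedged sketch in the pylast style this code resembles; the API keys and username are placeholders, and get_weekly_chart_dates is the usual companion call for valid week boundaries.

import pylast

network = pylast.LastFMNetwork(api_key='<key>', api_secret='<secret>')
user = network.get_user('example_user')  # placeholder username

# Week boundaries usually come from the companion *_chart_dates call.
start, end = user.get_weekly_chart_dates()[0]
for top_item in user.get_weekly_charts('artist', from_date=start, to_date=end):
    print(top_item.item, top_item.weight)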
def Write3DData(self, data3d, path, filetype='auto', metadata=None, progress_callback=None, sfin=True):
    """
    :param data3d: input ndarray data
    :param path: output path; advanced formatting options (like {:06d}) can be
        used to specify the slice number. Check function filename_format()
        for more details.
    :param metadata: {'voxelsize_mm': [1, 1, 1]}
    :param filetype: dcm, vtk, rawiv, image_stack
    :param progress_callback: function for a progress bar,
        e.g. callback(value, minimum, maximum)
    :param sfin: Use separate file for segmentation if necessary
    """
    self.orig_path = path
    path = os.path.expanduser(path)
    try:
        d3d = data3d.pop('data3d')
        metadata = data3d
        data3d = d3d
    except (AttributeError, KeyError):
        # data3d was already an ndarray, not a datap-style dict
        pass

    if progress_callback is not None:
        self.progress_callback = progress_callback

    if filetype == 'auto':
        startpath, ext = os.path.splitext(path)
        filetype = ext[1:].lower()

    segmentation = None
    if metadata is not None and "segmentation" in metadata.keys():
        segmentation_path = self.__get_segmentation_path(path)
        segmentation = metadata["segmentation"]

    mtd = {'voxelsize_mm': [1, 1, 1]}
    if metadata is not None:
        mtd.update(metadata)
    metadata = mtd

    if path.find('{') >= 0:
        filetype = 'image_stack'
        # one_file_per_slice = True

    # if one_file_per_slice:
    #     self._one_file_per_slice(self, data3d, path, filetype, metadata)
    # else:
    #     self._all_in_one_file(self, data3d, path, filetype, metadata)
    #
    # def _all_in_one_file(self, data3d, path, filetype, metadata):

    if filetype in ['vtk', 'tiff', 'tif', "mhd", "nii", "raw"]:
        self._write_with_sitk(path, data3d, metadata)
        if sfin and segmentation is not None:
            self._write_with_sitk(segmentation_path, segmentation, metadata)
    elif filetype in ['dcm', 'DCM', 'dicom']:
        self._write_with_sitk(path, data3d, metadata)
        self._fix_sitk_bug(path, metadata)
        if sfin and segmentation is not None:
            self._write_with_sitk(segmentation_path, segmentation, metadata)
            self._fix_sitk_bug(segmentation_path, metadata)
    elif filetype in ['rawiv']:
        rawN.write(path, data3d, metadata)
    elif filetype in ['image_stack']:
        self.save_image_stack(data3d, path, metadata)
    elif filetype in ['hdf5', 'hdf', 'h5', 'he5']:
        self.save_hdf5(data3d, path, metadata)
    elif filetype in ['pkl', 'pklz']:
        from . import misc
        metadata['data3d'] = data3d
        datap = metadata
        misc.obj_to_file(datap, path)
    else:
        logger.error('Unknown filetype: "' + filetype + '"')
        raise ValueError("Unknown filetype: '" + filetype + "'")
[ "def", "Write3DData", "(", "self", ",", "data3d", ",", "path", ",", "filetype", "=", "'auto'", ",", "metadata", "=", "None", ",", "progress_callback", "=", "None", ",", "sfin", "=", "True", ")", ":", "self", ".", "orig_path", "=", "path", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "try", ":", "d3d", "=", "data3d", ".", "pop", "(", "'data3d'", ")", "metadata", "=", "data3d", "data3d", "=", "d3d", "except", ":", "pass", "if", "progress_callback", "is", "not", "None", ":", "self", ".", "progress_callback", "=", "progress_callback", "if", "filetype", "==", "'auto'", ":", "startpath", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "path", ")", "filetype", "=", "ext", "[", "1", ":", "]", ".", "lower", "(", ")", "segmentation", "=", "None", "if", "metadata", "is", "not", "None", "and", "\"segmentation\"", "in", "metadata", ".", "keys", "(", ")", ":", "segmentation_path", "=", "self", ".", "__get_segmentation_path", "(", "path", ")", "segmentation", "=", "metadata", "[", "\"segmentation\"", "]", "mtd", "=", "{", "'voxelsize_mm'", ":", "[", "1", ",", "1", ",", "1", "]", "}", "if", "metadata", "is", "not", "None", ":", "mtd", ".", "update", "(", "metadata", ")", "metadata", "=", "mtd", "if", "path", ".", "find", "(", "'{'", ")", ">=", "0", ":", "filetype", "=", "'image_stack'", "# one_file_per_slice = True", "# if one_file_per_slice:", "# self._one_file_per_slice(self, data3d, path, filetype, metadata)", "# else:", "# self._all_in_one_file(self, data3d, path, filetype, metadata)", "#", "# def _all_in_one_file(self, data3d, path, filetype, metadata):", "if", "filetype", "in", "[", "'vtk'", ",", "'tiff'", ",", "'tif'", ",", "\"mhd\"", ",", "\"nii\"", ",", "\"raw\"", "]", ":", "self", ".", "_write_with_sitk", "(", "path", ",", "data3d", ",", "metadata", ")", "if", "sfin", "and", "segmentation", "is", "not", "None", ":", "self", ".", "_write_with_sitk", "(", "segmentation_path", ",", "segmentation", ",", "metadata", ")", "elif", "filetype", "in", "[", "'dcm'", ",", "'DCM'", ",", "'dicom'", "]", ":", "self", ".", "_write_with_sitk", "(", "path", ",", "data3d", ",", "metadata", ")", "self", ".", "_fix_sitk_bug", "(", "path", ",", "metadata", ")", "if", "sfin", "and", "segmentation", "is", "not", "None", ":", "self", ".", "_write_with_sitk", "(", "segmentation_path", ",", "segmentation", ",", "metadata", ")", "self", ".", "_fix_sitk_bug", "(", "segmentation_path", ",", "metadata", ")", "elif", "filetype", "in", "[", "'rawiv'", "]", ":", "rawN", ".", "write", "(", "path", ",", "data3d", ",", "metadata", ")", "elif", "filetype", "in", "[", "'image_stack'", "]", ":", "self", ".", "save_image_stack", "(", "data3d", ",", "path", ",", "metadata", ")", "elif", "filetype", "in", "[", "'hdf5'", ",", "'hdf'", ",", "'h5'", ",", "'he5'", "]", ":", "self", ".", "save_hdf5", "(", "data3d", ",", "path", ",", "metadata", ")", "elif", "filetype", "in", "[", "'pkl'", ",", "'pklz'", "]", ":", "from", ".", "import", "misc", "metadata", "[", "'data3d'", "]", "=", "data3d", "datap", "=", "metadata", "misc", ".", "obj_to_file", "(", "datap", ",", "path", ")", "else", ":", "logger", ".", "error", "(", "'Unknown filetype: \"'", "+", "filetype", "+", "'\"'", ")", "raise", "ValueError", "(", "\"Unknown filetype: '\"", "+", "filetype", "+", "\"'\"", ")" ]
37.227848
21.151899
def get_clan_image(self, obj: BaseAttrDict):
    """Get the clan badge image URL

    Parameters
    ----------
    obj: official_api.models.BaseAttrDict
        An object that has the clan badge ID either in ``.clan.badge_id``
        or ``.badge_id``. Can be a clan or a profile, for example.

    Returns
    -------
    str
    """
    try:
        badge_id = obj.clan.badge_id
    except AttributeError:
        try:
            badge_id = obj.badge_id
        except AttributeError:
            return 'https://i.imgur.com/Y3uXsgj.png'

    if badge_id is None:
        return 'https://i.imgur.com/Y3uXsgj.png'

    for i in self.constants.alliance_badges:
        if i.id == badge_id:
            return 'https://royaleapi.github.io/cr-api-assets/badges/' + i.name + '.png'
[ "def", "get_clan_image", "(", "self", ",", "obj", ":", "BaseAttrDict", ")", ":", "try", ":", "badge_id", "=", "obj", ".", "clan", ".", "badge_id", "except", "AttributeError", ":", "try", ":", "badge_id", "=", "obj", ".", "badge_id", "except", "AttributeError", ":", "return", "'https://i.imgur.com/Y3uXsgj.png'", "if", "badge_id", "is", "None", ":", "return", "'https://i.imgur.com/Y3uXsgj.png'", "for", "i", "in", "self", ".", "constants", ".", "alliance_badges", ":", "if", "i", ".", "id", "==", "badge_id", ":", "return", "'https://royaleapi.github.io/cr-api-assets/badges/'", "+", "i", ".", "name", "+", "'.png'" ]
31.423077
19.884615
def plotOutline(self, maptype=None, colour='#AAAAAA', **kwargs):
    """Plot an outline of the FOV.
    """
    if maptype is None:
        maptype = self.defaultMap

    xarr = []
    yarr = []
    radec = self.currentRaDec
    for ch in [20, 4, 11, 28, 32, 71, 68, 84, 75, 60, 56, 15]:
        # np.int was removed in NumPy >= 1.24; use the builtin int instead
        idx = np.where(radec[:, 2].astype(int) == ch)[0]
        idx = idx[0]  # Take the first one
        x, y = maptype.skyToPix(radec[idx][3], radec[idx][4])
        xarr.append(x)
        yarr.append(y)

    verts = np.empty((len(xarr), 2))
    verts[:, 0] = xarr
    verts[:, 1] = yarr

    # There are two ways to specify line colour
    ec = kwargs.pop('ec', "none")
    ec = kwargs.pop('edgecolor', ec)

    p = matplotlib.patches.Polygon(verts, fill=True, ec=ec, fc=colour, **kwargs)
    mp.gca().add_patch(p)
[ "def", "plotOutline", "(", "self", ",", "maptype", "=", "None", ",", "colour", "=", "'#AAAAAA'", ",", "*", "*", "kwargs", ")", ":", "if", "maptype", "is", "None", ":", "maptype", "=", "self", ".", "defaultMap", "xarr", "=", "[", "]", "yarr", "=", "[", "]", "radec", "=", "self", ".", "currentRaDec", "for", "ch", "in", "[", "20", ",", "4", ",", "11", ",", "28", ",", "32", ",", "71", ",", "68", ",", "84", ",", "75", ",", "60", ",", "56", ",", "15", "]", ":", "idx", "=", "np", ".", "where", "(", "radec", "[", ":", ",", "2", "]", ".", "astype", "(", "np", ".", "int", ")", "==", "ch", ")", "[", "0", "]", "idx", "=", "idx", "[", "0", "]", "#Take on the first one", "x", ",", "y", "=", "maptype", ".", "skyToPix", "(", "radec", "[", "idx", "]", "[", "3", "]", ",", "radec", "[", "idx", "]", "[", "4", "]", ")", "xarr", ".", "append", "(", "x", ")", "yarr", ".", "append", "(", "y", ")", "verts", "=", "np", ".", "empty", "(", "(", "len", "(", "xarr", ")", ",", "2", ")", ")", "verts", "[", ":", ",", "0", "]", "=", "xarr", "verts", "[", ":", ",", "1", "]", "=", "yarr", "#There are two ways to specify line colour", "ec", "=", "kwargs", ".", "pop", "(", "'ec'", ",", "\"none\"", ")", "ec", "=", "kwargs", ".", "pop", "(", "'edgecolor'", ",", "ec", ")", "p", "=", "matplotlib", ".", "patches", ".", "Polygon", "(", "verts", ",", "fill", "=", "True", ",", "ec", "=", "ec", ",", "fc", "=", "colour", ",", "*", "*", "kwargs", ")", "mp", ".", "gca", "(", ")", ".", "add_patch", "(", "p", ")" ]
33.230769
17.730769
def hamming_calc(TP, POP): """ Calculate hamming loss. :param TP: true positive :type TP : dict :param POP: population :type POP : int :return: hamming loss as float """ try: length = POP return (1 / length) * (length - sum(TP.values())) except Exception: return "None"
[ "def", "hamming_calc", "(", "TP", ",", "POP", ")", ":", "try", ":", "length", "=", "POP", "return", "(", "1", "/", "length", ")", "*", "(", "length", "-", "sum", "(", "TP", ".", "values", "(", ")", ")", ")", "except", "Exception", ":", "return", "\"None\"" ]
21.333333
16.533333
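A tiny worked example of the formula (1/POP) * (POP - sum(TP)); it only assumes hamming_calc is importable from this module.

# Per-class true positives and the total population.
TP = {'cat': 40, 'dog': 35}
POP = 100

# (1 / 100) * (100 - 75) = 0.25
print(hamming_calc(TP, POP))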
def get_corrector_f(rinput, meta, ins, datamodel):
    """Corrector for intensity flat"""
    from emirdrp.processing.flatfield import FlatFieldCorrector
    flat_info = meta['master_flat']
    with rinput.master_flat.open() as hdul:
        _logger.info('loading intensity flat')
        _logger.debug('flat info: %s', flat_info)
        mflat = hdul[0].data
        # Check for negative values and NaNs
        mask1 = mflat < 0
        mask2 = ~numpy.isfinite(mflat)
        if numpy.any(mask1):
            _logger.warning('flat has %d values below 0', mask1.sum())
        if numpy.any(mask2):
            _logger.warning('flat has %d NaN', mask2.sum())
        flat_corrector = FlatFieldCorrector(mflat,
                                            datamodel=datamodel,
                                            calibid=datamodel.get_imgid(hdul))

    return flat_corrector
[ "def", "get_corrector_f", "(", "rinput", ",", "meta", ",", "ins", ",", "datamodel", ")", ":", "from", "emirdrp", ".", "processing", ".", "flatfield", "import", "FlatFieldCorrector", "flat_info", "=", "meta", "[", "'master_flat'", "]", "with", "rinput", ".", "master_flat", ".", "open", "(", ")", "as", "hdul", ":", "_logger", ".", "info", "(", "'loading intensity flat'", ")", "_logger", ".", "debug", "(", "'flat info: %s'", ",", "flat_info", ")", "mflat", "=", "hdul", "[", "0", "]", ".", "data", "# Check NaN and Ceros", "mask1", "=", "mflat", "<", "0", "mask2", "=", "~", "numpy", ".", "isfinite", "(", "mflat", ")", "if", "numpy", ".", "any", "(", "mask1", ")", ":", "_logger", ".", "warning", "(", "'flat has %d values below 0'", ",", "mask1", ".", "sum", "(", ")", ")", "if", "numpy", ".", "any", "(", "mask2", ")", ":", "_logger", ".", "warning", "(", "'flat has %d NaN'", ",", "mask2", ".", "sum", "(", ")", ")", "flat_corrector", "=", "FlatFieldCorrector", "(", "mflat", ",", "datamodel", "=", "datamodel", ",", "calibid", "=", "datamodel", ".", "get_imgid", "(", "hdul", ")", ")", "return", "flat_corrector" ]
42.3
14.8