Dataset columns: text (string, 89 to 104k characters), code_tokens (list of token strings), avg_line_len (float64, 7.91 to 980), score (float64, 0 to 630).
def add_file_patterns(self, patterns, blacklist):
    """Adds a list of file patterns to either the black- or white-list.

    Note that these patterns are applied to the absolute path of the file
    that will be delivered. For including or excluding folders, use
    `add_folder_mask` or `add_folder_fallback`.
    """
    target = self._pattern_black if blacklist else self._pattern_white
    for pattern in patterns:
        target.append(pattern)
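A minimal usage sketch, assuming a hypothetical `deliverer` object that exposes this method alongside the `_pattern_black`/`_pattern_white` lists:

# Hypothetical instance; the class defining add_file_patterns is not shown above.
deliverer.add_file_patterns([r'.*\.pyc$', r'.*\.log$'], blacklist=True)   # exclude
deliverer.add_file_patterns([r'.*\.py$'], blacklist=False)                # include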
[ "def", "add_file_patterns", "(", "self", ",", "patterns", ",", "blacklist", ")", ":", "bl", "=", "self", ".", "_pattern_black", "if", "blacklist", "else", "self", ".", "_pattern_white", "for", "pattern", "in", "patterns", ":", "bl", ".", "append", "(", "pattern", ")" ]
52.333333
15.666667
def run(**options):
    """
    _run_

    Run the dockerstache process to render templates based on the options
    provided.

    If extend_context is passed in options, it will be used to extend the
    context with the contents of the dictionary provided, via
    context.update(extend_context).
    """
    with Dotfile(options) as conf:
        if conf['context'] is None:
            msg = "No context file has been provided"
            LOGGER.error(msg)
            raise RuntimeError(msg)
        if not os.path.exists(conf['context_path']):
            msg = "Context file {} not found".format(conf['context_path'])
            LOGGER.error(msg)
            raise RuntimeError(msg)
        LOGGER.info(
            (
                "{{dockerstache}}: In: {}\n"
                "{{dockerstache}}: Out: {}\n"
                "{{dockerstache}}: Context: {}\n"
                "{{dockerstache}}: Defaults: {}\n"
            ).format(conf['input'], conf['output'],
                     conf['context'], conf['defaults'])
        )
        context = Context(conf['context'], conf['defaults'])
        context.load()
        if 'extend_context' in options:
            LOGGER.info("{{dockerstache}} Extended context provided")
            context.update(options['extend_context'])
        process_templates(conf['input'], conf['output'], context)
        if conf['inclusive']:
            process_copies(conf['input'], conf['output'], conf['exclude'])
        return dict(conf)
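A hedged usage sketch; `run` reads its settings through `Dotfile`, so the exact option keys accepted are an assumption based on the `conf` lookups above:

rendered = run(
    input='templates/',                    # template directory (assumed key)
    output='build/',                       # render destination (assumed key)
    context='context.json',                # mustache context file
    defaults='defaults.json',
    extend_context={'version': '1.2.3'},   # merged into the context before rendering
)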
[ "def", "run", "(", "*", "*", "options", ")", ":", "with", "Dotfile", "(", "options", ")", "as", "conf", ":", "if", "conf", "[", "'context'", "]", "is", "None", ":", "msg", "=", "\"No context file has been provided\"", "LOGGER", ".", "error", "(", "msg", ")", "raise", "RuntimeError", "(", "msg", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "conf", "[", "'context_path'", "]", ")", ":", "msg", "=", "\"Context file {} not found\"", ".", "format", "(", "conf", "[", "'context_path'", "]", ")", "LOGGER", ".", "error", "(", "msg", ")", "raise", "RuntimeError", "(", "msg", ")", "LOGGER", ".", "info", "(", "(", "\"{{dockerstache}}: In: {}\\n\"", "\"{{dockerstache}}: Out: {}\\n\"", "\"{{dockerstache}}: Context: {}\\n\"", "\"{{dockerstache}}: Defaults: {}\\n\"", ")", ".", "format", "(", "conf", "[", "'input'", "]", ",", "conf", "[", "'output'", "]", ",", "conf", "[", "'context'", "]", ",", "conf", "[", "'defaults'", "]", ")", ")", "context", "=", "Context", "(", "conf", "[", "'context'", "]", ",", "conf", "[", "'defaults'", "]", ")", "context", ".", "load", "(", ")", "if", "'extend_context'", "in", "options", ":", "LOGGER", ".", "info", "(", "\"{{dockerstache}} Extended context provided\"", ")", "context", ".", "update", "(", "options", "[", "'extend_context'", "]", ")", "process_templates", "(", "conf", "[", "'input'", "]", ",", "conf", "[", "'output'", "]", ",", "context", ")", "if", "conf", "[", "'inclusive'", "]", ":", "process_copies", "(", "conf", "[", "'input'", "]", ",", "conf", "[", "'output'", "]", ",", "conf", "[", "'exclude'", "]", ")", "return", "dict", "(", "conf", ")" ]
32.361702
17.085106
def main(forward=26944, host='127.0.0.1', listen=5555):
    '''
    Args:
      - forward(int): local forward port
      - host(string): local forward host
      - listen(int): listen port
    '''
    # HTTP->HTTP: on your computer, browse to "http://127.0.0.1:81/" and
    # you'll get http://www.google.com
    # Pass `host` through instead of hard-coding "127.0.0.1", so the
    # behavior matches the docstring and the status line printed below.
    server = maproxy.proxyserver.ProxyServer(host, forward)
    server.listen(listen)
    print("Local IP:", socket.gethostbyname(socket.gethostname()))
    print("0.0.0.0:{} -> {}:{}".format(listen, host, forward))
    tornado.ioloop.IOLoop.instance().start()
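Calling it starts a blocking Tornado IOLoop; for example (the values are just the defaults above):

# Listens on 0.0.0.0:5555 and proxies every connection to 127.0.0.1:26944.
# Blocks until the IOLoop is stopped (e.g. Ctrl-C).
main(forward=26944, host='127.0.0.1', listen=5555)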
[ "def", "main", "(", "forward", "=", "26944", ",", "host", "=", "'127.0.0.1'", ",", "listen", "=", "5555", ")", ":", "# HTTP->HTTP: On your computer, browse to \"http://127.0.0.1:81/\" and you'll get http://www.google.com", "server", "=", "maproxy", ".", "proxyserver", ".", "ProxyServer", "(", "\"127.0.0.1\"", ",", "forward", ")", "server", ".", "listen", "(", "listen", ")", "print", "(", "\"Local IP:\"", ",", "socket", ".", "gethostbyname", "(", "socket", ".", "gethostname", "(", ")", ")", ")", "print", "(", "\"0.0.0.0:{} -> {}:{}\"", ".", "format", "(", "listen", ",", "host", ",", "forward", ")", ")", "tornado", ".", "ioloop", ".", "IOLoop", ".", "instance", "(", ")", ".", "start", "(", ")" ]
43.384615
21.538462
def _ddns(self, ip):
    """
    curl -X POST https://dnsapi.cn/Record.Ddns -d
    'login_token=LOGIN_TOKEN&format=json&domain_id=2317346&record_id=16894439&record_line=默认&sub_domain=www'

    :return: True if the DNSPod API reports success.
    """
    headers = {"Accept": "text/json",
               "User-Agent": "ddns/0.1.0 ([email protected])"}
    data = {
        'login_token': self.login_token,
        'format': "json",
        'domain_id': self.domain_id,
        'record_id': self.record_id,
        'sub_domain': self.sub_domain,
        'record_line': '默认',  # "default" record line, as required by the DNSPod API
        'value': ip
    }
    res = requests.post(Ddns.DNSPOD_API, data, headers=headers)
    logger.debug(res.json())
    return res.json()['status']['code'] == '1'
[ "def", "_ddns", "(", "self", ",", "ip", ")", ":", "headers", "=", "{", "\"Accept\"", ":", "\"text/json\"", ",", "\"User-Agent\"", ":", "\"ddns/0.1.0 ([email protected])\"", "}", "data", "=", "{", "'login_token'", ":", "self", ".", "login_token", ",", "'format'", ":", "\"json\"", ",", "'domain_id'", ":", "self", ".", "domain_id", ",", "'record_id'", ":", "self", ".", "record_id", ",", "'sub_domain'", ":", "self", ".", "sub_domain", ",", "'record_line'", ":", "'默认',", "", "'value'", ":", "ip", "}", "res", "=", "requests", ".", "post", "(", "Ddns", ".", "DNSPOD_API", ",", "data", ",", "headers", "=", "headers", ")", "logger", ".", "debug", "(", "res", ".", "json", "(", ")", ")", "return", "res", ".", "json", "(", ")", "[", "'status'", "]", "[", "'code'", "]", "==", "'1'" ]
34.809524
22.619048
def get_val(self):
    """
    Gets the attribute's value.

    @return: stored value.
    @rtype: int

    @raise IOError: if the corresponding file in /proc/sys cannot be read.
    """
    with open(os.path.join(self._base, self._attr), 'r') as file_obj:
        return int(file_obj.readline())
[ "def", "get_val", "(", "self", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_base", ",", "self", ".", "_attr", ")", ",", "'r'", ")", "as", "file_obj", ":", "return", "int", "(", "file_obj", ".", "readline", "(", ")", ")" ]
31
17.2
def is_deaf(self):
    """
    Whether the consonant is voiceless (deaf).
    """
    if not self.is_consonant():
        return False
    if self.letter in self.forever_deaf:
        return True
    if self.letter in self.forever_sonorus:
        return False
    # __forsed_sonorus is tri-state: True (forced voiced), False (forced
    # voiceless), or None (undecided, fall through to the pair lookup).
    if self.__forsed_sonorus:
        return False
    if self.__forsed_sonorus is False:
        return True
    for _, df in self.sonorus_deaf_pairs:
        if self.letter == df:
            return True
    return False
[ "def", "is_deaf", "(", "self", ")", ":", "if", "not", "self", ".", "is_consonant", "(", ")", ":", "return", "False", "if", "self", ".", "letter", "in", "self", ".", "forever_deaf", ":", "return", "True", "if", "self", ".", "letter", "in", "self", ".", "forever_sonorus", ":", "return", "False", "if", "self", ".", "__forsed_sonorus", ":", "return", "False", "if", "self", ".", "__forsed_sonorus", "is", "False", ":", "return", "True", "for", "_", ",", "df", "in", "self", ".", "sonorus_deaf_pairs", ":", "if", "self", ".", "letter", "==", "df", ":", "return", "True", "return", "False" ]
28.444444
10.333333
def _get_ax_layer(cls, ax, primary=True):
    """get left (primary) or right (secondary) axes"""
    if primary:
        return getattr(ax, 'left_ax', ax)
    else:
        return getattr(ax, 'right_ax', ax)
[ "def", "_get_ax_layer", "(", "cls", ",", "ax", ",", "primary", "=", "True", ")", ":", "if", "primary", ":", "return", "getattr", "(", "ax", ",", "'left_ax'", ",", "ax", ")", "else", ":", "return", "getattr", "(", "ax", ",", "'right_ax'", ",", "ax", ")" ]
37
10
def get(name, default=None, allow_default=True):
    """ Shortcut method for getting a setting value.

    :param str name: Setting key name.
    :param default: Default value of setting if it's not explicitly set.
        Defaults to `None`.
    :param bool allow_default: If true, use the parameter default as
        default if the key is not set, else raise :exc:`KeyError`.
        Defaults to `True`.
    :raises: :exc:`KeyError` if allow_default is false and the setting is
        not set.
    """
    return Config().get(name, default, allow_default=allow_default)
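Usage is a thin wrapper around the `Config` singleton; for example (the setting keys are illustrative):

timeout = get('http.timeout', default=30)              # returns 30 when the key is unset
api_key = get('service.api_key', allow_default=False)  # raises KeyError when unset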
[ "def", "get", "(", "name", ",", "default", "=", "None", ",", "allow_default", "=", "True", ")", ":", "return", "Config", "(", ")", ".", "get", "(", "name", ",", "default", ",", "allow_default", "=", "allow_default", ")" ]
48.692308
18.769231
def getColors(self):
    """
    Overrideable function that generates the colors to be used by various
    borderstyles.

    Should return a 5-tuple of ``(bg, o, i, s, h)``.

    ``bg`` is the base color of the background.

    ``o`` is the outer color, it is usually the same as the background color.

    ``i`` is the inner color, it is usually lighter than the background color.

    ``s`` is the shadow color, it is usually quite a bit darker than the background.

    ``h`` is the highlight color, it is usually quite a bit lighter than the background.
    """
    bg = self.submenu.bg[:3] if isinstance(self.submenu.bg, (list, tuple)) else [242, 241, 240]
    o, i = bg, [min(bg[0] + 8, 255), min(bg[1] + 8, 255), min(bg[2] + 8, 255)]
    s, h = ([max(bg[0] - 40, 0), max(bg[1] - 40, 0), max(bg[2] - 40, 0)],
            [min(bg[0] + 12, 255), min(bg[1] + 12, 255), min(bg[2] + 12, 255)])
    # Outer, Inner, Shadow, Highlight
    return bg, o, i, s, h
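With the default background ``[242, 241, 240]``, the per-channel arithmetic (clamped to 0-255) yields concrete values:

bg = [242, 241, 240]
o = bg               # outer: same as background
i = [250, 249, 248]  # inner: +8 per channel
s = [202, 201, 200]  # shadow: -40 per channel
h = [254, 253, 252]  # highlight: +12 per channel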
[ "def", "getColors", "(", "self", ")", ":", "bg", "=", "self", ".", "submenu", ".", "bg", "[", ":", "3", "]", "if", "isinstance", "(", "self", ".", "submenu", ".", "bg", ",", "list", ")", "or", "isinstance", "(", "self", ".", "submenu", ".", "bg", ",", "tuple", ")", "else", "[", "242", ",", "241", ",", "240", "]", "o", ",", "i", "=", "bg", ",", "[", "min", "(", "bg", "[", "0", "]", "+", "8", ",", "255", ")", ",", "min", "(", "bg", "[", "1", "]", "+", "8", ",", "255", ")", ",", "min", "(", "bg", "[", "2", "]", "+", "8", ",", "255", ")", "]", "s", ",", "h", "=", "[", "max", "(", "bg", "[", "0", "]", "-", "40", ",", "0", ")", ",", "max", "(", "bg", "[", "1", "]", "-", "40", ",", "0", ")", ",", "max", "(", "bg", "[", "2", "]", "-", "40", ",", "0", ")", "]", ",", "[", "min", "(", "bg", "[", "0", "]", "+", "12", ",", "255", ")", ",", "min", "(", "bg", "[", "1", "]", "+", "12", ",", "255", ")", ",", "min", "(", "bg", "[", "2", "]", "+", "12", ",", "255", ")", "]", "# Outer,Inner,Shadow,Highlight", "return", "bg", ",", "o", ",", "i", ",", "s", ",", "h" ]
47.904762
32.47619
def killCells(self, percent=0.05):
    """
    Changes the percentage of cells that are now considered dead. The first
    time you call this method a permutation list is set up. Subsequent calls
    change the number of cells considered dead.
    """
    numColumns = numpy.prod(self.getColumnDimensions())
    if self.zombiePermutation is None:
        self.zombiePermutation = numpy.random.permutation(numColumns)
    self.numDead = int(round(percent * numColumns))
    if self.numDead > 0:
        self.deadCols = self.zombiePermutation[0:self.numDead]
    else:
        self.deadCols = numpy.array([])
    self.deadColumnInputSpan = self.getConnectedSpan(self.deadCols)
    self.removeDeadColumns()
[ "def", "killCells", "(", "self", ",", "percent", "=", "0.05", ")", ":", "numColumns", "=", "numpy", ".", "prod", "(", "self", ".", "getColumnDimensions", "(", ")", ")", "if", "self", ".", "zombiePermutation", "is", "None", ":", "self", ".", "zombiePermutation", "=", "numpy", ".", "random", ".", "permutation", "(", "numColumns", ")", "self", ".", "numDead", "=", "int", "(", "round", "(", "percent", "*", "numColumns", ")", ")", "if", "self", ".", "numDead", ">", "0", ":", "self", ".", "deadCols", "=", "self", ".", "zombiePermutation", "[", "0", ":", "self", ".", "numDead", "]", "else", ":", "self", ".", "deadCols", "=", "numpy", ".", "array", "(", "[", "]", ")", "self", ".", "deadColumnInputSpan", "=", "self", ".", "getConnectedSpan", "(", "self", ".", "deadCols", ")", "self", ".", "removeDeadColumns", "(", ")" ]
33.55
20.25
def delete_license_request(request):
    """Submission to remove a license acceptance request."""
    uuid_ = request.matchdict['uuid']
    posted_uids = [x['uid'] for x in request.json.get('licensors', [])]
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            remove_license_requests(cursor, uuid_, posted_uids)
    resp = request.response
    resp.status_int = 200
    return resp
[ "def", "delete_license_request", "(", "request", ")", ":", "uuid_", "=", "request", ".", "matchdict", "[", "'uuid'", "]", "posted_uids", "=", "[", "x", "[", "'uid'", "]", "for", "x", "in", "request", ".", "json", ".", "get", "(", "'licensors'", ",", "[", "]", ")", "]", "with", "db_connect", "(", ")", "as", "db_conn", ":", "with", "db_conn", ".", "cursor", "(", ")", "as", "cursor", ":", "remove_license_requests", "(", "cursor", ",", "uuid_", ",", "posted_uids", ")", "resp", "=", "request", ".", "response", "resp", ".", "status_int", "=", "200", "return", "resp" ]
33.916667
16.75
def serialize_attribute(attribute):  # noqa: C901 pylint: disable=too-many-locals
    # type: (dynamodb_types.RAW_ATTRIBUTE) -> bytes
    """Serializes a raw attribute to a byte string as defined for the DynamoDB
    Client-Side Encryption Standard.

    :param dict attribute: Item attribute value
    :returns: Serialized attribute
    :rtype: bytes
    """

    def _transform_binary_value(value):
        # type: (dynamodb_types.BINARY) -> bytes
        """
        :param value: Input value
        :type value: boto3.dynamodb.types.Binary
        :returns: bytes value
        :rtype: bytes
        """
        if isinstance(value, Binary):
            return bytes(value.value)
        return bytes(value)

    def _serialize_binary(_attribute):
        # type: (dynamodb_types.BINARY) -> bytes
        """
        :param _attribute: Attribute to serialize
        :type _attribute: boto3.dynamodb.types.Binary
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _RESERVED + Tag.BINARY.tag + encode_value(_transform_binary_value(_attribute))

    def _transform_number_value(value):
        # type: (str) -> bytes
        """
        :param value: Input value
        :type value: numbers.Number
        :returns: bytes value
        :rtype: bytes
        """
        # At this point we are receiving values which have already been transformed
        # by dynamodb.TypeSerializer, so all numbers are str. However, TypeSerializer
        # leaves trailing zeros if they are defined in the Decimal call, but we need
        # to strip all trailing zeros.
        decimal_value = DYNAMODB_CONTEXT.create_decimal(value).normalize()
        return "{0:f}".format(decimal_value).encode("utf-8")

    def _serialize_number(_attribute):
        # type: (str) -> bytes
        """
        :param _attribute: Attribute to serialize
        :type _attribute: numbers.Number
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _RESERVED + Tag.NUMBER.tag + encode_value(_transform_number_value(_attribute))

    def _transform_string_value(value):
        # type: (dynamodb_types.STRING) -> bytes
        """
        :param value: Input value
        :type value: bytes or str
        :returns: bytes value
        :rtype: bytes
        """
        return to_bytes(value)

    def _serialize_string(_attribute):
        # type: (dynamodb_types.STRING) -> bytes
        """
        :param _attribute: Attribute to serialize
        :type _attribute: six.string_types
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _RESERVED + Tag.STRING.tag + encode_value(_transform_string_value(_attribute))

    def _serialize_boolean(_attribute):
        # type: (dynamodb_types.BOOLEAN) -> bytes
        """
        :param bool _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        _attribute_value = TagValues.TRUE.value if _attribute else TagValues.FALSE.value
        return _RESERVED + Tag.BOOLEAN.tag + _attribute_value

    def _serialize_null(_attribute):
        # type: (dynamodb_types.NULL) -> bytes
        """
        :param _attribute: Attribute to serialize
        :type _attribute: types.NoneType
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _RESERVED + Tag.NULL.tag

    def _serialize_set(tag, _attribute, member_function):
        # type: (Tag, dynamodb_types.SET[dynamodb_types.ATTRIBUTE], Callable) -> bytes
        """
        :param bytes tag: Tag to identify this set
        :param set _attribute: Attribute to serialize
        :param member_function: Serialization function for members
        :returns: Serialized _attribute
        :rtype: bytes
        """
        serialized_attribute = io.BytesIO()
        serialized_attribute.write(_RESERVED)
        serialized_attribute.write(tag.tag)
        serialized_attribute.write(encode_length(_attribute))

        encoded_members = []
        for member in _attribute:
            encoded_members.append(member_function(member))

        for member in sorted(encoded_members):
            serialized_attribute.write(encode_value(member))

        return serialized_attribute.getvalue()

    def _serialize_binary_set(_attribute):
        # type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes
        """
        :param set _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _serialize_set(Tag.BINARY_SET, _attribute, _transform_binary_value)

    def _serialize_number_set(_attribute):
        # type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes
        """
        :param set _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _serialize_set(Tag.NUMBER_SET, _attribute, _transform_number_value)

    def _serialize_string_set(_attribute):
        # type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes
        """
        :param set _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        return _serialize_set(Tag.STRING_SET, _attribute, _transform_string_value)

    def _serialize_list(_attribute):
        # type: (dynamodb_types.LIST) -> bytes
        """
        :param list _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        serialized_attribute = io.BytesIO()
        serialized_attribute.write(_RESERVED)
        serialized_attribute.write(Tag.LIST.tag)
        serialized_attribute.write(encode_length(_attribute))

        for member in _attribute:
            serialized_attribute.write(serialize_attribute(member))

        return serialized_attribute.getvalue()

    def _serialize_map(_attribute):
        # type: (dynamodb_types.MAP) -> bytes
        """
        :param dict _attribute: Attribute to serialize
        :returns: Serialized _attribute
        :rtype: bytes
        """
        serialized_attribute = io.BytesIO()
        serialized_attribute.write(_RESERVED)
        serialized_attribute.write(Tag.MAP.tag)
        serialized_attribute.write(encode_length(_attribute))

        sorted_items = _sorted_key_map(item=_attribute, transform=_transform_string_value)

        for key, value, _original_key in sorted_items:
            serialized_attribute.write(_serialize_string(key))
            serialized_attribute.write(serialize_attribute(value))

        return serialized_attribute.getvalue()

    def _serialize_function(dynamodb_tag):
        # type: (str) -> Callable[[dynamodb_types.ATTRIBUTE], bytes]
        """Locates the appropriate serialization function for the specified DynamoDB attribute tag."""
        serialize_functions = {
            Tag.BINARY.dynamodb_tag: _serialize_binary,
            Tag.BINARY_SET.dynamodb_tag: _serialize_binary_set,
            Tag.NUMBER.dynamodb_tag: _serialize_number,
            Tag.NUMBER_SET.dynamodb_tag: _serialize_number_set,
            Tag.STRING.dynamodb_tag: _serialize_string,
            Tag.STRING_SET.dynamodb_tag: _serialize_string_set,
            Tag.BOOLEAN.dynamodb_tag: _serialize_boolean,
            Tag.NULL.dynamodb_tag: _serialize_null,
            Tag.LIST.dynamodb_tag: _serialize_list,
            Tag.MAP.dynamodb_tag: _serialize_map,
        }
        try:
            return serialize_functions[dynamodb_tag]
        except KeyError:
            raise SerializationError('Unsupported DynamoDB data type: "{}"'.format(dynamodb_tag))

    if not isinstance(attribute, dict):
        raise TypeError('Invalid attribute type "{}": must be dict'.format(type(attribute)))

    if len(attribute) != 1:
        raise SerializationError(
            "cannot serialize attribute: incorrect number of members {} != 1".format(len(attribute))
        )

    key, value = list(attribute.items())[0]
    return _serialize_function(key)(value)
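The trailing-zero normalization performed in `_transform_number_value` can be seen in isolation with boto3's decimal context (a standalone sketch; only `DYNAMODB_CONTEXT` from boto3 is assumed, the rest is plain Python):

from boto3.dynamodb.types import DYNAMODB_CONTEXT

# TypeSerializer keeps trailing zeros ("1.500"); normalize() strips them,
# and the "{0:f}" format avoids scientific notation in the serialized form.
value = DYNAMODB_CONTEXT.create_decimal("1.500").normalize()
print("{0:f}".format(value))  # -> 1.5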
[ "def", "serialize_attribute", "(", "attribute", ")", ":", "# noqa: C901 pylint: disable=too-many-locals", "# type: (dynamodb_types.RAW_ATTRIBUTE) -> bytes", "def", "_transform_binary_value", "(", "value", ")", ":", "# type: (dynamodb_types.BINARY) -> bytes", "\"\"\"\n :param value: Input value\n :type value: boto3.dynamodb.types.Binary\n :returns: bytes value\n :rtype: bytes\n \"\"\"", "if", "isinstance", "(", "value", ",", "Binary", ")", ":", "return", "bytes", "(", "value", ".", "value", ")", "return", "bytes", "(", "value", ")", "def", "_serialize_binary", "(", "_attribute", ")", ":", "# type: (dynamodb_types.BINARY) -> bytes", "\"\"\"\n :param _attribute: Attribute to serialize\n :type _attribute: boto3.dynamodb.types.Binary\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_RESERVED", "+", "Tag", ".", "BINARY", ".", "tag", "+", "encode_value", "(", "_transform_binary_value", "(", "_attribute", ")", ")", "def", "_transform_number_value", "(", "value", ")", ":", "# type: (str) -> bytes", "\"\"\"\n :param value: Input value\n :type value: numbers.Number\n :returns: bytes value\n :rtype: bytes\n \"\"\"", "# At this point we are receiving values which have already been transformed", "# by dynamodb.TypeSerializer, so all numbers are str. However, TypeSerializer", "# leaves trailing zeros if they are defined in the Decimal call, but we need to", "# strip all trailing zeros.", "decimal_value", "=", "DYNAMODB_CONTEXT", ".", "create_decimal", "(", "value", ")", ".", "normalize", "(", ")", "return", "\"{0:f}\"", ".", "format", "(", "decimal_value", ")", ".", "encode", "(", "\"utf-8\"", ")", "def", "_serialize_number", "(", "_attribute", ")", ":", "# type: (str) -> bytes", "\"\"\"\n :param _attribute: Attribute to serialize\n :type _attribute: numbers.Number\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_RESERVED", "+", "Tag", ".", "NUMBER", ".", "tag", "+", "encode_value", "(", "_transform_number_value", "(", "_attribute", ")", ")", "def", "_transform_string_value", "(", "value", ")", ":", "# type: (dynamodb_types.STRING) -> bytes", "\"\"\"\n :param value: Input value\n :type value: bytes or str\n :returns: bytes value\n :rtype: bytes\n \"\"\"", "return", "to_bytes", "(", "value", ")", "def", "_serialize_string", "(", "_attribute", ")", ":", "# type: (dynamodb_types.STRING) -> bytes", "\"\"\"\n :param _attribute: Attribute to serialize\n :type _attribute: six.string_types\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_RESERVED", "+", "Tag", ".", "STRING", ".", "tag", "+", "encode_value", "(", "_transform_string_value", "(", "_attribute", ")", ")", "def", "_serialize_boolean", "(", "_attribute", ")", ":", "# type: (dynamodb_types.BOOLEAN) -> bytes", "\"\"\"\n :param bool _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "_attribute_value", "=", "TagValues", ".", "TRUE", ".", "value", "if", "_attribute", "else", "TagValues", ".", "FALSE", ".", "value", "return", "_RESERVED", "+", "Tag", ".", "BOOLEAN", ".", "tag", "+", "_attribute_value", "def", "_serialize_null", "(", "_attribute", ")", ":", "# type: (dynamodb_types.NULL) -> bytes", "\"\"\"\n :param _attribute: Attribute to serialize\n :type _attribute: types.NoneType\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_RESERVED", "+", "Tag", ".", "NULL", ".", "tag", "def", "_serialize_set", "(", "tag", ",", "_attribute", ",", "member_function", ")", ":", "# type: (Tag, 
dynamodb_types.SET[dynamodb_types.ATTRIBUTE], Callable) -> bytes", "\"\"\"\n :param bytes tag: Tag to identify this set\n :param set _attribute: Attribute to serialize\n :param member_function: Serialization function for members\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "serialized_attribute", "=", "io", ".", "BytesIO", "(", ")", "serialized_attribute", ".", "write", "(", "_RESERVED", ")", "serialized_attribute", ".", "write", "(", "tag", ".", "tag", ")", "serialized_attribute", ".", "write", "(", "encode_length", "(", "_attribute", ")", ")", "encoded_members", "=", "[", "]", "for", "member", "in", "_attribute", ":", "encoded_members", ".", "append", "(", "member_function", "(", "member", ")", ")", "for", "member", "in", "sorted", "(", "encoded_members", ")", ":", "serialized_attribute", ".", "write", "(", "encode_value", "(", "member", ")", ")", "return", "serialized_attribute", ".", "getvalue", "(", ")", "def", "_serialize_binary_set", "(", "_attribute", ")", ":", "# type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes", "\"\"\"\n :param set _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_serialize_set", "(", "Tag", ".", "BINARY_SET", ",", "_attribute", ",", "_transform_binary_value", ")", "def", "_serialize_number_set", "(", "_attribute", ")", ":", "# type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes", "\"\"\"\n :param set _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_serialize_set", "(", "Tag", ".", "NUMBER_SET", ",", "_attribute", ",", "_transform_number_value", ")", "def", "_serialize_string_set", "(", "_attribute", ")", ":", "# type: (dynamodb_types.SET[dynamodb_types.ATTRIBUTE]) -> bytes", "\"\"\"\n :param set _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "return", "_serialize_set", "(", "Tag", ".", "STRING_SET", ",", "_attribute", ",", "_transform_string_value", ")", "def", "_serialize_list", "(", "_attribute", ")", ":", "# type: (dynamodb_types.LIST) -> bytes", "\"\"\"\n :param list _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "serialized_attribute", "=", "io", ".", "BytesIO", "(", ")", "serialized_attribute", ".", "write", "(", "_RESERVED", ")", "serialized_attribute", ".", "write", "(", "Tag", ".", "LIST", ".", "tag", ")", "serialized_attribute", ".", "write", "(", "encode_length", "(", "_attribute", ")", ")", "for", "member", "in", "_attribute", ":", "serialized_attribute", ".", "write", "(", "serialize_attribute", "(", "member", ")", ")", "return", "serialized_attribute", ".", "getvalue", "(", ")", "def", "_serialize_map", "(", "_attribute", ")", ":", "# type: (dynamodb_types.MAP) -> bytes", "\"\"\"\n :param list _attribute: Attribute to serialize\n :returns: Serialized _attribute\n :rtype: bytes\n \"\"\"", "serialized_attribute", "=", "io", ".", "BytesIO", "(", ")", "serialized_attribute", ".", "write", "(", "_RESERVED", ")", "serialized_attribute", ".", "write", "(", "Tag", ".", "MAP", ".", "tag", ")", "serialized_attribute", ".", "write", "(", "encode_length", "(", "_attribute", ")", ")", "sorted_items", "=", "_sorted_key_map", "(", "item", "=", "_attribute", ",", "transform", "=", "_transform_string_value", ")", "for", "key", ",", "value", ",", "_original_key", "in", "sorted_items", ":", "serialized_attribute", ".", "write", "(", "_serialize_string", "(", "key", ")", ")", "serialized_attribute", ".", "write", "(", 
"serialize_attribute", "(", "value", ")", ")", "return", "serialized_attribute", ".", "getvalue", "(", ")", "def", "_serialize_function", "(", "dynamodb_tag", ")", ":", "# type: (str) -> Callable[[dynamodb_types.ATTRIBUTE], bytes]", "\"\"\"Locates the appropriate serialization function for the specified DynamoDB attribute tag.\"\"\"", "serialize_functions", "=", "{", "Tag", ".", "BINARY", ".", "dynamodb_tag", ":", "_serialize_binary", ",", "Tag", ".", "BINARY_SET", ".", "dynamodb_tag", ":", "_serialize_binary_set", ",", "Tag", ".", "NUMBER", ".", "dynamodb_tag", ":", "_serialize_number", ",", "Tag", ".", "NUMBER_SET", ".", "dynamodb_tag", ":", "_serialize_number_set", ",", "Tag", ".", "STRING", ".", "dynamodb_tag", ":", "_serialize_string", ",", "Tag", ".", "STRING_SET", ".", "dynamodb_tag", ":", "_serialize_string_set", ",", "Tag", ".", "BOOLEAN", ".", "dynamodb_tag", ":", "_serialize_boolean", ",", "Tag", ".", "NULL", ".", "dynamodb_tag", ":", "_serialize_null", ",", "Tag", ".", "LIST", ".", "dynamodb_tag", ":", "_serialize_list", ",", "Tag", ".", "MAP", ".", "dynamodb_tag", ":", "_serialize_map", ",", "}", "try", ":", "return", "serialize_functions", "[", "dynamodb_tag", "]", "except", "KeyError", ":", "raise", "SerializationError", "(", "'Unsupported DynamoDB data type: \"{}\"'", ".", "format", "(", "dynamodb_tag", ")", ")", "if", "not", "isinstance", "(", "attribute", ",", "dict", ")", ":", "raise", "TypeError", "(", "'Invalid attribute type \"{}\": must be dict'", ".", "format", "(", "type", "(", "attribute", ")", ")", ")", "if", "len", "(", "attribute", ")", "!=", "1", ":", "raise", "SerializationError", "(", "\"cannot serialize attribute: incorrect number of members {} != 1\"", ".", "format", "(", "len", "(", "attribute", ")", ")", ")", "key", ",", "value", "=", "list", "(", "attribute", ".", "items", "(", ")", ")", "[", "0", "]", "return", "_serialize_function", "(", "key", ")", "(", "value", ")" ]
37.2
15.561905
def _connect(self, server_info):
    """Connect to the workbench server"""
    # NOTE: Python 2 code (print statements).

    # First we do a temp connect with a short heartbeat
    _tmp_connect = zerorpc.Client(timeout=300, heartbeat=2)
    _tmp_connect.connect('tcp://' + server_info['server'] + ':' + server_info['port'])
    try:
        _tmp_connect._zerorpc_name()
        _tmp_connect.close()
        del _tmp_connect
    except zerorpc.exceptions.LostRemote:
        print '%sError: Could not connect to Workbench Server at %s:%s%s' % \
              (color.Red, server_info['server'], server_info['port'], color.Normal)
        sys.exit(1)

    # Okay do the real connection
    if self.workbench:
        self.workbench.close()
    self.workbench = zerorpc.Client(timeout=300, heartbeat=60)
    self.workbench.connect('tcp://' + server_info['server'] + ':' + server_info['port'])
    print '\n%s<<< Connected: %s:%s >>>%s' % (color.Green, server_info['server'],
                                              server_info['port'], color.Normal)
[ "def", "_connect", "(", "self", ",", "server_info", ")", ":", "# First we do a temp connect with a short heartbeat", "_tmp_connect", "=", "zerorpc", ".", "Client", "(", "timeout", "=", "300", ",", "heartbeat", "=", "2", ")", "_tmp_connect", ".", "connect", "(", "'tcp://'", "+", "server_info", "[", "'server'", "]", "+", "':'", "+", "server_info", "[", "'port'", "]", ")", "try", ":", "_tmp_connect", ".", "_zerorpc_name", "(", ")", "_tmp_connect", ".", "close", "(", ")", "del", "_tmp_connect", "except", "zerorpc", ".", "exceptions", ".", "LostRemote", ":", "print", "'%sError: Could not connect to Workbench Server at %s:%s%s'", "%", "(", "color", ".", "Red", ",", "server_info", "[", "'server'", "]", ",", "server_info", "[", "'port'", "]", ",", "color", ".", "Normal", ")", "sys", ".", "exit", "(", "1", ")", "# Okay do the real connection", "if", "self", ".", "workbench", ":", "self", ".", "workbench", ".", "close", "(", ")", "self", ".", "workbench", "=", "zerorpc", ".", "Client", "(", "timeout", "=", "300", ",", "heartbeat", "=", "60", ")", "self", ".", "workbench", ".", "connect", "(", "'tcp://'", "+", "server_info", "[", "'server'", "]", "+", "':'", "+", "server_info", "[", "'port'", "]", ")", "print", "'\\n%s<<< Connected: %s:%s >>>%s'", "%", "(", "color", ".", "Green", ",", "server_info", "[", "'server'", "]", ",", "server_info", "[", "'port'", "]", ",", "color", ".", "Normal", ")" ]
47.619048
24.142857
def import_settings(dotted_path):
    """Import a settings instance from a python dotted path.

    The last item in the dotted path must be the settings instance.

    Example: import_settings('path.to.settings')
    """
    if "." in dotted_path:
        module, name = dotted_path.rsplit(".", 1)
    else:
        raise click.UsageError(
            "invalid path to settings instance: {}".format(dotted_path)
        )
    try:
        module = importlib.import_module(module)
    except ImportError as e:
        raise click.UsageError(e)
    try:
        return getattr(module, name)
    except AttributeError as e:
        raise click.UsageError(e)
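For example (the module path is illustrative):

# Imports the module myapp.config and returns the object bound to `settings`.
settings = import_settings('myapp.config.settings')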
[ "def", "import_settings", "(", "dotted_path", ")", ":", "if", "\".\"", "in", "dotted_path", ":", "module", ",", "name", "=", "dotted_path", ".", "rsplit", "(", "\".\"", ",", "1", ")", "else", ":", "raise", "click", ".", "UsageError", "(", "\"invalid path to settings instance: {}\"", ".", "format", "(", "dotted_path", ")", ")", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "module", ")", "except", "ImportError", "as", "e", ":", "raise", "click", ".", "UsageError", "(", "e", ")", "try", ":", "return", "getattr", "(", "module", ",", "name", ")", "except", "AttributeError", "as", "e", ":", "raise", "click", ".", "UsageError", "(", "e", ")" ]
29.428571
16.428571
def run(self, input_files, url=None, verbose=0):
    """
    Run the headless browser with the given input.

    If url is given, only that url is tested and input_files is ignored.

    :param url:
    :param input_files: the name of the file in "index url" format, i.e.
                        1, www.facebook.com
                        1, www.google.com
                        ...
    :param verbose:
    :return:
    """
    if not url and not input_files:
        logging.warning("No input file")
        return {"error": "no inputs"}

    results = {}
    self.open_virtual_display()
    if verbose > 0:
        log_file = sys.stdout
    else:
        log_file = None

    # set up firefox driver
    self.binary = FirefoxBinary(os.path.join(self.cur_path, 'firefox/firefox'),
                                log_file=log_file)
    self.profile = self.setup_profile()
    self.driver = webdriver.Firefox(firefox_profile=self.profile,
                                    firefox_binary=self.binary,
                                    timeout=60)
    self.driver.set_page_load_timeout(60)

    isfile = False
    if url:
        host, path = self.divide_url(url)
        results[url] = self.get(host, path)
    else:
        isfile = True
        for input_file in input_files.items():
            logging.info("Testing input file %s..." % (input_file[0]))
            self.run_file(input_file, results)

    # foctor_core will quit the driver by itself, so we only quit the driver
    # when we don't use foctor_core
    if not isfile:
        logging.info("Quit driver")
        self.driver.quit()

    self.close_virtual_display()
    logging.debug("Deleting har folder")
    shutil.rmtree(os.path.join(self.cur_path, 'har'))
    return results
[ "def", "run", "(", "self", ",", "input_files", ",", "url", "=", "None", ",", "verbose", "=", "0", ")", ":", "if", "not", "url", "and", "not", "input_files", ":", "logging", ".", "warning", "(", "\"No input file\"", ")", "return", "{", "\"error\"", ":", "\"no inputs\"", "}", "results", "=", "{", "}", "self", ".", "open_virtual_display", "(", ")", "if", "verbose", ">", "0", ":", "log_file", "=", "sys", ".", "stdout", "else", ":", "log_file", "=", "None", "# set up firefox driver ", "self", ".", "binary", "=", "FirefoxBinary", "(", "os", ".", "path", ".", "join", "(", "self", ".", "cur_path", ",", "'firefox/firefox'", ")", ",", "log_file", "=", "log_file", ")", "self", ".", "profile", "=", "self", ".", "setup_profile", "(", ")", "self", ".", "driver", "=", "webdriver", ".", "Firefox", "(", "firefox_profile", "=", "self", ".", "profile", ",", "firefox_binary", "=", "self", ".", "binary", ",", "timeout", "=", "60", ")", "self", ".", "driver", ".", "set_page_load_timeout", "(", "60", ")", "isfile", "=", "False", "if", "url", ":", "host", ",", "path", "=", "self", ".", "divide_url", "(", "url", ")", "results", "[", "url", "]", "=", "self", ".", "get", "(", "host", ",", "path", ")", "else", ":", "isfile", "=", "True", "for", "input_file", "in", "input_files", ".", "items", "(", ")", ":", "logging", ".", "info", "(", "\"Testing input file %s...\"", "%", "(", "input_file", "[", "0", "]", ")", ")", "self", ".", "run_file", "(", "input_file", ",", "results", ")", "# foctor_core will quit the driver by itself so we only quit the driver when we don't use foctor core", "if", "not", "isfile", ":", "logging", ".", "info", "(", "\"Quit driver\"", ")", "self", ".", "driver", ".", "quit", "(", ")", "self", ".", "close_virtual_display", "(", ")", "logging", ".", "debug", "(", "\"Deleting har folder\"", ")", "shutil", ".", "rmtree", "(", "os", ".", "path", ".", "join", "(", "self", ".", "cur_path", ",", "'har'", ")", ")", "return", "results" ]
35.428571
19.673469
def add(self, obj):
    """Add an object.

    Args:
        obj (dict): Object to be added

    Returns:
        Object: Object with id

    Raises:
        TypeError: If the added object is not a dict
        MultipleInvalid: If the input object is invalid
    """
    if not isinstance(obj, dict):
        raise TypeError("Add object should be a dict object")

    obj = self.validation(obj)
    obj["id"] = self.maxId + 1
    obj = self._cast_model(obj)
    self.model.db.append(obj)
    if not self._batch.enable.is_set():
        self.model.save_db()
    return obj
[ "def", "add", "(", "self", ",", "obj", ")", ":", "if", "not", "isinstance", "(", "obj", ",", "dict", ")", ":", "raise", "TypeError", "(", "\"Add object should be a dict object\"", ")", "obj", "=", "self", ".", "validation", "(", "obj", ")", "obj", "[", "\"id\"", "]", "=", "self", ".", "maxId", "+", "1", "obj", "=", "self", ".", "_cast_model", "(", "obj", ")", "self", ".", "model", ".", "db", ".", "append", "(", "obj", ")", "if", "not", "self", ".", "_batch", ".", "enable", ".", "is_set", "(", ")", ":", "self", ".", "model", ".", "save_db", "(", ")", "return", "obj" ]
31.75
12.45
def install(name=None, refresh=False, version=None, pkgs=None, **kwargs):
    '''
    Install packages using the pkgutil tool.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.install <package_name>
        salt '*' pkg.install SMClgcc346

    Multiple Package Installation Options:

    pkgs
        A list of packages to install from OpenCSW. Must be passed as a
        python list.

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.install pkgs='["foo", "bar"]'
            salt '*' pkg.install pkgs='["foo", {"bar": "1.2.3"}]'

    Returns a dict containing the new package names and versions::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}
    '''
    if refresh:
        refresh_db()

    try:
        # Ignore 'sources' argument
        pkg_params = __salt__['pkg_resource.parse_targets'](name, pkgs, **kwargs)[0]
    except MinionError as exc:
        raise CommandExecutionError(exc)

    if not pkg_params:
        return {}

    if pkgs is None and version and len(pkg_params) == 1:
        pkg_params = {name: version}

    targets = []
    for param, pkgver in six.iteritems(pkg_params):
        if pkgver is None:
            targets.append(param)
        else:
            targets.append('{0}-{1}'.format(param, pkgver))

    cmd = '/opt/csw/bin/pkgutil -yu {0}'.format(' '.join(targets))
    old = list_pkgs()
    __salt__['cmd.run_all'](cmd)
    __context__.pop('pkg.list_pkgs', None)
    new = list_pkgs()
    return salt.utils.data.compare_dicts(old, new)
[ "def", "install", "(", "name", "=", "None", ",", "refresh", "=", "False", ",", "version", "=", "None", ",", "pkgs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "refresh", ":", "refresh_db", "(", ")", "try", ":", "# Ignore 'sources' argument", "pkg_params", "=", "__salt__", "[", "'pkg_resource.parse_targets'", "]", "(", "name", ",", "pkgs", ",", "*", "*", "kwargs", ")", "[", "0", "]", "except", "MinionError", "as", "exc", ":", "raise", "CommandExecutionError", "(", "exc", ")", "if", "not", "pkg_params", ":", "return", "{", "}", "if", "pkgs", "is", "None", "and", "version", "and", "len", "(", "pkg_params", ")", "==", "1", ":", "pkg_params", "=", "{", "name", ":", "version", "}", "targets", "=", "[", "]", "for", "param", ",", "pkgver", "in", "six", ".", "iteritems", "(", "pkg_params", ")", ":", "if", "pkgver", "is", "None", ":", "targets", ".", "append", "(", "param", ")", "else", ":", "targets", ".", "append", "(", "'{0}-{1}'", ".", "format", "(", "param", ",", "pkgver", ")", ")", "cmd", "=", "'/opt/csw/bin/pkgutil -yu {0}'", ".", "format", "(", "' '", ".", "join", "(", "targets", ")", ")", "old", "=", "list_pkgs", "(", ")", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "__context__", ".", "pop", "(", "'pkg.list_pkgs'", ",", "None", ")", "new", "=", "list_pkgs", "(", ")", "return", "salt", ".", "utils", ".", "data", ".", "compare_dicts", "(", "old", ",", "new", ")" ]
27.2
23.633333
def all(self, list_id, subscriber_hash, **queryparams):
    """
    Get the last 50 events of a member's activity on a specific list,
    including opens, clicks, and unsubscribes.

    :param list_id: The unique id for the list.
    :type list_id: :py:class:`str`
    :param subscriber_hash: The MD5 hash of the lowercase version of the
        list member's email address.
    :type subscriber_hash: :py:class:`str`
    :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
    """
    subscriber_hash = check_subscriber_hash(subscriber_hash)
    self.list_id = list_id
    self.subscriber_hash = subscriber_hash
    return self._mc_client._get(
        url=self._build_path(list_id, 'members', subscriber_hash, 'activity'),
        **queryparams
    )
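The subscriber hash named in the docstring is the MD5 of the lowercased email address, computable with the standard library:

import hashlib

subscriber_hash = hashlib.md5('[email protected]'.lower().encode('utf-8')).hexdigest()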
[ "def", "all", "(", "self", ",", "list_id", ",", "subscriber_hash", ",", "*", "*", "queryparams", ")", ":", "subscriber_hash", "=", "check_subscriber_hash", "(", "subscriber_hash", ")", "self", ".", "list_id", "=", "list_id", "self", ".", "subscriber_hash", "=", "subscriber_hash", "return", "self", ".", "_mc_client", ".", "_get", "(", "url", "=", "self", ".", "_build_path", "(", "list_id", ",", "'members'", ",", "subscriber_hash", ",", "'activity'", ")", ",", "*", "*", "queryparams", ")" ]
46.722222
16.611111
def start_fsweep(self, start=None, stop=None, step=None):
    """Starts a frequency sweep.

    :param start: Sets the start frequency.
    :param stop: Sets the target frequency.
    :param step: Sets the frequency step.
    """
    if start:
        self.frequency_start = start
    if stop:
        self.frequency_stop = stop
    if step:
        self.frequency_step = step
    self._write(('SWEEP', Integer), 1)
[ "def", "start_fsweep", "(", "self", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "step", "=", "None", ")", ":", "if", "start", ":", "self", ".", "frequency_start", "=", "start", "if", "stop", ":", "self", ".", "frequency_stop", "=", "stop", "if", "step", ":", "self", ".", "frequency_step", "=", "step", "self", ".", "_write", "(", "(", "'SWEEP'", ",", "Integer", ")", ",", "1", ")" ]
30
12.866667
def get_unweighted_sources(graph: BELGraph, key: Optional[str] = None) -> Iterable[BaseEntity]:
    """Get nodes on the periphery of the sub-graph that do not have an annotation for the given key.

    :param graph: A BEL graph
    :param key: The key in the node data dictionary representing the experimental data
    :return: An iterator over BEL nodes that are unannotated and on the periphery of this sub-graph
    """
    if key is None:
        key = WEIGHT

    for node in graph:
        if is_unweighted_source(graph, node, key):
            yield node
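As a generator it is usually consumed directly; a short sketch (the key name is illustrative):

for node in get_unweighted_sources(graph, key='weight'):
    print(node)  # periphery nodes still missing the experimental-data annotation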
[ "def", "get_unweighted_sources", "(", "graph", ":", "BELGraph", ",", "key", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Iterable", "[", "BaseEntity", "]", ":", "if", "key", "is", "None", ":", "key", "=", "WEIGHT", "for", "node", "in", "graph", ":", "if", "is_unweighted_source", "(", "graph", ",", "node", ",", "key", ")", ":", "yield", "node" ]
42.076923
25.923077
def p_annotation_ref(self, p):
    """annotation_ref : AT ID NL
                      | AT ID DOT ID NL"""
    if len(p) < 5:
        p[0] = AstAnnotationRef(self.path, p.lineno(1), p.lexpos(1), p[2], None)
    else:
        p[0] = AstAnnotationRef(self.path, p.lineno(1), p.lexpos(1), p[4], p[2])
[ "def", "p_annotation_ref", "(", "self", ",", "p", ")", ":", "if", "len", "(", "p", ")", "<", "5", ":", "p", "[", "0", "]", "=", "AstAnnotationRef", "(", "self", ".", "path", ",", "p", ".", "lineno", "(", "1", ")", ",", "p", ".", "lexpos", "(", "1", ")", ",", "p", "[", "2", "]", ",", "None", ")", "else", ":", "p", "[", "0", "]", "=", "AstAnnotationRef", "(", "self", ".", "path", ",", "p", ".", "lineno", "(", "1", ")", ",", "p", ".", "lexpos", "(", "1", ")", ",", "p", "[", "4", "]", ",", "p", "[", "2", "]", ")" ]
45
20.428571
def update_lun(self, add_luns=None, remove_luns=None):
    """Updates the LUNs in the CG, adding the ones in `add_luns` and
    removing the ones in `remove_luns`."""
    if not add_luns and not remove_luns:
        log.debug("Empty add_luns and remove_luns passed in, "
                  "skip update_lun.")
        return RESP_OK
    lun_add = self._prepare_luns_add(add_luns)
    lun_remove = self._prepare_luns_remove(remove_luns, True)
    return self.modify(lun_add=lun_add, lun_remove=lun_remove)
[ "def", "update_lun", "(", "self", ",", "add_luns", "=", "None", ",", "remove_luns", "=", "None", ")", ":", "if", "not", "add_luns", "and", "not", "remove_luns", ":", "log", ".", "debug", "(", "\"Empty add_luns and remove_luns passed in, \"", "\"skip update_lun.\"", ")", "return", "RESP_OK", "lun_add", "=", "self", ".", "_prepare_luns_add", "(", "add_luns", ")", "lun_remove", "=", "self", ".", "_prepare_luns_remove", "(", "remove_luns", ",", "True", ")", "return", "self", ".", "modify", "(", "lun_add", "=", "lun_add", ",", "lun_remove", "=", "lun_remove", ")" ]
52.5
12
def browse(self, cat=None, subCat=None):
    """Browse categories.

    If neither cat nor subCat is specified, return a list of categories;
    otherwise return a list of apps using cat (category ID) and subCat
    (subcategory ID) as filters.
    """
    path = BROWSE_URL + "?c=3"
    if cat is not None:
        path += "&cat={}".format(requests.utils.quote(cat))
    if subCat is not None:
        path += "&ctr={}".format(requests.utils.quote(subCat))
    data = self.executeRequestApi2(path)
    return utils.parseProtobufObj(data.payload.browseResponse)
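A hedged usage sketch against an authenticated client instance; the category and subcategory IDs here are illustrative, not a confirmed list:

categories = api.browse()                                     # top-level category list
action_games = api.browse(cat='GAME', subCat='GAME_ACTION')   # apps filtered by category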
[ "def", "browse", "(", "self", ",", "cat", "=", "None", ",", "subCat", "=", "None", ")", ":", "path", "=", "BROWSE_URL", "+", "\"?c=3\"", "if", "cat", "is", "not", "None", ":", "path", "+=", "\"&cat={}\"", ".", "format", "(", "requests", ".", "utils", ".", "quote", "(", "cat", ")", ")", "if", "subCat", "is", "not", "None", ":", "path", "+=", "\"&ctr={}\"", ".", "format", "(", "requests", ".", "utils", ".", "quote", "(", "subCat", ")", ")", "data", "=", "self", ".", "executeRequestApi2", "(", "path", ")", "return", "utils", ".", "parseProtobufObj", "(", "data", ".", "payload", ".", "browseResponse", ")" ]
48.75
14.916667
def dtcurrent(self, value):
    """Set value of `dtcurrent`, update derivatives if needed."""
    assert isinstance(value, bool)
    if value and self.dparamscurrent:
        raise RuntimeError("Can't set both dparamscurrent and dtcurrent True")
    if value != self.dtcurrent:
        self._dtcurrent = value
        self._updateInternals()
[ "def", "dtcurrent", "(", "self", ",", "value", ")", ":", "assert", "isinstance", "(", "value", ",", "bool", ")", "if", "value", "and", "self", ".", "dparamscurrent", ":", "raise", "RuntimeError", "(", "\"Can't set both dparamscurrent and dtcurrent True\"", ")", "if", "value", "!=", "self", ".", "dtcurrent", ":", "self", ".", "_dtcurrent", "=", "value", "self", ".", "_updateInternals", "(", ")" ]
45.25
9.125
def _flatterm_iter(cls, expression: Expression) -> Iterator[TermAtom]:
    """Generator that yields the atoms of the expressions in prefix notation
    with operation end markers."""
    if isinstance(expression, Operation):
        yield type(expression)
        for operand in op_iter(expression):
            yield from cls._flatterm_iter(operand)
        yield OPERATION_END
    elif isinstance(expression, SymbolWildcard):
        yield expression.symbol_type
    elif isinstance(expression, (Symbol, Wildcard)):
        yield expression
    else:
        assert False, "Unreachable unless a new unsupported expression type is added."
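The flatterm idea can be illustrated without the library's types: a nested term is emitted in prefix order, with an explicit end marker closing each operation. A standalone sketch, where the `END` sentinel and the tuple encoding are assumptions rather than the library's representation:

END = object()  # stands in for OPERATION_END

def flatterm(term):
    """Flatten ('f', arg1, arg2) trees into head, flattened args, END."""
    if isinstance(term, tuple):       # operation: head followed by operands
        head, *args = term
        yield head
        for arg in args:
            yield from flatterm(arg)
        yield END
    else:                             # symbol / leaf
        yield term

# f(g(x), y) flattens to ['f', 'g', 'x', END, 'y', END]
print(list(flatterm(('f', ('g', 'x'), 'y'))))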
[ "def", "_flatterm_iter", "(", "cls", ",", "expression", ":", "Expression", ")", "->", "Iterator", "[", "TermAtom", "]", ":", "if", "isinstance", "(", "expression", ",", "Operation", ")", ":", "yield", "type", "(", "expression", ")", "for", "operand", "in", "op_iter", "(", "expression", ")", ":", "yield", "from", "cls", ".", "_flatterm_iter", "(", "operand", ")", "yield", "OPERATION_END", "elif", "isinstance", "(", "expression", ",", "SymbolWildcard", ")", ":", "yield", "expression", ".", "symbol_type", "elif", "isinstance", "(", "expression", ",", "(", "Symbol", ",", "Wildcard", ")", ")", ":", "yield", "expression", "else", ":", "assert", "False", ",", "\"Unreachable unless a new unsupported expression type is added.\"" ]
51.615385
14.461538
def lightcurve(self, name, **kwargs):
    """Generate a lightcurve for the named source. The function will
    complete the basic analysis steps for each bin and perform a
    likelihood fit for each bin. Extracted values (along with errors)
    are Integral Flux, spectral model, Spectral index, TS value,
    pred. # of photons. Note: successful calculation of
    TS:subscript:`var` requires at least one free background
    parameter and a previously optimized ROI model.

    Parameters
    ----------
    name: str
        source name

    {options}

    Returns
    -------
    LightCurve : dict
        Dictionary containing output of the LC analysis
    """
    name = self.roi.get_source_by_name(name).name

    # Create schema for method configuration
    schema = ConfigSchema(self.defaults['lightcurve'],
                          optimizer=self.defaults['optimizer'])
    schema.add_option('prefix', '')
    config = utils.create_dict(self.config['lightcurve'],
                               optimizer=self.config['optimizer'])
    config = schema.create_config(config, **kwargs)

    self.logger.info('Computing Lightcurve for %s' % name)

    o = self._make_lc(name, **config)
    filename = utils.format_filename(self.workdir, 'lightcurve',
                                     prefix=[config['prefix'],
                                             name.lower().replace(' ', '_')])

    o['file'] = None
    if config['write_fits']:
        o['file'] = os.path.basename(filename) + '.fits'
        self._make_lc_fits(o, filename + '.fits', **config)

    if config['write_npy']:
        np.save(filename + '.npy', o)

    self.logger.info('Finished Lightcurve')

    return o
[ "def", "lightcurve", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "# Create schema for method configuration", "schema", "=", "ConfigSchema", "(", "self", ".", "defaults", "[", "'lightcurve'", "]", ",", "optimizer", "=", "self", ".", "defaults", "[", "'optimizer'", "]", ")", "schema", ".", "add_option", "(", "'prefix'", ",", "''", ")", "config", "=", "utils", ".", "create_dict", "(", "self", ".", "config", "[", "'lightcurve'", "]", ",", "optimizer", "=", "self", ".", "config", "[", "'optimizer'", "]", ")", "config", "=", "schema", ".", "create_config", "(", "config", ",", "*", "*", "kwargs", ")", "self", ".", "logger", ".", "info", "(", "'Computing Lightcurve for %s'", "%", "name", ")", "o", "=", "self", ".", "_make_lc", "(", "name", ",", "*", "*", "config", ")", "filename", "=", "utils", ".", "format_filename", "(", "self", ".", "workdir", ",", "'lightcurve'", ",", "prefix", "=", "[", "config", "[", "'prefix'", "]", ",", "name", ".", "lower", "(", ")", ".", "replace", "(", "' '", ",", "'_'", ")", "]", ")", "o", "[", "'file'", "]", "=", "None", "if", "config", "[", "'write_fits'", "]", ":", "o", "[", "'file'", "]", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "+", "'.fits'", "self", ".", "_make_lc_fits", "(", "o", ",", "filename", "+", "'.fits'", ",", "*", "*", "config", ")", "if", "config", "[", "'write_npy'", "]", ":", "np", ".", "save", "(", "filename", "+", "'.npy'", ",", "o", ")", "self", ".", "logger", ".", "info", "(", "'Finished Lightcurve'", ")", "return", "o" ]
35.27451
23.137255
def _init_po_files(target, source, env):
    """ Action function for `POInit` builder. """
    nop = lambda target, source, env: 0
    if 'POAUTOINIT' in env:
        autoinit = env['POAUTOINIT']
    else:
        autoinit = False
    # Well, if everything outside works well, this loop should do single
    # iteration. Otherwise we are rebuilding all the targets even if just
    # one has changed (but is this our fault?).
    for tgt in target:
        if not tgt.exists():
            if autoinit:
                action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
            else:
                msg = 'File ' + repr(str(tgt)) + ' does not exist. ' \
                    + 'If you are a translator, you can create it through: \n' \
                    + '$MSGINITCOM'
                action = SCons.Action.Action(nop, msg)
            status = action([tgt], source, env)
            if status:
                return status
    return 0
[ "def", "_init_po_files", "(", "target", ",", "source", ",", "env", ")", ":", "nop", "=", "lambda", "target", ",", "source", ",", "env", ":", "0", "if", "'POAUTOINIT'", "in", "env", ":", "autoinit", "=", "env", "[", "'POAUTOINIT'", "]", "else", ":", "autoinit", "=", "False", "# Well, if everything outside works well, this loop should do single", "# iteration. Otherwise we are rebuilding all the targets even, if just", "# one has changed (but is this our fault?).", "for", "tgt", "in", "target", ":", "if", "not", "tgt", ".", "exists", "(", ")", ":", "if", "autoinit", ":", "action", "=", "SCons", ".", "Action", ".", "Action", "(", "'$MSGINITCOM'", ",", "'$MSGINITCOMSTR'", ")", "else", ":", "msg", "=", "'File '", "+", "repr", "(", "str", "(", "tgt", ")", ")", "+", "' does not exist. '", "+", "'If you are a translator, you can create it through: \\n'", "+", "'$MSGINITCOM'", "action", "=", "SCons", ".", "Action", ".", "Action", "(", "nop", ",", "msg", ")", "status", "=", "action", "(", "[", "tgt", "]", ",", "source", ",", "env", ")", "if", "status", ":", "return", "status", "return", "0" ]
41.954545
16.909091
def parse_epoch(self, epoch_str):
    """
    Converts the epoch field to a float value (adding the truncated 24...
    Julian Date prefix), or ``None`` if there is no epoch in the GCVS record.
    """
    epoch = epoch_str.translate(TRANSLATION_MAP)[:10].strip()
    return 2400000.0 + float(epoch) if epoch else None
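For example, a GCVS epoch field of "43200.042" stands for the full Julian Date 2443200.042 (the field value is illustrative):

epoch_field = "43200.042"              # as stored in a GCVS record
print(2400000.0 + float(epoch_field))  # -> 2443200.042 (full Julian Date)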
[ "def", "parse_epoch", "(", "self", ",", "epoch_str", ")", ":", "epoch", "=", "epoch_str", ".", "translate", "(", "TRANSLATION_MAP", ")", "[", ":", "10", "]", ".", "strip", "(", ")", "return", "2400000.0", "+", "float", "(", "epoch", ")", "if", "epoch", "else", "None" ]
43.142857
13.428571
def import_and_get_class(path_to_pex, python_class_name):
    """Imports and loads a class from a given pex file path and python class name

    For example, if you want to get a class called `Sample` in
    /some-path/sample.pex/heron/examples/src/python/sample.py,
    ``path_to_pex`` needs to be ``/some-path/sample.pex``, and
    ``python_class_name`` needs to be
    ``heron.examples.src.python.sample.Sample``
    """
    abs_path_to_pex = os.path.abspath(path_to_pex)

    Log.debug("Add a pex to the path: %s" % abs_path_to_pex)
    Log.debug("In import_and_get_class with cls_name: %s" % python_class_name)
    split = python_class_name.split('.')
    from_path = '.'.join(split[:-1])
    import_name = python_class_name.split('.')[-1]

    Log.debug("From path: %s, import name: %s" % (from_path, import_name))

    # Resolve duplicate package suffix problem (heron.), if the top level
    # package name is heron
    if python_class_name.startswith("heron."):
        try:
            mod = resolve_heron_suffix_issue(abs_path_to_pex, python_class_name)
            return getattr(mod, import_name)
        except:
            Log.error("Could not resolve class %s with special handling" % python_class_name)

    # level=-1 (implicit relative import search) is Python 2 only.
    mod = __import__(from_path, fromlist=[import_name], level=-1)
    Log.debug("Imported module: %s" % str(mod))
    return getattr(mod, import_name)
[ "def", "import_and_get_class", "(", "path_to_pex", ",", "python_class_name", ")", ":", "abs_path_to_pex", "=", "os", ".", "path", ".", "abspath", "(", "path_to_pex", ")", "Log", ".", "debug", "(", "\"Add a pex to the path: %s\"", "%", "abs_path_to_pex", ")", "Log", ".", "debug", "(", "\"In import_and_get_class with cls_name: %s\"", "%", "python_class_name", ")", "split", "=", "python_class_name", ".", "split", "(", "'.'", ")", "from_path", "=", "'.'", ".", "join", "(", "split", "[", ":", "-", "1", "]", ")", "import_name", "=", "python_class_name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "Log", ".", "debug", "(", "\"From path: %s, import name: %s\"", "%", "(", "from_path", ",", "import_name", ")", ")", "# Resolve duplicate package suffix problem (heron.), if the top level package name is heron", "if", "python_class_name", ".", "startswith", "(", "\"heron.\"", ")", ":", "try", ":", "mod", "=", "resolve_heron_suffix_issue", "(", "abs_path_to_pex", ",", "python_class_name", ")", "return", "getattr", "(", "mod", ",", "import_name", ")", "except", ":", "Log", ".", "error", "(", "\"Could not resolve class %s with special handling\"", "%", "python_class_name", ")", "mod", "=", "__import__", "(", "from_path", ",", "fromlist", "=", "[", "import_name", "]", ",", "level", "=", "-", "1", ")", "Log", ".", "debug", "(", "\"Imported module: %s\"", "%", "str", "(", "mod", ")", ")", "return", "getattr", "(", "mod", ",", "import_name", ")" ]
43.793103
22.793103
def load_config_module():
    """
    If the config.py file exists, import it as a module. If it does not
    exist, call sys.exit() with a request to run oaepub configure.
    """
    import imp
    config_path = config_location()
    try:
        config = imp.load_source('config', config_path)
    except IOError:
        log.critical('Config file not found. oaepub exiting...')
        sys.exit('Config file not found. Please run \'oaepub configure\'')
    else:
        log.debug('Config file loaded from {0}'.format(config_path))
    return config
[ "def", "load_config_module", "(", ")", ":", "import", "imp", "config_path", "=", "config_location", "(", ")", "try", ":", "config", "=", "imp", ".", "load_source", "(", "'config'", ",", "config_path", ")", "except", "IOError", ":", "log", ".", "critical", "(", "'Config file not found. oaepub exiting...'", ")", "sys", ".", "exit", "(", "'Config file not found. Please run \\'oaepub configure\\''", ")", "else", ":", "log", ".", "debug", "(", "'Config file loaded from {0}'", ".", "format", "(", "config_path", ")", ")", "return", "config" ]
36.2
20.466667
def create_group(self, data):
    """Create a Group."""
    # http://teampasswordmanager.com/docs/api-groups/#create_group
    log.info('Create group with %s' % data)
    NewID = self.post('groups.json', data).get('id')
    log.info('Group has been created with ID %s' % NewID)
    return NewID
[ "def", "create_group", "(", "self", ",", "data", ")", ":", "# http://teampasswordmanager.com/docs/api-groups/#create_group", "log", ".", "info", "(", "'Create group with %s'", "%", "data", ")", "NewID", "=", "self", ".", "post", "(", "'groups.json'", ",", "data", ")", ".", "get", "(", "'id'", ")", "log", ".", "info", "(", "'Group has been created with ID %s'", "%", "NewID", ")", "return", "NewID" ]
44.571429
15
def fit_transform(self, X, y=None): """Encode categorical columns into label encoded columns Args: X (pandas.DataFrame): categorical columns to encode Returns: X (pandas.DataFrame): label encoded columns """ self.label_encoders = [None] * X.shape[1] self.label_maxes = [None] * X.shape[1] for i, col in enumerate(X.columns): self.label_encoders[i], self.label_maxes[i] = \ self._get_label_encoder_and_max(X[col]) X.loc[:, col] = X[col].fillna(NAN_INT).map(self.label_encoders[i]).fillna(0) return X
[ "def", "fit_transform", "(", "self", ",", "X", ",", "y", "=", "None", ")", ":", "self", ".", "label_encoders", "=", "[", "None", "]", "*", "X", ".", "shape", "[", "1", "]", "self", ".", "label_maxes", "=", "[", "None", "]", "*", "X", ".", "shape", "[", "1", "]", "for", "i", ",", "col", "in", "enumerate", "(", "X", ".", "columns", ")", ":", "self", ".", "label_encoders", "[", "i", "]", ",", "self", ".", "label_maxes", "[", "i", "]", "=", "self", ".", "_get_label_encoder_and_max", "(", "X", "[", "col", "]", ")", "X", ".", "loc", "[", ":", ",", "col", "]", "=", "X", "[", "col", "]", ".", "fillna", "(", "NAN_INT", ")", ".", "map", "(", "self", ".", "label_encoders", "[", "i", "]", ")", ".", "fillna", "(", "0", ")", "return", "X" ]
30.65
22.9
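A minimal standalone sketch of the fillna/map/fillna encoding step used above; the mapping dict and the NAN_INT sentinel value are assumptions standing in for the module's own label encoder and constant:

import pandas as pd

NAN_INT = -98765  # assumed sentinel for missing values
X = pd.DataFrame({'color': ['red', 'blue', None, 'red']})
label_encoder = {'blue': 1, 'red': 2, NAN_INT: 0}  # stand-in per-column encoder
X.loc[:, 'color'] = X['color'].fillna(NAN_INT).map(label_encoder).fillna(0)
print(X['color'].tolist())  # [2, 1, 0, 2]; unseen labels would fall back to 0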
def insert_many(cls, documents): """Insert a list of documents""" from mongoframes.queries import to_refs # Ensure all documents have been converted to frames frames = cls._ensure_frames(documents) # Send insert signal signal('insert').send(cls, frames=frames) # Prepare the documents to be inserted documents = [to_refs(f._document) for f in frames] # Bulk insert ids = cls.get_collection().insert_many(documents).inserted_ids # Apply the Ids to the frames for i, id in enumerate(ids): frames[i]._id = id # Send inserted signal signal('inserted').send(cls, frames=frames) return frames
[ "def", "insert_many", "(", "cls", ",", "documents", ")", ":", "from", "mongoframes", ".", "queries", "import", "to_refs", "# Ensure all documents have been converted to frames", "frames", "=", "cls", ".", "_ensure_frames", "(", "documents", ")", "# Send insert signal", "signal", "(", "'insert'", ")", ".", "send", "(", "cls", ",", "frames", "=", "frames", ")", "# Prepare the documents to be inserted", "documents", "=", "[", "to_refs", "(", "f", ".", "_document", ")", "for", "f", "in", "frames", "]", "# Bulk insert", "ids", "=", "cls", ".", "get_collection", "(", ")", ".", "insert_many", "(", "documents", ")", ".", "inserted_ids", "# Apply the Ids to the frames", "for", "i", ",", "id", "in", "enumerate", "(", "ids", ")", ":", "frames", "[", "i", "]", ".", "_id", "=", "id", "# Send inserted signal", "signal", "(", "'inserted'", ")", ".", "send", "(", "cls", ",", "frames", "=", "frames", ")", "return", "frames" ]
29.25
19.666667
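A hedged usage sketch; `Dragon` is a hypothetical Frame subclass and a configured MongoFrames connection to a live MongoDB is assumed:

# Hypothetical Frame subclass with name/breed fields.
dragons = Dragon.insert_many([
    Dragon(name='Burt', breed='Cold-drake'),
    Dragon(name='Fred', breed='Fire-drake'),
])
print([d._id for d in dragons])  # ObjectIds assigned by the bulk insert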
def _sensoryComputeInferenceMode(self, anchorInput): """ Infer the location from sensory input. Activate any cells with enough active synapses to this sensory input. Deactivate all other cells. @param anchorInput (numpy array) A sensory input. This will often come from a feature-location pair layer. """ if len(anchorInput) == 0: return overlaps = self.connections.computeActivity(anchorInput, self.connectedPermanence) activeSegments = np.where(overlaps >= self.activationThreshold)[0] sensorySupportedCells = np.unique( self.connections.mapSegmentsToCells(activeSegments)) inactivated = np.setdiff1d(self.activeCells, sensorySupportedCells) inactivatedIndices = np.in1d(self.cellsForActivePhases, inactivated).nonzero()[0] if inactivatedIndices.size > 0: self.activePhases = np.delete(self.activePhases, inactivatedIndices, axis=0) activated = np.setdiff1d(sensorySupportedCells, self.activeCells) # Find centers of point clouds if "corners" in self.anchoringMethod: activatedCoordsBase = np.transpose( np.unravel_index(sensorySupportedCells, self.cellDimensions)).astype('float') else: activatedCoordsBase = np.transpose( np.unravel_index(activated, self.cellDimensions)).astype('float') # Generate points to add activatedCoords = np.concatenate( [activatedCoordsBase + [iOffset, jOffset] for iOffset in self.cellCoordinateOffsets for jOffset in self.cellCoordinateOffsets] ) if "corners" in self.anchoringMethod: self.activePhases = activatedCoords / self.cellDimensions else: if activatedCoords.size > 0: self.activePhases = np.append(self.activePhases, activatedCoords / self.cellDimensions, axis=0) self._computeActiveCells() self.activeSegments = activeSegments self.sensoryAssociatedCells = sensorySupportedCells
[ "def", "_sensoryComputeInferenceMode", "(", "self", ",", "anchorInput", ")", ":", "if", "len", "(", "anchorInput", ")", "==", "0", ":", "return", "overlaps", "=", "self", ".", "connections", ".", "computeActivity", "(", "anchorInput", ",", "self", ".", "connectedPermanence", ")", "activeSegments", "=", "np", ".", "where", "(", "overlaps", ">=", "self", ".", "activationThreshold", ")", "[", "0", "]", "sensorySupportedCells", "=", "np", ".", "unique", "(", "self", ".", "connections", ".", "mapSegmentsToCells", "(", "activeSegments", ")", ")", "inactivated", "=", "np", ".", "setdiff1d", "(", "self", ".", "activeCells", ",", "sensorySupportedCells", ")", "inactivatedIndices", "=", "np", ".", "in1d", "(", "self", ".", "cellsForActivePhases", ",", "inactivated", ")", ".", "nonzero", "(", ")", "[", "0", "]", "if", "inactivatedIndices", ".", "size", ">", "0", ":", "self", ".", "activePhases", "=", "np", ".", "delete", "(", "self", ".", "activePhases", ",", "inactivatedIndices", ",", "axis", "=", "0", ")", "activated", "=", "np", ".", "setdiff1d", "(", "sensorySupportedCells", ",", "self", ".", "activeCells", ")", "# Find centers of point clouds", "if", "\"corners\"", "in", "self", ".", "anchoringMethod", ":", "activatedCoordsBase", "=", "np", ".", "transpose", "(", "np", ".", "unravel_index", "(", "sensorySupportedCells", ",", "self", ".", "cellDimensions", ")", ")", ".", "astype", "(", "'float'", ")", "else", ":", "activatedCoordsBase", "=", "np", ".", "transpose", "(", "np", ".", "unravel_index", "(", "activated", ",", "self", ".", "cellDimensions", ")", ")", ".", "astype", "(", "'float'", ")", "# Generate points to add", "activatedCoords", "=", "np", ".", "concatenate", "(", "[", "activatedCoordsBase", "+", "[", "iOffset", ",", "jOffset", "]", "for", "iOffset", "in", "self", ".", "cellCoordinateOffsets", "for", "jOffset", "in", "self", ".", "cellCoordinateOffsets", "]", ")", "if", "\"corners\"", "in", "self", ".", "anchoringMethod", ":", "self", ".", "activePhases", "=", "activatedCoords", "/", "self", ".", "cellDimensions", "else", ":", "if", "activatedCoords", ".", "size", ">", "0", ":", "self", ".", "activePhases", "=", "np", ".", "append", "(", "self", ".", "activePhases", ",", "activatedCoords", "/", "self", ".", "cellDimensions", ",", "axis", "=", "0", ")", "self", ".", "_computeActiveCells", "(", ")", "self", ".", "activeSegments", "=", "activeSegments", "self", ".", "sensoryAssociatedCells", "=", "sensorySupportedCells" ]
38.555556
19.925926
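The inactivation step above hinges on a setdiff1d/in1d pairing; a self-contained numpy illustration with made-up cell IDs:

import numpy as np

activeCells = np.array([3, 7, 9, 12])
sensorySupportedCells = np.array([7, 12, 20])
# Cells that were active but lack sensory support get inactivated...
inactivated = np.setdiff1d(activeCells, sensorySupportedCells)      # [3, 9]
# ...and their row indices are located so the matching phases can be deleted.
cellsForActivePhases = np.array([3, 7, 9, 12])
idx = np.in1d(cellsForActivePhases, inactivated).nonzero()[0]       # [0, 2]
print(inactivated, idx)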
def get_week_start_end_day(): """ Get the week start date and end date """ t = date.today() wd = t.weekday() return (t - timedelta(wd), t + timedelta(6 - wd))
[ "def", "get_week_start_end_day", "(", ")", ":", "t", "=", "date", ".", "today", "(", ")", "wd", "=", "t", ".", "weekday", "(", ")", "return", "(", "t", "-", "timedelta", "(", "wd", ")", ",", "t", "+", "timedelta", "(", "6", "-", "wd", ")", ")" ]
25.142857
9.142857
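A worked example of the helper above (self-contained; the hard-coded date stands in for date.today()):

from datetime import date, timedelta

t = date(2019, 5, 15)           # a Wednesday, so t.weekday() == 2
wd = t.weekday()
start, end = t - timedelta(wd), t + timedelta(6 - wd)
print(start, end)               # 2019-05-13 2019-05-19 (Monday through Sunday)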
def _get_language_with_alpha2_fallback(language_code): """ Lookup language code `language_code` (string) in the internal language codes, and if that fails, try to map `language_code` to the internal representation using the `getlang_by_alpha2` helper method. Returns either a le-utils Language object or None if both lookups fail. """ # 1. try to lookup `language` using internal representation language_obj = languages.getlang(language_code) # if language_obj not None, we know `language` is a valid language_id in the internal repr. if language_obj is None: # 2. try to match by two-letter ISO code language_obj = languages.getlang_by_alpha2(language_code) return language_obj
[ "def", "_get_language_with_alpha2_fallback", "(", "language_code", ")", ":", "# 1. try to lookup `language` using internal representation", "language_obj", "=", "languages", ".", "getlang", "(", "language_code", ")", "# if language_obj not None, we know `language` is a valid language_id in the internal repr.", "if", "language_obj", "is", "None", ":", "# 2. try to match by two-letter ISO code", "language_obj", "=", "languages", ".", "getlang_by_alpha2", "(", "language_code", ")", "return", "language_obj" ]
51.928571
20.785714
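A minimal sketch of the two-step fallback, with stand-in lookup tables in place of the le-utils helpers:

internal = {'en': 'English'}        # stands in for languages.getlang
alpha2 = {'fr': 'French'}           # stands in for languages.getlang_by_alpha2

def lookup(code):
    obj = internal.get(code)        # 1. internal representation
    if obj is None:
        obj = alpha2.get(code)      # 2. two-letter ISO fallback
    return obj

print(lookup('en'), lookup('fr'), lookup('xx'))  # English French None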
def uninstall_ruby(ruby, runas=None): ''' Uninstall a ruby implementation. ruby The version of ruby to uninstall. Should match one of the versions listed by :py:func:`rbenv.versions <salt.modules.rbenv.versions>`. runas The user under which to run rbenv. If not specified, then rbenv will be run as the user under which Salt is running. CLI Example: .. code-block:: bash salt '*' rbenv.uninstall_ruby 2.0.0-p0 ''' ruby = re.sub(r'^ruby-', '', ruby) _rbenv_exec(['uninstall', '--force', ruby], runas=runas) return True
[ "def", "uninstall_ruby", "(", "ruby", ",", "runas", "=", "None", ")", ":", "ruby", "=", "re", ".", "sub", "(", "r'^ruby-'", ",", "''", ",", "ruby", ")", "_rbenv_exec", "(", "[", "'uninstall'", ",", "'--force'", ",", "ruby", "]", ",", "runas", "=", "runas", ")", "return", "True" ]
27.714286
26.095238
def get_medium_url(self): """Returns the medium size image URL.""" if self.is_gif(): return self.get_absolute_url() return '%s%s-%s.jpg' % (settings.MEDIA_URL, self.get_name(), 'medium')
[ "def", "get_medium_url", "(", "self", ")", ":", "if", "self", ".", "is_gif", "(", ")", ":", "return", "self", ".", "get_absolute_url", "(", ")", "return", "'%s%s-%s.jpg'", "%", "(", "settings", ".", "MEDIA_URL", ",", "self", ".", "get_name", "(", ")", ",", "'medium'", ")" ]
43.6
14
def po_file(self): """Return the parsed .po file that is currently being translated/viewed. (Note that this parsing also involves marking up each entry with a hash of its contents.) """ if self.po_file_is_writable: # If we can write changes to file, then we pull it up fresh with # each request. # XXX: brittle; what if this path doesn't exist? Isn't a .po file? po_file = pofile(self.po_file_path, wrapwidth=rosetta_settings.POFILE_WRAP_WIDTH) for entry in po_file: # Entry is an object representing a single entry in the catalog. # We iterate through the *entire catalog*, pasting a hashed # value of the meat of each entry on its side in an attribute # called "md5hash". str_to_hash = ( six.text_type(entry.msgid) + six.text_type(entry.msgstr) + six.text_type(entry.msgctxt or '') ).encode('utf8') entry.md5hash = hashlib.md5(str_to_hash).hexdigest() else: storage = get_storage(self.request) po_file = storage.get(self.po_file_cache_key, None) if not po_file: po_file = pofile(self.po_file_path) for entry in po_file: # Entry is an object representing a single entry in the # catalog. We iterate through the entire catalog, pasting # a hashed value of the meat of each entry on its side in # an attribute called "md5hash". str_to_hash = ( six.text_type(entry.msgid) + six.text_type(entry.msgstr) + six.text_type(entry.msgctxt or '') ).encode('utf8') entry.md5hash = hashlib.new('md5', str_to_hash).hexdigest() storage.set(self.po_file_cache_key, po_file) return po_file
[ "def", "po_file", "(", "self", ")", ":", "if", "self", ".", "po_file_is_writable", ":", "# If we can write changes to file, then we pull it up fresh with", "# each request.", "# XXX: brittle; what if this path doesn't exist? Isn't a .po file?", "po_file", "=", "pofile", "(", "self", ".", "po_file_path", ",", "wrapwidth", "=", "rosetta_settings", ".", "POFILE_WRAP_WIDTH", ")", "for", "entry", "in", "po_file", ":", "# Entry is an object representing a single entry in the catalog.", "# We interate through the *entire catalog*, pasting a hashed", "# value of the meat of each entry on its side in an attribute", "# called \"md5hash\".", "str_to_hash", "=", "(", "six", ".", "text_type", "(", "entry", ".", "msgid", ")", "+", "six", ".", "text_type", "(", "entry", ".", "msgstr", ")", "+", "six", ".", "text_type", "(", "entry", ".", "msgctxt", "or", "''", ")", ")", ".", "encode", "(", "'utf8'", ")", "entry", ".", "md5hash", "=", "hashlib", ".", "md5", "(", "str_to_hash", ")", ".", "hexdigest", "(", ")", "else", ":", "storage", "=", "get_storage", "(", "self", ".", "request", ")", "po_file", "=", "storage", ".", "get", "(", "self", ".", "po_file_cache_key", ",", "None", ")", "if", "not", "po_file", ":", "po_file", "=", "pofile", "(", "self", ".", "po_file_path", ")", "for", "entry", "in", "po_file", ":", "# Entry is an object representing a single entry in the", "# catalog. We interate through the entire catalog, pasting", "# a hashed value of the meat of each entry on its side in", "# an attribute called \"md5hash\".", "str_to_hash", "=", "(", "six", ".", "text_type", "(", "entry", ".", "msgid", ")", "+", "six", ".", "text_type", "(", "entry", ".", "msgstr", ")", "+", "six", ".", "text_type", "(", "entry", ".", "msgctxt", "or", "''", ")", ")", ".", "encode", "(", "'utf8'", ")", "entry", ".", "md5hash", "=", "hashlib", ".", "new", "(", "'md5'", ",", "str_to_hash", ")", ".", "hexdigest", "(", ")", "storage", ".", "set", "(", "self", ".", "po_file_cache_key", ",", "po_file", ")", "return", "po_file" ]
49.97561
19.121951
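The per-entry fingerprint computed above can be reproduced standalone; the msgid/msgstr values are made up, and plain unicode literals replace six.text_type:

import hashlib

msgid, msgstr, msgctxt = u'Hello', u'Bonjour', None
str_to_hash = (msgid + msgstr + (msgctxt or u'')).encode('utf8')
print(hashlib.md5(str_to_hash).hexdigest())  # stable md5hash for this entry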
def get_trusted_subjects(): """Get set of subjects that have unlimited access to all SciObj and APIs on this node.""" cert_subj = _get_client_side_certificate_subject() return ( d1_gmn.app.node_registry.get_cn_subjects() | django.conf.settings.DATAONE_TRUSTED_SUBJECTS | {cert_subj} if cert_subj is not None else set() )
[ "def", "get_trusted_subjects", "(", ")", ":", "cert_subj", "=", "_get_client_side_certificate_subject", "(", ")", "return", "(", "d1_gmn", ".", "app", ".", "node_registry", ".", "get_cn_subjects", "(", ")", "|", "django", ".", "conf", ".", "settings", ".", "DATAONE_TRUSTED_SUBJECTS", "|", "{", "cert_subj", "}", "if", "cert_subj", "is", "not", "None", "else", "set", "(", ")", ")" ]
33.636364
14.909091
def get_related_synsets(self,relation): """Retrieves all the synsets which are related by the given relation. Parameters ---------- relation : str Name of the relation via which the sought synsets are linked. Returns ------- list of Synsets Synsets which are related via `relation`. """ results = [] for relation_candidate in self._raw_synset.internalLinks: if relation_candidate.name == relation: linked_synset = synset(_get_key_from_raw_synset(relation_candidate.target_concept)) relation_candidate.target_concept = linked_synset._raw_synset results.append(linked_synset) return results
[ "def", "get_related_synsets", "(", "self", ",", "relation", ")", ":", "results", "=", "[", "]", "for", "relation_candidate", "in", "self", ".", "_raw_synset", ".", "internalLinks", ":", "if", "relation_candidate", ".", "name", "==", "relation", ":", "linked_synset", "=", "synset", "(", "_get_key_from_raw_synset", "(", "relation_candidate", ".", "target_concept", ")", ")", "relation_candidate", ".", "target_concept", "=", "linked_synset", ".", "_raw_synset", "results", ".", "append", "(", "linked_synset", ")", "return", "results" ]
33.409091
22.681818
def extractClips(self, specsFilePathOrStr, outputDir=None, zipOutput=False): """Extract clips according to the specification file or string. Arguments: specsFilePathOrStr (str): Specification file path or string outputDir (str): Location of the extracted clips zipOutput (bool): Archive extracted clips' flag Specifications format: <begin:seconds> <end:seconds> [<text_metadata>] 20.5 59.75 Discussion about dogs 105.3 200.3 Cat story Notes: <text_metadata> is completely optional """ clips = SpecsParser.parse(specsFilePathOrStr) # Output to current working directory if no outputDir was provided if not outputDir: outputDir = os.path.abspath('.') zipFile = None if zipOutput: bname = os.path.splitext(os.path.basename(specsFilePathOrStr))[0] zipPath = "%s_clips.zip" % bname zipFile = zipfile.ZipFile(os.path.join(outputDir, zipPath), mode='w') for i, clip in enumerate(clips): # 13 clips => clip01.mp3, clip12.mp3... filenameFormat = 'clip%%0%dd.mp3' % len(str(len(clips))) filepath = os.path.join(outputDir, filenameFormat % (i+1)) clipData = self._extractClipData(clip) with open(filepath, 'wb') as f_out: f_out.write(clipData) if zipFile: zipFile.write(filepath, arcname=os.path.basename(filepath)) os.unlink(filepath) if zipFile: zipFile.close()
[ "def", "extractClips", "(", "self", ",", "specsFilePathOrStr", ",", "outputDir", "=", "None", ",", "zipOutput", "=", "False", ")", ":", "clips", "=", "SpecsParser", ".", "parse", "(", "specsFilePathOrStr", ")", "# Output to current working directory if no outputDir was provided", "if", "not", "outputDir", ":", "outputDir", "=", "os", ".", "path", ".", "abspath", "(", "'.'", ")", "zipFile", "=", "None", "if", "zipOutput", ":", "bname", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "specsFilePathOrStr", ")", ")", "[", "0", "]", "zipPath", "=", "\"%s_clips.zip\"", "%", "bname", "zipFile", "=", "zipfile", ".", "ZipFile", "(", "os", ".", "path", ".", "join", "(", "outputDir", ",", "zipPath", ")", ",", "mode", "=", "'w'", ")", "for", "i", ",", "clip", "in", "enumerate", "(", "clips", ")", ":", "# 13 clips => clip01.mp3, clip12.mp3...", "filenameFormat", "=", "'clip%%0%dd.mp3'", "%", "len", "(", "str", "(", "len", "(", "clips", ")", ")", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "outputDir", ",", "filenameFormat", "%", "(", "i", "+", "1", ")", ")", "clipData", "=", "self", ".", "_extractClipData", "(", "clip", ")", "with", "open", "(", "filepath", ",", "'wb'", ")", "as", "f_out", ":", "f_out", ".", "write", "(", "clipData", ")", "if", "zipFile", ":", "zipFile", ".", "write", "(", "filepath", ",", "arcname", "=", "os", ".", "path", ".", "basename", "(", "filepath", ")", ")", "os", ".", "unlink", "(", "filepath", ")", "if", "zipFile", ":", "zipFile", ".", "close", "(", ")" ]
34.695652
22.652174
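A usage sketch; the extractor class name and audio source are hypothetical, but the specs file follows the format documented above:

# episode_specs.txt contains, e.g.:
#   20.5 59.75 Discussion about dogs
#   105.3 200.3 Cat story
extractor = ClipExtractor('episode.mp3')   # hypothetical constructor
extractor.extractClips('episode_specs.txt', outputDir='clips', zipOutput=True)
# -> clips/episode_specs_clips.zip holding clip1.mp3 and clip2.mp3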
def restore_node(self, node): """ Restores a previously hidden node back into the graph and restores all of its incoming and outgoing edges. """ try: self.nodes[node], all_edges = self.hidden_nodes[node] for edge in all_edges: self.restore_edge(edge) del self.hidden_nodes[node] except KeyError: raise GraphError('Invalid node %s' % node)
[ "def", "restore_node", "(", "self", ",", "node", ")", ":", "try", ":", "self", ".", "nodes", "[", "node", "]", ",", "all_edges", "=", "self", ".", "hidden_nodes", "[", "node", "]", "for", "edge", "in", "all_edges", ":", "self", ".", "restore_edge", "(", "edge", ")", "del", "self", ".", "hidden_nodes", "[", "node", "]", "except", "KeyError", ":", "raise", "GraphError", "(", "'Invalid node %s'", "%", "node", ")" ]
36.583333
11.916667
def _get_standard_tc_matches(text, full_text, options): ''' get the standard tab completions. These are the options which could complete the full_text. ''' final_matches = [o for o in options if o.startswith(full_text)] return final_matches
[ "def", "_get_standard_tc_matches", "(", "text", ",", "full_text", ",", "options", ")", ":", "final_matches", "=", "[", "o", "for", "o", "in", "options", "if", "o", ".", "startswith", "(", "full_text", ")", "]", "return", "final_matches" ]
36.857143
21.142857
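A quick illustration of the prefix filter above (the function is the one defined in this entry; note that its text argument is unused):

options = ['status', 'start', 'stop', 'shell']
print(_get_standard_tc_matches('st', 'st', options))
# ['status', 'start', 'stop'] -- every option that starts with the full text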
def compute(self, inputs, outputs): """ Get the next record from the queue and encode it. The fields for inputs and outputs are as defined in the spec above. """ if len(self.queue) > 0: # Take the top element of the data queue data = self.queue.pop() else: raise Exception("RawSensor: No data to encode: queue is empty ") # Copy data into output vectors outputs["resetOut"][0] = data["reset"] outputs["sequenceIdOut"][0] = data["sequenceId"] outputs["dataOut"][:] = 0 outputs["dataOut"][data["nonZeros"]] = 1 if self.verbosity > 1: print "RawSensor outputs:" print "sequenceIdOut: ", outputs["sequenceIdOut"] print "resetOut: ", outputs["resetOut"] print "dataOut: ", outputs["dataOut"].nonzero()[0]
[ "def", "compute", "(", "self", ",", "inputs", ",", "outputs", ")", ":", "if", "len", "(", "self", ".", "queue", ")", ">", "0", ":", "# Take the top element of the data queue", "data", "=", "self", ".", "queue", ".", "pop", "(", ")", "else", ":", "raise", "Exception", "(", "\"RawSensor: No data to encode: queue is empty \"", ")", "# Copy data into output vectors", "outputs", "[", "\"resetOut\"", "]", "[", "0", "]", "=", "data", "[", "\"reset\"", "]", "outputs", "[", "\"sequenceIdOut\"", "]", "[", "0", "]", "=", "data", "[", "\"sequenceId\"", "]", "outputs", "[", "\"dataOut\"", "]", "[", ":", "]", "=", "0", "outputs", "[", "\"dataOut\"", "]", "[", "data", "[", "\"nonZeros\"", "]", "]", "=", "1", "if", "self", ".", "verbosity", ">", "1", ":", "print", "\"RawSensor outputs:\"", "print", "\"sequenceIdOut: \"", ",", "outputs", "[", "\"sequenceIdOut\"", "]", "print", "\"resetOut: \"", ",", "outputs", "[", "\"resetOut\"", "]", "print", "\"dataOut: \"", ",", "outputs", "[", "\"dataOut\"", "]", ".", "nonzero", "(", ")", "[", "0", "]" ]
33.478261
15.304348
def get_next_create_state(self, state, ret): """Return the next create state from previous state. """ if ret: if state == fw_const.FABRIC_PREPARE_DONE_STATE: return state else: return state + 1 else: return state
[ "def", "get_next_create_state", "(", "self", ",", "state", ",", "ret", ")", ":", "if", "ret", ":", "if", "state", "==", "fw_const", ".", "FABRIC_PREPARE_DONE_STATE", ":", "return", "state", "else", ":", "return", "state", "+", "1", "else", ":", "return", "state" ]
32.888889
14.888889
def putRequest(self, request, block=True, timeout=0): """Put work request into work queue and save its id for later.""" # don't reuse old work requests # print '\tthread pool putting work request %s'%request self._requests_queue.put(request, block, timeout) self.workRequests[request.requestID] = request
[ "def", "putRequest", "(", "self", ",", "request", ",", "block", "=", "True", ",", "timeout", "=", "0", ")", ":", "# don't reuse old work requests", "# print '\\tthread pool putting work request %s'%request", "self", ".", "_requests_queue", ".", "put", "(", "request", ",", "block", ",", "timeout", ")", "self", ".", "workRequests", "[", "request", ".", "requestID", "]", "=", "request" ]
56.5
11.333333
def setup(self): """Initialize the consumer, setting up needed attributes and connecting to RabbitMQ. """ LOGGER.info('Initializing for %s', self.name) if 'consumer' not in self.consumer_config: return self.on_startup_error( '"consumer" not specified in configuration') self.consumer = self.get_consumer(self.consumer_config) if not self.consumer: return self.on_startup_error( 'Could not import "{}"'.format( self.consumer_config.get( 'consumer', 'unconfigured consumer'))) self.setup_instrumentation() self.reset_error_counter() self.setup_sighandlers() self.create_connections()
[ "def", "setup", "(", "self", ")", ":", "LOGGER", ".", "info", "(", "'Initializing for %s'", ",", "self", ".", "name", ")", "if", "'consumer'", "not", "in", "self", ".", "consumer_config", ":", "return", "self", ".", "on_startup_error", "(", "'\"consumer\" not specified in configuration'", ")", "self", ".", "consumer", "=", "self", ".", "get_consumer", "(", "self", ".", "consumer_config", ")", "if", "not", "self", ".", "consumer", ":", "return", "self", ".", "on_startup_error", "(", "'Could not import \"{}\"'", ".", "format", "(", "self", ".", "consumer_config", ".", "get", "(", "'consumer'", ",", "'unconfigured consumer'", ")", ")", ")", "self", ".", "setup_instrumentation", "(", ")", "self", ".", "reset_error_counter", "(", ")", "self", ".", "setup_sighandlers", "(", ")", "self", ".", "create_connections", "(", ")" ]
32.695652
16.608696
def get_song_by_url(self, song_url, song_name, folder, lyric_info): """Download a song and save it to disk. :params song_url: download address. :params song_name: song name. :params folder: storage path. :params lyric: lyric info. """ if not os.path.exists(folder): os.makedirs(folder) fpath = os.path.join(folder, song_name+'.mp3') if sys.platform == 'win32' or sys.platform == 'cygwin': valid_name = re.sub(r'[<>:"/\\|?*]', '', song_name) if valid_name != song_name: click.echo('{} will be saved as: {}.mp3'.format(song_name, valid_name)) fpath = os.path.join(folder, valid_name + '.mp3') if not os.path.exists(fpath): resp = self.download_session.get( song_url, timeout=self.timeout, stream=True) length = int(resp.headers.get('content-length')) label = 'Downloading {} {}kb'.format(song_name, int(length/1024)) with click.progressbar(length=length, label=label) as progressbar: with open(fpath, 'wb') as song_file: for chunk in resp.iter_content(chunk_size=1024): if chunk: # filter out keep-alive new chunks song_file.write(chunk) progressbar.update(1024) if lyric_info: folder = os.path.join(folder, 'lyric') if not os.path.exists(folder): os.makedirs(folder) fpath = os.path.join(folder, song_name+'.lrc') with open(fpath, 'w') as lyric_file: lyric_file.write(lyric_info)
[ "def", "get_song_by_url", "(", "self", ",", "song_url", ",", "song_name", ",", "folder", ",", "lyric_info", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "folder", ")", ":", "os", ".", "makedirs", "(", "folder", ")", "fpath", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "song_name", "+", "'.mp3'", ")", "if", "sys", ".", "platform", "==", "'win32'", "or", "sys", ".", "platform", "==", "'cygwin'", ":", "valid_name", "=", "re", ".", "sub", "(", "r'[<>:\"/\\\\|?*]'", ",", "''", ",", "song_name", ")", "if", "valid_name", "!=", "song_name", ":", "click", ".", "echo", "(", "'{} will be saved as: {}.mp3'", ".", "format", "(", "song_name", ",", "valid_name", ")", ")", "fpath", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "valid_name", "+", "'.mp3'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "fpath", ")", ":", "resp", "=", "self", ".", "download_session", ".", "get", "(", "song_url", ",", "timeout", "=", "self", ".", "timeout", ",", "stream", "=", "True", ")", "length", "=", "int", "(", "resp", ".", "headers", ".", "get", "(", "'content-length'", ")", ")", "label", "=", "'Downloading {} {}kb'", ".", "format", "(", "song_name", ",", "int", "(", "length", "/", "1024", ")", ")", "with", "click", ".", "progressbar", "(", "length", "=", "length", ",", "label", "=", "label", ")", "as", "progressbar", ":", "with", "open", "(", "fpath", ",", "'wb'", ")", "as", "song_file", ":", "for", "chunk", "in", "resp", ".", "iter_content", "(", "chunk_size", "=", "1024", ")", ":", "if", "chunk", ":", "# filter out keep-alive new chunks", "song_file", ".", "write", "(", "chunk", ")", "progressbar", ".", "update", "(", "1024", ")", "if", "lyric_info", ":", "folder", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'lyric'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "folder", ")", ":", "os", ".", "makedirs", "(", "folder", ")", "fpath", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "song_name", "+", "'.lrc'", ")", "with", "open", "(", "fpath", ",", "'w'", ")", "as", "lyric_file", ":", "lyric_file", ".", "write", "(", "lyric_info", ")" ]
42.641026
18.076923
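The streamed-download core of the method can be exercised standalone with requests; the URL and filename are hypothetical:

import requests

resp = requests.get('https://example.com/song.mp3', stream=True, timeout=60)
with open('song.mp3', 'wb') as song_file:
    for chunk in resp.iter_content(chunk_size=1024):
        if chunk:  # filter out keep-alive new chunks
            song_file.write(chunk)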
def get_event_public_discount(self, id, discount_id, **data): """ GET /events/:id/public_discounts/:discount_id/ Gets a public :format:`discount` by ID as the key ``discount``. """ return self.get("/events/{0}/public_discounts/{1}/".format(id,discount_id), data=data)
[ "def", "get_event_public_discount", "(", "self", ",", "id", ",", "discount_id", ",", "*", "*", "data", ")", ":", "return", "self", ".", "get", "(", "\"/events/{0}/public_discounts/{0}/\"", ".", "format", "(", "id", ",", "discount_id", ")", ",", "data", "=", "data", ")" ]
44.285714
21.714286
def __branch_point_dfs_recursive(u, large_n, b, stem, dfs_data): """A recursive implementation of the BranchPtDFS function, as defined on page 14 of the paper.""" first_vertex = dfs_data['adj'][u][0] large_w = wt(u, first_vertex, dfs_data) if large_w % 2 == 0: large_w += 1 v_I = 0 v_II = 0 for v in [v for v in dfs_data['adj'][u] if wt(u, v, dfs_data) <= large_w]: stem[u] = v # not in the original paper, but a logical extension based on page 13 if a(v, dfs_data) == u: # uv is a tree edge large_n[v] = 0 if wt(u, v, dfs_data) % 2 == 0: v_I = v else: b_u = b[u] l2_v = L2(v, dfs_data) #if l2_v > b_u: # If this is true, then we're not on a branch at all # continue if l2_v < b_u: large_n[v] = 1 elif b_u != 1: #print stem #print dfs_data['lowpoint_2_lookup'] #print b xnode = stem[l2_v] if large_n[xnode] != 0: large_n[v] = large_n[xnode] + 1 elif dfs_data['graph'].adjacent(u, L1(v, dfs_data)): large_n[v] = 2 else: large_n[v] = large_n[u] if large_n[v] % 2 == 0: v_II = v break # Goto 1 if v_II != 0: # Move v_II to head of Adj[u] dfs_data['adj'][u].remove(v_II) dfs_data['adj'][u].insert(0, v_II) elif v_I != 0: # Move v_I to head of Adj[u] dfs_data['adj'][u].remove(v_I) dfs_data['adj'][u].insert(0, v_I) first_time = True for v in dfs_data['adj'][u]: if a(v, dfs_data) == u: b[v] = u if first_time: b[v] = b[u] elif wt(u, v, dfs_data) % 2 == 0: large_n[v] = 0 else: large_n[v] = 1 stem[u] = v __branch_point_dfs_recursive(v, large_n, b, stem, dfs_data) first_time = False return
[ "def", "__branch_point_dfs_recursive", "(", "u", ",", "large_n", ",", "b", ",", "stem", ",", "dfs_data", ")", ":", "first_vertex", "=", "dfs_data", "[", "'adj'", "]", "[", "u", "]", "[", "0", "]", "large_w", "=", "wt", "(", "u", ",", "first_vertex", ",", "dfs_data", ")", "if", "large_w", "%", "2", "==", "0", ":", "large_w", "+=", "1", "v_I", "=", "0", "v_II", "=", "0", "for", "v", "in", "[", "v", "for", "v", "in", "dfs_data", "[", "'adj'", "]", "[", "u", "]", "if", "wt", "(", "u", ",", "v", ",", "dfs_data", ")", "<=", "large_w", "]", ":", "stem", "[", "u", "]", "=", "v", "# not in the original paper, but a logical extension based on page 13", "if", "a", "(", "v", ",", "dfs_data", ")", "==", "u", ":", "# uv is a tree edge", "large_n", "[", "v", "]", "=", "0", "if", "wt", "(", "u", ",", "v", ",", "dfs_data", ")", "%", "2", "==", "0", ":", "v_I", "=", "v", "else", ":", "b_u", "=", "b", "[", "u", "]", "l2_v", "=", "L2", "(", "v", ",", "dfs_data", ")", "#if l2_v > b_u:", "# If this is true, then we're not on a branch at all", "# continue", "if", "l2_v", "<", "b_u", ":", "large_n", "[", "v", "]", "=", "1", "elif", "b_u", "!=", "1", ":", "#print stem", "#print dfs_data['lowpoint_2_lookup']", "#print b", "xnode", "=", "stem", "[", "l2_v", "]", "if", "large_n", "[", "xnode", "]", "!=", "0", ":", "large_n", "[", "v", "]", "=", "large_n", "[", "xnode", "]", "+", "1", "elif", "dfs_data", "[", "'graph'", "]", ".", "adjacent", "(", "u", ",", "L1", "(", "v", ",", "dfs_data", ")", ")", ":", "large_n", "[", "v", "]", "=", "2", "else", ":", "large_n", "[", "v", "]", "=", "large_n", "[", "u", "]", "if", "large_n", "[", "v", "]", "%", "2", "==", "0", ":", "v_II", "=", "v", "break", "# Goto 1", "if", "v_II", "!=", "0", ":", "# Move v_II to head of Adj[u]", "dfs_data", "[", "'adj'", "]", "[", "u", "]", ".", "remove", "(", "v_II", ")", "dfs_data", "[", "'adj'", "]", "[", "u", "]", ".", "insert", "(", "0", ",", "v_II", ")", "elif", "v_I", "!=", "0", ":", "# Move v_I to head of Adj[u]", "dfs_data", "[", "'adj'", "]", "[", "u", "]", ".", "remove", "(", "v_I", ")", "dfs_data", "[", "'adj'", "]", "[", "u", "]", ".", "insert", "(", "0", ",", "v_I", ")", "first_time", "=", "True", "for", "v", "in", "dfs_data", "[", "'adj'", "]", "[", "u", "]", ":", "if", "a", "(", "v", ",", "dfs_data", ")", "==", "u", ":", "b", "[", "v", "]", "=", "u", "if", "first_time", ":", "b", "[", "v", "]", "=", "b", "[", "u", "]", "elif", "wt", "(", "u", ",", "v", ",", "dfs_data", ")", "%", "2", "==", "0", ":", "large_n", "[", "v", "]", "=", "0", "else", ":", "large_n", "[", "v", "]", "=", "1", "stem", "[", "u", "]", "=", "v", "__branch_point_dfs_recursive", "(", "v", ",", "large_n", ",", "b", ",", "stem", ",", "dfs_data", ")", "first_time", "=", "False", "return" ]
36.948276
13.482759
def j1(x, context=None): """ Return the value of the first kind Bessel function of order 1 at x. """ return _apply_function_in_current_context( BigFloat, mpfr.mpfr_j1, (BigFloat._implicit_convert(x),), context, )
[ "def", "j1", "(", "x", ",", "context", "=", "None", ")", ":", "return", "_apply_function_in_current_context", "(", "BigFloat", ",", "mpfr", ".", "mpfr_j1", ",", "(", "BigFloat", ".", "_implicit_convert", "(", "x", ")", ",", ")", ",", "context", ",", ")" ]
23.181818
17.727273
def remove_file_from_tree(tree, file_path): """Remove a file from a tree. Args: tree A list of dicts containing info about each blob in a tree. file_path The path of a file to remove from a tree. Returns: The provided tree, but with the item matching the specified file_path removed. """ match = None for item in tree: if item.get("path") == file_path: match = item break if match: tree.remove(match) return tree
[ "def", "remove_file_from_tree", "(", "tree", ",", "file_path", ")", ":", "match", "=", "None", "for", "item", "in", "tree", ":", "if", "item", ".", "get", "(", "\"path\"", ")", "==", "file_path", ":", "match", "=", "item", "break", "if", "match", ":", "tree", ".", "remove", "(", "match", ")", "return", "tree" ]
21.75
22.75
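A self-contained example of the helper above, using a made-up two-blob tree:

tree = [
    {'path': 'README.md', 'sha': 'a1b2'},
    {'path': 'src/app.py', 'sha': 'c3d4'},
]
print(remove_file_from_tree(tree, 'README.md'))
# [{'path': 'src/app.py', 'sha': 'c3d4'}] -- the matching blob is removed in place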
def keep_absolute_impute__roc_auc(X, y, model_generator, method_name, num_fcounts=11): """ Keep Absolute (impute) xlabel = "Max fraction of features kept" ylabel = "ROC AUC" transform = "identity" sort_order = 19 """ return __run_measure(measures.keep_mask, X, y, model_generator, method_name, 0, num_fcounts, sklearn.metrics.roc_auc_score)
[ "def", "keep_absolute_impute__roc_auc", "(", "X", ",", "y", ",", "model_generator", ",", "method_name", ",", "num_fcounts", "=", "11", ")", ":", "return", "__run_measure", "(", "measures", ".", "keep_mask", ",", "X", ",", "y", ",", "model_generator", ",", "method_name", ",", "0", ",", "num_fcounts", ",", "sklearn", ".", "metrics", ".", "roc_auc_score", ")" ]
45.125
23.75
def _find_exits(self, src_block, target_block): """ Source block has more than one exit, and through some of those exits, the control flow can eventually go to the target block. This method returns exits that lead to the target block. :param src_block: The block that has multiple exits. :param target_block: The target block to reach. :returns: a dict of statement ID -> a list of target IPs (or None if the exit should not be taken), each corresponds to an exit to take in order to reach the target. For example, it returns the following dict: { 'default': None, # It has a default exit, but shouldn't be taken 15: [ 0x400080 ], # Statement 15 is an exit statement, and should be taken when the target is # 0x400080 28: None # Statement 28 is an exit statement, but shouldn't be taken } """ # Enumerate all statements and find exit statements # Since we don't have a state, we have to rely on the pyvex block instead of SimIRSB # Just create the block from pyvex again - not a big deal if self.project.is_hooked(src_block.addr): # Just return all exits for now return { -1: [ target_block.addr ] } block = self.project.factory.block(src_block.addr) vex_block = block.vex exit_stmt_ids = { } for stmt_idx, stmt in enumerate(vex_block.statements): if isinstance(stmt, pyvex.IRStmt.Exit): exit_stmt_ids[stmt_idx] = None # And of course, it has a default exit # Don't forget about it. exit_stmt_ids[DEFAULT_STATEMENT] = None # Find all paths from src_block to target_block # FIXME: This is some crappy code written in a hurry. Replace the all_simple_paths() later. all_simple_paths = list(networkx.all_simple_paths(self._cfg.graph, src_block, target_block, cutoff=3)) for simple_path in all_simple_paths: if len(simple_path) <= 1: # Oops, it looks that src_block and target_block are the same guy? continue if self._same_function: # Examine this path and make sure it does not have call or return edge for i in range(len(simple_path) - 1): jumpkind = self._cfg.graph[simple_path[i]][simple_path[i + 1]]['jumpkind'] if jumpkind in ('Ijk_Call', 'Ijk_Ret'): return { } # Get the first two nodes a, b = simple_path[0], simple_path[1] # Get the exit statement ID from CFG exit_stmt_id = self._cfg.get_exit_stmt_idx(a, b) if exit_stmt_id is None: continue # Mark it! if exit_stmt_ids[exit_stmt_id] is None: exit_stmt_ids[exit_stmt_id] = [ b.addr ] else: exit_stmt_ids[exit_stmt_id].append(b.addr) return exit_stmt_ids
[ "def", "_find_exits", "(", "self", ",", "src_block", ",", "target_block", ")", ":", "# Enumerate all statements and find exit statements", "# Since we don't have a state, we have to rely on the pyvex block instead of SimIRSB", "# Just create the block from pyvex again - not a big deal", "if", "self", ".", "project", ".", "is_hooked", "(", "src_block", ".", "addr", ")", ":", "# Just return all exits for now", "return", "{", "-", "1", ":", "[", "target_block", ".", "addr", "]", "}", "block", "=", "self", ".", "project", ".", "factory", ".", "block", "(", "src_block", ".", "addr", ")", "vex_block", "=", "block", ".", "vex", "exit_stmt_ids", "=", "{", "}", "for", "stmt_idx", ",", "stmt", "in", "enumerate", "(", "vex_block", ".", "statements", ")", ":", "if", "isinstance", "(", "stmt", ",", "pyvex", ".", "IRStmt", ".", "Exit", ")", ":", "exit_stmt_ids", "[", "stmt_idx", "]", "=", "None", "# And of course, it has a default exit", "# Don't forget about it.", "exit_stmt_ids", "[", "DEFAULT_STATEMENT", "]", "=", "None", "# Find all paths from src_block to target_block", "# FIXME: This is some crappy code written in a hurry. Replace the all_simple_paths() later.", "all_simple_paths", "=", "list", "(", "networkx", ".", "all_simple_paths", "(", "self", ".", "_cfg", ".", "graph", ",", "src_block", ",", "target_block", ",", "cutoff", "=", "3", ")", ")", "for", "simple_path", "in", "all_simple_paths", ":", "if", "len", "(", "simple_path", ")", "<=", "1", ":", "# Oops, it looks that src_block and target_block are the same guy?", "continue", "if", "self", ".", "_same_function", ":", "# Examine this path and make sure it does not have call or return edge", "for", "i", "in", "range", "(", "len", "(", "simple_path", ")", "-", "1", ")", ":", "jumpkind", "=", "self", ".", "_cfg", ".", "graph", "[", "simple_path", "[", "i", "]", "]", "[", "simple_path", "[", "i", "+", "1", "]", "]", "[", "'jumpkind'", "]", "if", "jumpkind", "in", "(", "'Ijk_Call'", ",", "'Ijk_Ret'", ")", ":", "return", "{", "}", "# Get the first two nodes", "a", ",", "b", "=", "simple_path", "[", "0", "]", ",", "simple_path", "[", "1", "]", "# Get the exit statement ID from CFG", "exit_stmt_id", "=", "self", ".", "_cfg", ".", "get_exit_stmt_idx", "(", "a", ",", "b", ")", "if", "exit_stmt_id", "is", "None", ":", "continue", "# Mark it!", "if", "exit_stmt_ids", "[", "exit_stmt_id", "]", "is", "None", ":", "exit_stmt_ids", "[", "exit_stmt_id", "]", "=", "[", "b", ".", "addr", "]", "else", ":", "exit_stmt_ids", "[", "exit_stmt_id", "]", ".", "append", "(", "b", ".", "addr", ")", "return", "exit_stmt_ids" ]
44.333333
25.550725
def print_cmd_line(self, s, target, source, env): """ In Python 3, and in some of our tests, sys.stdout is a StringIO object that takes unicode strings only. In other cases it's a regular Python 2.x file object, which takes strings (bytes); if you pass it a unicode object, it tries to decode with the 'ascii' codec, which fails if the cmd line has any hi-bit-set chars. This code assumes s is a regular string, but should work if it's unicode too. """ try: sys.stdout.write(s + u"\n") except UnicodeDecodeError: sys.stdout.write(s + "\n")
[ "def", "print_cmd_line", "(", "self", ",", "s", ",", "target", ",", "source", ",", "env", ")", ":", "try", ":", "sys", ".", "stdout", ".", "write", "(", "s", "+", "u\"\\n\"", ")", "except", "UnicodeDecodeError", ":", "sys", ".", "stdout", ".", "write", "(", "s", "+", "\"\\n\"", ")" ]
43.2
12.933333
def update(self, ttl=values.unset, collection_ttl=values.unset): """ Update the SyncMapInstance :param unicode ttl: Alias for collection_ttl :param unicode collection_ttl: New time-to-live of this Map in seconds. :returns: Updated SyncMapInstance :rtype: twilio.rest.sync.v1.service.sync_map.SyncMapInstance """ return self._proxy.update(ttl=ttl, collection_ttl=collection_ttl, )
[ "def", "update", "(", "self", ",", "ttl", "=", "values", ".", "unset", ",", "collection_ttl", "=", "values", ".", "unset", ")", ":", "return", "self", ".", "_proxy", ".", "update", "(", "ttl", "=", "ttl", ",", "collection_ttl", "=", "collection_ttl", ",", ")" ]
39.545455
20.454545
def _is_ctype(self, ctype): """Return True iff content is valid and of the given type.""" if not self.valid: return False mime = self.content_type return self.ContentMimetypes.get(mime) == ctype
[ "def", "_is_ctype", "(", "self", ",", "ctype", ")", ":", "if", "not", "self", ".", "valid", ":", "return", "False", "mime", "=", "self", ".", "content_type", "return", "self", ".", "ContentMimetypes", ".", "get", "(", "mime", ")", "==", "ctype" ]
38.833333
11
def _evaluate(self): """Scan for orphaned records and retrieve any records that have not already been grabbed""" retrieved_records = SortedDict() for record_id, record in six.iteritems(self._elements): if record is self._field._unset: # Record has not yet been retrieved, get it try: record = self.target_app.records.get(id=record_id) except SwimlaneHTTP400Error: # Record appears to be orphaned, don't include in set of elements logger.debug("Received 400 response retrieving record '{}', ignoring assumed orphaned record") continue retrieved_records[record_id] = record self._elements = retrieved_records return self._elements.values()
[ "def", "_evaluate", "(", "self", ")", ":", "retrieved_records", "=", "SortedDict", "(", ")", "for", "record_id", ",", "record", "in", "six", ".", "iteritems", "(", "self", ".", "_elements", ")", ":", "if", "record", "is", "self", ".", "_field", ".", "_unset", ":", "# Record has not yet been retrieved, get it", "try", ":", "record", "=", "self", ".", "target_app", ".", "records", ".", "get", "(", "id", "=", "record_id", ")", "except", "SwimlaneHTTP400Error", ":", "# Record appears to be orphaned, don't include in set of elements", "logger", ".", "debug", "(", "\"Received 400 response retrieving record '{}', ignoring assumed orphaned record\"", ")", "continue", "retrieved_records", "[", "record_id", "]", "=", "record", "self", ".", "_elements", "=", "retrieved_records", "return", "self", ".", "_elements", ".", "values", "(", ")" ]
40.75
23.2
def config(name='ckeditor', custom_config='', **kwargs): """Config CKEditor. :param name: The target input field's name. If you use Flask-WTF/WTForms, it needs to be set to the field's name. Defaults to ``'ckeditor'``. :param custom_config: The additional config, for example ``uiColor: '#9AB8F3'``. The proper syntax for each option is ``configuration name : configuration value``. You can use a comma to separate multiple key-value pairs. See the list of available configuration settings on `CKEditor documentation <https://docs.ckeditor.com/ckeditor4/docs/#!/api/CKEDITOR.config>`_. :param kwargs: Mirror arguments to overwrite configuration variables, see docs for more details. .. versionadded:: 0.3 """ extra_plugins = kwargs.get('extra_plugins', current_app.config['CKEDITOR_EXTRA_PLUGINS']) file_uploader = kwargs.get('file_uploader', current_app.config['CKEDITOR_FILE_UPLOADER']) file_browser = kwargs.get('file_browser', current_app.config['CKEDITOR_FILE_BROWSER']) if file_uploader != '': file_uploader = get_url(file_uploader) if file_browser != '': file_browser = get_url(file_browser) if file_uploader or file_browser and 'filebrowser' not in extra_plugins: extra_plugins.append('filebrowser') language = kwargs.get('language', current_app.config['CKEDITOR_LANGUAGE']) height = kwargs.get('height', current_app.config['CKEDITOR_HEIGHT']) width = kwargs.get('width', current_app.config['CKEDITOR_WIDTH']) code_theme = kwargs.get('code_theme', current_app.config['CKEDITOR_CODE_THEME']) wrong_key_arg = kwargs.get('codesnippet', None) if wrong_key_arg: warnings.warn('Argument codesnippet was renamed to enable_codesnippet and will be removed in future.') enable_codesnippet = kwargs.get('enable_codesnippet', wrong_key_arg) or \ current_app.config['CKEDITOR_ENABLE_CODESNIPPET'] if enable_codesnippet and 'codesnippet' not in extra_plugins: extra_plugins.append('codesnippet') enable_csrf = kwargs.get('enable_csrf', current_app.config['CKEDITOR_ENABLE_CSRF']) if enable_csrf: if 'csrf' not in current_app.extensions: raise RuntimeError("CSRFProtect is not initialized. It's required to enable CSRF protect, \ see docs for more details.") csrf_header = render_template_string(''' fileTools_requestHeaders: { 'X-CSRFToken': '{{ csrf_token() }}', },''') else: csrf_header = '' return Markup(''' <script type="text/javascript"> CKEDITOR.replace( "%s", { language: "%s", height: %s, width: %s, codeSnippet_theme: "%s", imageUploadUrl: "%s", filebrowserUploadUrl: "%s", filebrowserBrowseUrl: "%s", extraPlugins: "%s", %s // CSRF token header for XHR request %s }); </script>''' % ( name, language, height, width, code_theme, file_uploader, file_uploader, file_browser, ','.join(extra_plugins), csrf_header, custom_config))
[ "def", "config", "(", "name", "=", "'ckeditor'", ",", "custom_config", "=", "''", ",", "*", "*", "kwargs", ")", ":", "extra_plugins", "=", "kwargs", ".", "get", "(", "'extra_plugins'", ",", "current_app", ".", "config", "[", "'CKEDITOR_EXTRA_PLUGINS'", "]", ")", "file_uploader", "=", "kwargs", ".", "get", "(", "'file_uploader'", ",", "current_app", ".", "config", "[", "'CKEDITOR_FILE_UPLOADER'", "]", ")", "file_browser", "=", "kwargs", ".", "get", "(", "'file_browser'", ",", "current_app", ".", "config", "[", "'CKEDITOR_FILE_BROWSER'", "]", ")", "if", "file_uploader", "!=", "''", ":", "file_uploader", "=", "get_url", "(", "file_uploader", ")", "if", "file_browser", "!=", "''", ":", "file_browser", "=", "get_url", "(", "file_browser", ")", "if", "file_uploader", "or", "file_browser", "and", "'filebrowser'", "not", "in", "extra_plugins", ":", "extra_plugins", ".", "append", "(", "'filebrowser'", ")", "language", "=", "kwargs", ".", "get", "(", "'language'", ",", "current_app", ".", "config", "[", "'CKEDITOR_LANGUAGE'", "]", ")", "height", "=", "kwargs", ".", "get", "(", "'height'", ",", "current_app", ".", "config", "[", "'CKEDITOR_HEIGHT'", "]", ")", "width", "=", "kwargs", ".", "get", "(", "'width'", ",", "current_app", ".", "config", "[", "'CKEDITOR_WIDTH'", "]", ")", "code_theme", "=", "kwargs", ".", "get", "(", "'code_theme'", ",", "current_app", ".", "config", "[", "'CKEDITOR_CODE_THEME'", "]", ")", "wrong_key_arg", "=", "kwargs", ".", "get", "(", "'codesnippet'", ",", "None", ")", "if", "wrong_key_arg", ":", "warnings", ".", "warn", "(", "'Argument codesnippet was renamed to enable_codesnippet and will be removed in future.'", ")", "enable_codesnippet", "=", "kwargs", ".", "get", "(", "'enable_codesnippet'", ",", "wrong_key_arg", ")", "or", "current_app", ".", "config", "[", "'CKEDITOR_ENABLE_CODESNIPPET'", "]", "if", "enable_codesnippet", "and", "'codesnippet'", "not", "in", "extra_plugins", ":", "extra_plugins", ".", "append", "(", "'codesnippet'", ")", "enable_csrf", "=", "kwargs", ".", "get", "(", "'enable_csrf'", ",", "current_app", ".", "config", "[", "'CKEDITOR_ENABLE_CSRF'", "]", ")", "if", "enable_csrf", ":", "if", "'csrf'", "not", "in", "current_app", ".", "extensions", ":", "raise", "RuntimeError", "(", "\"CSRFProtect is not initialized. It's required to enable CSRF protect, \\\n see docs for more details.\"", ")", "csrf_header", "=", "render_template_string", "(", "'''\n fileTools_requestHeaders: {\n 'X-CSRFToken': '{{ csrf_token() }}',\n },'''", ")", "else", ":", "csrf_header", "=", "''", "return", "Markup", "(", "'''\n<script type=\"text/javascript\">\n CKEDITOR.replace( \"%s\", {\n language: \"%s\",\n height: %s,\n width: %s,\n codeSnippet_theme: \"%s\",\n imageUploadUrl: \"%s\",\n filebrowserUploadUrl: \"%s\",\n filebrowserBrowseUrl: \"%s\",\n extraPlugins: \"%s\",\n %s // CSRF token header for XHR request\n %s\n });\n</script>'''", "%", "(", "name", ",", "language", ",", "height", ",", "width", ",", "code_theme", ",", "file_uploader", ",", "file_uploader", ",", "file_browser", ",", "','", ".", "join", "(", "extra_plugins", ")", ",", "csrf_header", ",", "custom_config", ")", ")" ]
44.109589
28.410959
def _queue_dag(self, name, *, data=None): """ Add a new dag to the queue. If the stop workflow flag is set, no new dag can be queued. Args: name (str): The name of the dag that should be queued. data (MultiTaskData): The data that should be passed on to the new dag. Raises: DagNameUnknown: If the specified dag name does not exist Returns: str: The name of the queued dag. """ if self._stop_workflow: return None if name not in self._dags_blueprint: raise DagNameUnknown() new_dag = copy.deepcopy(self._dags_blueprint[name]) new_dag.workflow_name = self.name self._dags_running[new_dag.name] = self._celery_app.send_task( JobExecPath.Dag, args=(new_dag, self._workflow_id, data), queue=new_dag.queue, routing_key=new_dag.queue) return new_dag.name
[ "def", "_queue_dag", "(", "self", ",", "name", ",", "*", ",", "data", "=", "None", ")", ":", "if", "self", ".", "_stop_workflow", ":", "return", "None", "if", "name", "not", "in", "self", ".", "_dags_blueprint", ":", "raise", "DagNameUnknown", "(", ")", "new_dag", "=", "copy", ".", "deepcopy", "(", "self", ".", "_dags_blueprint", "[", "name", "]", ")", "new_dag", ".", "workflow_name", "=", "self", ".", "name", "self", ".", "_dags_running", "[", "new_dag", ".", "name", "]", "=", "self", ".", "_celery_app", ".", "send_task", "(", "JobExecPath", ".", "Dag", ",", "args", "=", "(", "new_dag", ",", "self", ".", "_workflow_id", ",", "data", ")", ",", "queue", "=", "new_dag", ".", "queue", ",", "routing_key", "=", "new_dag", ".", "queue", ")", "return", "new_dag", ".", "name" ]
32.857143
22.571429
def get_all_eip_addresses(addresses=None, allocation_ids=None, region=None, key=None, keyid=None, profile=None): ''' Get public addresses of some, or all EIPs associated with the current account. addresses (list) - Optional list of addresses. If provided, only the addresses associated with those in the list will be returned. allocation_ids (list) - Optional list of allocation IDs. If provided, only the addresses associated with the given allocation IDs will be returned. returns (list) - A list of the requested EIP addresses CLI Example: .. code-block:: bash salt-call boto_ec2.get_all_eip_addresses .. versionadded:: 2016.3.0 ''' return [x.public_ip for x in _get_all_eip_addresses(addresses, allocation_ids, region, key, keyid, profile)]
[ "def", "get_all_eip_addresses", "(", "addresses", "=", "None", ",", "allocation_ids", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "return", "[", "x", ".", "public_ip", "for", "x", "in", "_get_all_eip_addresses", "(", "addresses", ",", "allocation_ids", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "]" ]
34.36
29.32
def update(device_id, **params): ''' Updates device information in Server Density. For more information see the `API docs`__. .. __: https://apidocs.serverdensity.com/Inventory/Devices/Updating CLI Example: .. code-block:: bash salt '*' serverdensity_device.update 51f7eafcdba4bb235e000ae4 name=lama group=lama_band salt '*' serverdensity_device.update 51f7eafcdba4bb235e000ae4 name=better_lama group=rock_lamas swapSpace=512 ''' params = _clean_salt_variables(params) api_response = requests.put( 'https://api.serverdensity.io/inventory/devices/' + device_id, params={'token': get_sd_auth('api_token')}, data=params ) log.debug('Server Density API Response: %s', api_response) log.debug('Server Density API Response content: %s', api_response.content) if api_response.status_code == 200: try: return salt.utils.json.loads(api_response.content) except ValueError: log.error( 'Could not parse Server Density API Response content: %s', api_response.content ) raise CommandExecutionError( 'Failed to create, API Response: {0}'.format(api_response) ) else: return None
[ "def", "update", "(", "device_id", ",", "*", "*", "params", ")", ":", "params", "=", "_clean_salt_variables", "(", "params", ")", "api_response", "=", "requests", ".", "put", "(", "'https://api.serverdensity.io/inventory/devices/'", "+", "device_id", ",", "params", "=", "{", "'token'", ":", "get_sd_auth", "(", "'api_token'", ")", "}", ",", "data", "=", "params", ")", "log", ".", "debug", "(", "'Server Density API Response: %s'", ",", "api_response", ")", "log", ".", "debug", "(", "'Server Density API Response content: %s'", ",", "api_response", ".", "content", ")", "if", "api_response", ".", "status_code", "==", "200", ":", "try", ":", "return", "salt", ".", "utils", ".", "json", ".", "loads", "(", "api_response", ".", "content", ")", "except", "ValueError", ":", "log", ".", "error", "(", "'Could not parse Server Density API Response content: %s'", ",", "api_response", ".", "content", ")", "raise", "CommandExecutionError", "(", "'Failed to create, API Response: {0}'", ".", "format", "(", "api_response", ")", ")", "else", ":", "return", "None" ]
35.055556
26.833333
def kraken_request(self, method, endpoint, **kwargs): """Make a request to one of the kraken api endpoints. Headers are automatically set to accept :data:`TWITCH_HEADER_ACCEPT`. Also the client id from :data:`CLIENT_ID` will be set. The url will be constructed of :data:`TWITCH_KRAKENURL` and the given endpoint. :param method: the request method :type method: :class:`str` :param endpoint: the endpoint of the kraken api. The base url is automatically provided. :type endpoint: :class:`str` :param kwargs: keyword arguments of :meth:`requests.Session.request` :returns: a response object :rtype: :class:`requests.Response` :raises: :class:`requests.HTTPError` """ url = TWITCH_KRAKENURL + endpoint headers = kwargs.setdefault('headers', {}) headers['Accept'] = TWITCH_HEADER_ACCEPT headers['Client-ID'] = CLIENT_ID # https://github.com/justintv/Twitch-API#rate-limits return self.request(method, url, **kwargs)
[ "def", "kraken_request", "(", "self", ",", "method", ",", "endpoint", ",", "*", "*", "kwargs", ")", ":", "url", "=", "TWITCH_KRAKENURL", "+", "endpoint", "headers", "=", "kwargs", ".", "setdefault", "(", "'headers'", ",", "{", "}", ")", "headers", "[", "'Accept'", "]", "=", "TWITCH_HEADER_ACCEPT", "headers", "[", "'Client-ID'", "]", "=", "CLIENT_ID", "# https://github.com/justintv/Twitch-API#rate-limits", "return", "self", ".", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")" ]
46.434783
16.26087
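The header-merging trick above relies on dict.setdefault returning the (possibly freshly created) nested dict; a standalone sketch with stand-in values for the module constants:

kwargs = {'params': {'limit': 10}}          # caller-supplied request kwargs
headers = kwargs.setdefault('headers', {})  # creates and returns the dict in place
headers['Accept'] = 'application/vnd.twitchtv.v3+json'  # assumed accept header value
headers['Client-ID'] = 'my-client-id'                   # hypothetical client id
print(kwargs['headers'])  # the mutations are visible through kwargs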
def enable_autocenter(self, option): """Set ``autocenter`` behavior. Parameters ---------- option : {'on', 'override', 'once', 'off'} Option for auto-center behavior. A list of acceptable options can also be obtained by :meth:`get_autocenter_options`. Raises ------ ginga.ImageView.ImageViewError Invalid option. """ option = option.lower() assert(option in self.autocenter_options), \ ImageViewError("Bad autocenter option '%s': must be one of %s" % ( option, str(self.autocenter_options))) self.t_.set(autocenter=option)
[ "def", "enable_autocenter", "(", "self", ",", "option", ")", ":", "option", "=", "option", ".", "lower", "(", ")", "assert", "(", "option", "in", "self", ".", "autocenter_options", ")", ",", "ImageViewError", "(", "\"Bad autocenter option '%s': must be one of %s\"", "%", "(", "str", "(", "self", ".", "autocenter_options", ")", ")", ")", "self", ".", "t_", ".", "set", "(", "autocenter", "=", "option", ")" ]
32.5
18.6
def create_body(action, params): """Create http body for rest request.""" body = {} body['action'] = action if params is not None: body['params'] = params return body
[ "def", "create_body", "(", "action", ",", "params", ")", ":", "body", "=", "{", "}", "body", "[", "'action'", "]", "=", "action", "if", "params", "is", "not", "None", ":", "body", "[", "'params'", "]", "=", "params", "return", "body" ]
30.285714
10.857143
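Two quick calls to the helper above (the action names and params are made up):

print(create_body('configure', {'devices': ['D0001', 'D0002']}))
# {'action': 'configure', 'params': {'devices': ['D0001', 'D0002']}}
print(create_body('restart', None))
# {'action': 'restart'} -- 'params' is omitted when params is None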
def chrome_tracing_object_transfer_dump(self, filename=None): """Return a list of transfer events that can be viewed as a timeline. To view this information as a timeline, simply dump it as a json file by passing in "filename" or using json.dump, and then go to chrome://tracing in the Chrome web browser and load the dumped file. Make sure to enable "Flow events" in the "View Options" menu. Args: filename: If a filename is provided, the timeline is dumped to that file. Returns: If filename is not provided, this returns a list of profiling events. Each profile event is a dictionary. """ client_id_to_address = {} for client_info in ray.global_state.client_table(): client_id_to_address[client_info["ClientID"]] = "{}:{}".format( client_info["NodeManagerAddress"], client_info["ObjectManagerPort"]) all_events = [] for key, items in self.profile_table().items(): # Only consider object manager events. if items[0]["component_type"] != "object_manager": continue for event in items: if event["event_type"] == "transfer_send": object_id, remote_client_id, _, _ = event["extra_data"] elif event["event_type"] == "transfer_receive": object_id, remote_client_id, _, _ = event["extra_data"] elif event["event_type"] == "receive_pull_request": object_id, remote_client_id = event["extra_data"] else: assert False, "This should be unreachable." # Choose a color by reading the first couple of hex digits of # the object ID as an integer and turning that into a color. object_id_int = int(object_id[:2], 16) color = self._chrome_tracing_colors[object_id_int % len( self._chrome_tracing_colors)] new_event = { # The category of the event. "cat": event["event_type"], # The string displayed on the event. "name": event["event_type"], # The identifier for the group of rows that the event # appears in. "pid": client_id_to_address[key], # The identifier for the row that the event appears in. "tid": client_id_to_address[remote_client_id], # The start time in microseconds. "ts": self._seconds_to_microseconds(event["start_time"]), # The duration in microseconds. "dur": self._seconds_to_microseconds(event["end_time"] - event["start_time"]), # What is this? "ph": "X", # This is the name of the color to display the box in. "cname": color, # The extra user-defined data. "args": event["extra_data"], } all_events.append(new_event) # Add another box with a color indicating whether it was a send # or a receive event. if event["event_type"] == "transfer_send": additional_event = new_event.copy() additional_event["cname"] = "black" all_events.append(additional_event) elif event["event_type"] == "transfer_receive": additional_event = new_event.copy() additional_event["cname"] = "grey" all_events.append(additional_event) else: pass if filename is not None: with open(filename, "w") as outfile: json.dump(all_events, outfile) else: return all_events
[ "def", "chrome_tracing_object_transfer_dump", "(", "self", ",", "filename", "=", "None", ")", ":", "client_id_to_address", "=", "{", "}", "for", "client_info", "in", "ray", ".", "global_state", ".", "client_table", "(", ")", ":", "client_id_to_address", "[", "client_info", "[", "\"ClientID\"", "]", "]", "=", "\"{}:{}\"", ".", "format", "(", "client_info", "[", "\"NodeManagerAddress\"", "]", ",", "client_info", "[", "\"ObjectManagerPort\"", "]", ")", "all_events", "=", "[", "]", "for", "key", ",", "items", "in", "self", ".", "profile_table", "(", ")", ".", "items", "(", ")", ":", "# Only consider object manager events.", "if", "items", "[", "0", "]", "[", "\"component_type\"", "]", "!=", "\"object_manager\"", ":", "continue", "for", "event", "in", "items", ":", "if", "event", "[", "\"event_type\"", "]", "==", "\"transfer_send\"", ":", "object_id", ",", "remote_client_id", ",", "_", ",", "_", "=", "event", "[", "\"extra_data\"", "]", "elif", "event", "[", "\"event_type\"", "]", "==", "\"transfer_receive\"", ":", "object_id", ",", "remote_client_id", ",", "_", ",", "_", "=", "event", "[", "\"extra_data\"", "]", "elif", "event", "[", "\"event_type\"", "]", "==", "\"receive_pull_request\"", ":", "object_id", ",", "remote_client_id", "=", "event", "[", "\"extra_data\"", "]", "else", ":", "assert", "False", ",", "\"This should be unreachable.\"", "# Choose a color by reading the first couple of hex digits of", "# the object ID as an integer and turning that into a color.", "object_id_int", "=", "int", "(", "object_id", "[", ":", "2", "]", ",", "16", ")", "color", "=", "self", ".", "_chrome_tracing_colors", "[", "object_id_int", "%", "len", "(", "self", ".", "_chrome_tracing_colors", ")", "]", "new_event", "=", "{", "# The category of the event.", "\"cat\"", ":", "event", "[", "\"event_type\"", "]", ",", "# The string displayed on the event.", "\"name\"", ":", "event", "[", "\"event_type\"", "]", ",", "# The identifier for the group of rows that the event", "# appears in.", "\"pid\"", ":", "client_id_to_address", "[", "key", "]", ",", "# The identifier for the row that the event appears in.", "\"tid\"", ":", "client_id_to_address", "[", "remote_client_id", "]", ",", "# The start time in microseconds.", "\"ts\"", ":", "self", ".", "_seconds_to_microseconds", "(", "event", "[", "\"start_time\"", "]", ")", ",", "# The duration in microseconds.", "\"dur\"", ":", "self", ".", "_seconds_to_microseconds", "(", "event", "[", "\"end_time\"", "]", "-", "event", "[", "\"start_time\"", "]", ")", ",", "# What is this?", "\"ph\"", ":", "\"X\"", ",", "# This is the name of the color to display the box in.", "\"cname\"", ":", "color", ",", "# The extra user-defined data.", "\"args\"", ":", "event", "[", "\"extra_data\"", "]", ",", "}", "all_events", ".", "append", "(", "new_event", ")", "# Add another box with a color indicating whether it was a send", "# or a receive event.", "if", "event", "[", "\"event_type\"", "]", "==", "\"transfer_send\"", ":", "additional_event", "=", "new_event", ".", "copy", "(", ")", "additional_event", "[", "\"cname\"", "]", "=", "\"black\"", "all_events", ".", "append", "(", "additional_event", ")", "elif", "event", "[", "\"event_type\"", "]", "==", "\"transfer_receive\"", ":", "additional_event", "=", "new_event", ".", "copy", "(", ")", "additional_event", "[", "\"cname\"", "]", "=", "\"grey\"", "all_events", ".", "append", "(", "additional_event", ")", "else", ":", "pass", "if", "filename", "is", "not", "None", ":", "with", "open", "(", "filename", ",", "\"w\"", ")", "as", 
"outfile", ":", "json", ".", "dump", "(", "all_events", ",", "outfile", ")", "else", ":", "return", "all_events" ]
44.577778
22.366667
def __map_axis(self, axis):
    """Get the Linux xpad code from the Windows xinput code."""
    start_code, start_value = axis
    value = start_value
    code = self.manager.codes['xpad'][start_code]
    return code, value
[ "def", "__map_axis", "(", "self", ",", "axis", ")", ":", "start_code", ",", "start_value", "=", "axis", "value", "=", "start_value", "code", "=", "self", ".", "manager", ".", "codes", "[", "'xpad'", "]", "[", "start_code", "]", "return", "code", ",", "value" ]
39.666667
9.166667
def on_drag(self, cursor_x, cursor_y):
    """ Mouse cursor is moving
        Glut calls this function (when mouse button is down)
        and passes the mouse cursor position in window coords as the mouse moves.
    """
    from blmath.geometry.transform.rodrigues import as_rotation_matrix

    if self.isdragging:
        mouse_pt = arcball.Point2fT(cursor_x, cursor_y)
        # Update End Vector And Get Rotation As Quaternion
        ThisQuat = self.arcball.drag(mouse_pt)
        # Convert Quaternion Into Matrix3fT
        self.thisrot = arcball.Matrix3fSetRotationFromQuat4f(ThisQuat)
        # Use correct Linear Algebra matrix multiplication C = A * B
        # Accumulate Last Rotation Into This One
        self.thisrot = arcball.Matrix3fMulMatrix3f(self.lastrot, self.thisrot)
        # make sure it is a rotation
        self.thisrot = as_rotation_matrix(self.thisrot)
        # Set Our Final Transform's Rotation From This One
        self.transform = arcball.Matrix4fSetRotationFromMatrix3f(self.transform, self.thisrot)
        glut.glutPostRedisplay()
    return
[ "def", "on_drag", "(", "self", ",", "cursor_x", ",", "cursor_y", ")", ":", "from", "blmath", ".", "geometry", ".", "transform", ".", "rodrigues", "import", "as_rotation_matrix", "if", "self", ".", "isdragging", ":", "mouse_pt", "=", "arcball", ".", "Point2fT", "(", "cursor_x", ",", "cursor_y", ")", "# Update End Vector And Get Rotation As Quaternion", "ThisQuat", "=", "self", ".", "arcball", ".", "drag", "(", "mouse_pt", ")", "# Convert Quaternion Into Matrix3fT", "self", ".", "thisrot", "=", "arcball", ".", "Matrix3fSetRotationFromQuat4f", "(", "ThisQuat", ")", "# Use correct Linear Algebra matrix multiplication C = A * B", "# Accumulate Last Rotation Into This One", "self", ".", "thisrot", "=", "arcball", ".", "Matrix3fMulMatrix3f", "(", "self", ".", "lastrot", ",", "self", ".", "thisrot", ")", "# make sure it is a rotation", "self", ".", "thisrot", "=", "as_rotation_matrix", "(", "self", ".", "thisrot", ")", "# Set Our Final Transform's Rotation From This One", "self", ".", "transform", "=", "arcball", ".", "Matrix4fSetRotationFromMatrix3f", "(", "self", ".", "transform", ",", "self", ".", "thisrot", ")", "glut", ".", "glutPostRedisplay", "(", ")", "return" ]
54.190476
20.142857
def scroll_backward_vertically(self, steps=10, *args, **selectors):
    """
    Perform a scroll backward (vertically) action on the object which has *selectors* attributes.

    Return whether the object can be scrolled or not.

    See `Scroll Forward Vertically` for more details.
    """
    return self.device(**selectors).scroll.vert.backward(steps=steps)
[ "def", "scroll_backward_vertically", "(", "self", ",", "steps", "=", "10", ",", "*", "args", ",", "*", "*", "selectors", ")", ":", "return", "self", ".", "device", "(", "*", "*", "selectors", ")", ".", "scroll", ".", "vert", ".", "backward", "(", "steps", "=", "steps", ")" ]
41.333333
25.555556
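A hedged usage sketch: the keyword selectors follow uiautomator-style conventions, and the `app` object standing in for the library instance is hypothetical.

# Scroll a list view backward by 5 steps; returns True while scrolling is possible.
can_scroll = app.scroll_backward_vertically(steps=5, className='android.widget.ListView')
print('scrolled' if can_scroll else 'cannot scroll further')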
def reflect_image(image, axis=None, tx=None, metric='mattes'): """ Reflect an image along an axis ANTsR function: `reflectImage` Arguments --------- image : ANTsImage image to reflect axis : integer (optional) which dimension to reflect across, numbered from 0 to imageDimension-1 tx : string (optional) transformation type to estimate after reflection metric : string similarity metric for image registration. see antsRegistration. Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read( ants.get_ants_data('r16'), 'float' ) >>> axis = 2 >>> asym = ants.reflect_image(fi, axis, 'Affine')['warpedmovout'] >>> asym = asym - fi """ if axis is None: axis = image.dimension - 1 if (axis > image.dimension) or (axis < 0): axis = image.dimension - 1 rflct = mktemp(suffix='.mat') libfn = utils.get_lib_fn('reflectionMatrix%s'%image._libsuffix) libfn(image.pointer, axis, rflct) if tx is not None: rfi = registration(image, image, type_of_transform=tx, syn_metric=metric, outprefix=mktemp(), initial_transform=rflct) return rfi else: return apply_transforms(image, image, rflct)
[ "def", "reflect_image", "(", "image", ",", "axis", "=", "None", ",", "tx", "=", "None", ",", "metric", "=", "'mattes'", ")", ":", "if", "axis", "is", "None", ":", "axis", "=", "image", ".", "dimension", "-", "1", "if", "(", "axis", ">", "image", ".", "dimension", ")", "or", "(", "axis", "<", "0", ")", ":", "axis", "=", "image", ".", "dimension", "-", "1", "rflct", "=", "mktemp", "(", "suffix", "=", "'.mat'", ")", "libfn", "=", "utils", ".", "get_lib_fn", "(", "'reflectionMatrix%s'", "%", "image", ".", "_libsuffix", ")", "libfn", "(", "image", ".", "pointer", ",", "axis", ",", "rflct", ")", "if", "tx", "is", "not", "None", ":", "rfi", "=", "registration", "(", "image", ",", "image", ",", "type_of_transform", "=", "tx", ",", "syn_metric", "=", "metric", ",", "outprefix", "=", "mktemp", "(", ")", ",", "initial_transform", "=", "rflct", ")", "return", "rfi", "else", ":", "return", "apply_transforms", "(", "image", ",", "image", ",", "rflct", ")" ]
26.3
23.06
def decompile_pyc(bin_pyc, output=sys.stdout):
    '''
    Decompile a Python pyc or pyo binary file.

    :param bin_pyc: input file object
    :param output: output file object
    '''
    from turicreate.meta.asttools import python_source

    bin = bin_pyc.read()

    code = marshal.loads(bin[8:])

    mod_ast = make_module(code)

    python_source(mod_ast, file=output)
[ "def", "decompile_pyc", "(", "bin_pyc", ",", "output", "=", "sys", ".", "stdout", ")", ":", "from", "turicreate", ".", "meta", ".", "asttools", "import", "python_source", "bin", "=", "bin_pyc", ".", "read", "(", ")", "code", "=", "marshal", ".", "loads", "(", "bin", "[", "8", ":", "]", ")", "mod_ast", "=", "make_module", "(", "code", ")", "python_source", "(", "mod_ast", ",", "file", "=", "output", ")" ]
22.705882
20.235294
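A usage sketch, assuming a pyc file with the 8-byte header this function strips (Python 2 / early Python 3 layout; Python 3.7+ uses a 16-byte header, so the offset would need adjusting).

import sys

# Hypothetical file name; decompiled source is written to stdout.
with open('module.pyc', 'rb') as bin_pyc:
    decompile_pyc(bin_pyc, output=sys.stdout)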
def from_credentials(credentials): """Returns a new API object from an existing Credentials object. :param credentials: The existing saved credentials. :type credentials: Credentials :return: A new API object populated with MyGeotab credentials. :rtype: API """ return API(username=credentials.username, password=credentials.password, database=credentials.database, session_id=credentials.session_id, server=credentials.server)
[ "def", "from_credentials", "(", "credentials", ")", ":", "return", "API", "(", "username", "=", "credentials", ".", "username", ",", "password", "=", "credentials", ".", "password", ",", "database", "=", "credentials", ".", "database", ",", "session_id", "=", "credentials", ".", "session_id", ",", "server", "=", "credentials", ".", "server", ")" ]
46.545455
18.818182
def set_password(self, user, password): """ Sets the password for the passed user. Zendesk API `Reference <https://developer.zendesk.com/rest_api/docs/support/users#set-a-users-password>`__. :param user: User object or id :param password: new password """ url = self._build_url(self.endpoint.set_password(id=user)) return self._post(url, payload=dict(password=password))
[ "def", "set_password", "(", "self", ",", "user", ",", "password", ")", ":", "url", "=", "self", ".", "_build_url", "(", "self", ".", "endpoint", ".", "set_password", "(", "id", "=", "user", ")", ")", "return", "self", ".", "_post", "(", "url", ",", "payload", "=", "dict", "(", "password", "=", "password", ")", ")" ]
42.6
17.6
def _attach_endpoints(self): """Dynamically attaches endpoint callables to this client""" for name, value in inspect.getmembers(self): if inspect.isclass(value) and issubclass(value, self._Endpoint) and (value is not self._Endpoint): endpoint_instance = value(self.requester) setattr(self, endpoint_instance.endpoint_base, endpoint_instance) if not hasattr(endpoint_instance, 'get_endpoints'): endpoint_instance.get_endpoints = () if not hasattr(endpoint_instance, 'post_endpoints'): endpoint_instance.post_endpoints = () if not hasattr(endpoint_instance, 'is_callable'): endpoint_instance.is_callable = False for endpoint in (endpoint_instance.get_endpoints + endpoint_instance.post_endpoints): function = endpoint_instance.create_endpoint_function(endpoint) function_name = endpoint.replace('/', '_') setattr(endpoint_instance, function_name, function) function.__name__ = str(function_name) function.__doc__ = 'Tells the object to make a request to the {0} endpoint'.format(endpoint)
[ "def", "_attach_endpoints", "(", "self", ")", ":", "for", "name", ",", "value", "in", "inspect", ".", "getmembers", "(", "self", ")", ":", "if", "inspect", ".", "isclass", "(", "value", ")", "and", "issubclass", "(", "value", ",", "self", ".", "_Endpoint", ")", "and", "(", "value", "is", "not", "self", ".", "_Endpoint", ")", ":", "endpoint_instance", "=", "value", "(", "self", ".", "requester", ")", "setattr", "(", "self", ",", "endpoint_instance", ".", "endpoint_base", ",", "endpoint_instance", ")", "if", "not", "hasattr", "(", "endpoint_instance", ",", "'get_endpoints'", ")", ":", "endpoint_instance", ".", "get_endpoints", "=", "(", ")", "if", "not", "hasattr", "(", "endpoint_instance", ",", "'post_endpoints'", ")", ":", "endpoint_instance", ".", "post_endpoints", "=", "(", ")", "if", "not", "hasattr", "(", "endpoint_instance", ",", "'is_callable'", ")", ":", "endpoint_instance", ".", "is_callable", "=", "False", "for", "endpoint", "in", "(", "endpoint_instance", ".", "get_endpoints", "+", "endpoint_instance", ".", "post_endpoints", ")", ":", "function", "=", "endpoint_instance", ".", "create_endpoint_function", "(", "endpoint", ")", "function_name", "=", "endpoint", ".", "replace", "(", "'/'", ",", "'_'", ")", "setattr", "(", "endpoint_instance", ",", "function_name", ",", "function", ")", "function", ".", "__name__", "=", "str", "(", "function_name", ")", "function", ".", "__doc__", "=", "'Tells the object to make a request to the {0} endpoint'", ".", "format", "(", "endpoint", ")" ]
69.611111
29.388889
def addAccount(self, username, domain, password, avatars=None, protocol=u'email', disabled=0, internal=False, verified=True): """ Create a user account, add it to this LoginBase, and return it. This method must be called within a transaction in my store. @param username: the user's name. @param domain: the domain part of the user's name [XXX TODO: this really ought to say something about whether it's a Q2Q domain, a SIP domain, an HTTP realm, or an email address domain - right now the assumption is generally that it's an email address domain, but not always] @param password: A shared secret. @param avatars: (Optional). A SubStore which, if passed, will be used by cred as the target of all adaptations for this user. By default, I will create a SubStore, and plugins can be installed on that substore using the powerUp method to provide implementations of cred client interfaces. @raise DuplicateUniqueItem: if the 'avatars' argument already contains a LoginAccount. @return: an instance of a LoginAccount, with all attributes filled out as they are passed in, stored in my store. """ # unicode(None) == u'None', kids. if username is not None: username = unicode(username) if domain is not None: domain = unicode(domain) if password is not None: password = unicode(password) if self.accountByAddress(username, domain) is not None: raise DuplicateUser(username, domain) if avatars is None: avatars = self.makeAvatars(domain, username) subStore = avatars.open() # create this unconditionally; as the docstring says, we must be run # within a transaction, so if something goes wrong in the substore # transaction this item's creation will be reverted... la = LoginAccount(store=self.store, password=password, avatars=avatars, disabled=disabled) def createSubStoreAccountObjects(): LoginAccount(store=subStore, password=password, disabled=disabled, avatars=subStore) la.addLoginMethod(localpart=username, domain=domain, protocol=protocol, internal=internal, verified=verified) subStore.transact(createSubStoreAccountObjects) return la
[ "def", "addAccount", "(", "self", ",", "username", ",", "domain", ",", "password", ",", "avatars", "=", "None", ",", "protocol", "=", "u'email'", ",", "disabled", "=", "0", ",", "internal", "=", "False", ",", "verified", "=", "True", ")", ":", "# unicode(None) == u'None', kids.", "if", "username", "is", "not", "None", ":", "username", "=", "unicode", "(", "username", ")", "if", "domain", "is", "not", "None", ":", "domain", "=", "unicode", "(", "domain", ")", "if", "password", "is", "not", "None", ":", "password", "=", "unicode", "(", "password", ")", "if", "self", ".", "accountByAddress", "(", "username", ",", "domain", ")", "is", "not", "None", ":", "raise", "DuplicateUser", "(", "username", ",", "domain", ")", "if", "avatars", "is", "None", ":", "avatars", "=", "self", ".", "makeAvatars", "(", "domain", ",", "username", ")", "subStore", "=", "avatars", ".", "open", "(", ")", "# create this unconditionally; as the docstring says, we must be run", "# within a transaction, so if something goes wrong in the substore", "# transaction this item's creation will be reverted...", "la", "=", "LoginAccount", "(", "store", "=", "self", ".", "store", ",", "password", "=", "password", ",", "avatars", "=", "avatars", ",", "disabled", "=", "disabled", ")", "def", "createSubStoreAccountObjects", "(", ")", ":", "LoginAccount", "(", "store", "=", "subStore", ",", "password", "=", "password", ",", "disabled", "=", "disabled", ",", "avatars", "=", "subStore", ")", "la", ".", "addLoginMethod", "(", "localpart", "=", "username", ",", "domain", "=", "domain", ",", "protocol", "=", "protocol", ",", "internal", "=", "internal", ",", "verified", "=", "verified", ")", "subStore", ".", "transact", "(", "createSubStoreAccountObjects", ")", "return", "la" ]
38.463768
20.811594
def get_key_name(self, environ, response_interception, exception=None): """Get the timer key name. :param environ: wsgi environment :type environ: dict :param response_interception: dictionary in form {'status': '<response status>', 'response_headers': [<response headers], 'exc_info': <exc_info>} This is the interception of what was passed to start_response handler. :type response_interception: dict :param exception: optional exception happened during the iteration of the response :type exception: Exception :return: string in form '{{UNDERSCORED_PATH}}.{{METHOD}}.{{STATUS_CODE}}' :rtype: str """ status = response_interception.get('status') status_code = status.split()[0] # Leave only the status code. # PATH_INFO can be empty, so falling back to '/' in that case path = CHAR_RE.sub(self.separator, (environ['PATH_INFO'].rstrip('\/') or '/')[1:]) parts = [path, environ['REQUEST_METHOD'], status_code] if exception: parts.append(exception.__class__.__name__) return '.'.join(parts)
[ "def", "get_key_name", "(", "self", ",", "environ", ",", "response_interception", ",", "exception", "=", "None", ")", ":", "status", "=", "response_interception", ".", "get", "(", "'status'", ")", "status_code", "=", "status", ".", "split", "(", ")", "[", "0", "]", "# Leave only the status code.", "# PATH_INFO can be empty, so falling back to '/' in that case", "path", "=", "CHAR_RE", ".", "sub", "(", "self", ".", "separator", ",", "(", "environ", "[", "'PATH_INFO'", "]", ".", "rstrip", "(", "'\\/'", ")", "or", "'/'", ")", "[", "1", ":", "]", ")", "parts", "=", "[", "path", ",", "environ", "[", "'REQUEST_METHOD'", "]", ",", "status_code", "]", "if", "exception", ":", "parts", ".", "append", "(", "exception", ".", "__class__", ".", "__name__", ")", "return", "'.'", ".", "join", "(", "parts", ")" ]
49.652174
24.130435
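A self-contained sketch of the key derivation above; CHAR_RE and the separator are assumptions modelled on the class attributes the method references.

import re

CHAR_RE = re.compile(r'[^a-zA-Z0-9]+')  # assumed: collapse non-alphanumerics
separator = '.'

environ = {'PATH_INFO': '/api/v2/users/', 'REQUEST_METHOD': 'GET'}
interception = {'status': '200 OK', 'response_headers': [], 'exc_info': None}

status_code = interception['status'].split()[0]
path = CHAR_RE.sub(separator, (environ['PATH_INFO'].rstrip('/') or '/')[1:])
print('.'.join([path, environ['REQUEST_METHOD'], status_code]))  # api.v2.users.GET.200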
def limit(self, max=30): """ The speed limit for a boid. Boids can momentarily go very fast, something that is impossible for real animals. """ if abs(self.vx) > max: self.vx = self.vx/abs(self.vx)*max if abs(self.vy) > max: self.vy = self.vy/abs(self.vy)*max if abs(self.vz) > max: self.vz = self.vz/abs(self.vz)*max
[ "def", "limit", "(", "self", ",", "max", "=", "30", ")", ":", "if", "abs", "(", "self", ".", "vx", ")", ">", "max", ":", "self", ".", "vx", "=", "self", ".", "vx", "/", "abs", "(", "self", ".", "vx", ")", "*", "max", "if", "abs", "(", "self", ".", "vy", ")", ">", "max", ":", "self", ".", "vy", "=", "self", ".", "vy", "/", "abs", "(", "self", ".", "vy", ")", "*", "max", "if", "abs", "(", "self", ".", "vz", ")", ">", "max", ":", "self", ".", "vz", "=", "self", ".", "vz", "/", "abs", "(", "self", ".", "vz", ")", "*", "max" ]
28.933333
13.733333
def not_(self): ''' Negates this instance's query expression using MongoDB's ``$not`` operator **Example**: ``(User.name == 'Jeff').not_()`` .. note:: Another usage is via an operator, but parens are needed to get past precedence issues: ``~ (User.name == 'Jeff')`` ''' ret_obj = {} for k, v in self.obj.items(): if not isinstance(v, dict): ret_obj[k] = {'$ne' : v } continue num_ops = len([x for x in v if x[0] == '$']) if num_ops != len(v) and num_ops != 0: raise BadQueryException('$ operator used in field name') if num_ops == 0: ret_obj[k] = {'$ne' : v } continue for op, value in v.items(): k_dict = ret_obj.setdefault(k, {}) not_dict = k_dict.setdefault('$not', {}) not_dict[op] = value return QueryExpression(ret_obj)
[ "def", "not_", "(", "self", ")", ":", "ret_obj", "=", "{", "}", "for", "k", ",", "v", "in", "self", ".", "obj", ".", "items", "(", ")", ":", "if", "not", "isinstance", "(", "v", ",", "dict", ")", ":", "ret_obj", "[", "k", "]", "=", "{", "'$ne'", ":", "v", "}", "continue", "num_ops", "=", "len", "(", "[", "x", "for", "x", "in", "v", "if", "x", "[", "0", "]", "==", "'$'", "]", ")", "if", "num_ops", "!=", "len", "(", "v", ")", "and", "num_ops", "!=", "0", ":", "raise", "BadQueryException", "(", "'$ operator used in field name'", ")", "if", "num_ops", "==", "0", ":", "ret_obj", "[", "k", "]", "=", "{", "'$ne'", ":", "v", "}", "continue", "for", "op", ",", "value", "in", "v", ".", "items", "(", ")", ":", "k_dict", "=", "ret_obj", ".", "setdefault", "(", "k", ",", "{", "}", ")", "not_dict", "=", "k_dict", ".", "setdefault", "(", "'$not'", ",", "{", "}", ")", "not_dict", "[", "op", "]", "=", "value", "return", "QueryExpression", "(", "ret_obj", ")" ]
34.034483
20.517241
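A standalone sketch of the negation rules the method implements (QueryExpression wrapping and the mixed-operator error check are omitted for brevity):

def negate(obj):
    ret = {}
    for k, v in obj.items():
        if isinstance(v, dict) and all(op.startswith('$') for op in v):
            ret[k] = {'$not': dict(v)}   # operator dicts are wrapped in $not
        else:
            ret[k] = {'$ne': v}          # plain values become $ne
    return ret

assert negate({'name': 'Jeff'}) == {'name': {'$ne': 'Jeff'}}
assert negate({'age': {'$gt': 30}}) == {'age': {'$not': {'$gt': 30}}}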
def hook(): """Install or remove GitHub webhook.""" repo_id = request.json['id'] github = GitHubAPI(user_id=current_user.id) repos = github.account.extra_data['repos'] if repo_id not in repos: abort(404) if request.method == 'DELETE': try: if github.remove_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 204 else: abort(400) except Exception: abort(403) elif request.method == 'POST': try: if github.create_hook(repo_id, repos[repo_id]['full_name']): db.session.commit() return '', 201 else: abort(400) except Exception: abort(403) else: abort(400)
[ "def", "hook", "(", ")", ":", "repo_id", "=", "request", ".", "json", "[", "'id'", "]", "github", "=", "GitHubAPI", "(", "user_id", "=", "current_user", ".", "id", ")", "repos", "=", "github", ".", "account", ".", "extra_data", "[", "'repos'", "]", "if", "repo_id", "not", "in", "repos", ":", "abort", "(", "404", ")", "if", "request", ".", "method", "==", "'DELETE'", ":", "try", ":", "if", "github", ".", "remove_hook", "(", "repo_id", ",", "repos", "[", "repo_id", "]", "[", "'full_name'", "]", ")", ":", "db", ".", "session", ".", "commit", "(", ")", "return", "''", ",", "204", "else", ":", "abort", "(", "400", ")", "except", "Exception", ":", "abort", "(", "403", ")", "elif", "request", ".", "method", "==", "'POST'", ":", "try", ":", "if", "github", ".", "create_hook", "(", "repo_id", ",", "repos", "[", "repo_id", "]", "[", "'full_name'", "]", ")", ":", "db", ".", "session", ".", "commit", "(", ")", "return", "''", ",", "201", "else", ":", "abort", "(", "400", ")", "except", "Exception", ":", "abort", "(", "403", ")", "else", ":", "abort", "(", "400", ")" ]
26.6
18.633333
def top_class(self):
    """reference to the outermost parent class that contains this class and
    is defined within a namespace; if this class is defined directly under a
    namespace, self will be returned"""
    curr = self
    parent = self.parent
    while isinstance(parent, class_t):
        curr = parent
        parent = parent.parent
    return curr
[ "def", "top_class", "(", "self", ")", ":", "curr", "=", "self", "parent", "=", "self", ".", "parent", "while", "isinstance", "(", "parent", ",", "class_t", ")", ":", "curr", "=", "parent", "parent", "=", "parent", ".", "parent", "return", "curr" ]
33.272727
13.727273
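A runnable illustration of the parent walk, using a tiny stand-in for the library's class_t (the real declaration types are assumed):

class class_t(object):
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent
    def top_class(self):
        curr, parent = self, self.parent
        while isinstance(parent, class_t):
            curr, parent = parent, parent.parent
        return curr

namespace = object()                      # not a class_t, so the walk stops here
outer = class_t('Outer', parent=namespace)
inner = class_t('Inner', parent=outer)
print(inner.top_class().name)             # -> 'Outer'
print(outer.top_class().name)             # -> 'Outer' (self is returned)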
def reduce_operators(source): """ Remove spaces between operators in *source* and returns the result. Example:: def foo(foo, bar, blah): test = "This is a %s" % foo Will become:: def foo(foo,bar,blah): test="This is a %s"%foo .. note:: Also removes trailing commas and joins disjointed strings like ``("foo" "bar")``. """ io_obj = io.StringIO(source) prev_tok = None out_tokens = [] out = "" last_lineno = -1 last_col = 0 nl_types = (tokenize.NL, tokenize.NEWLINE) joining_strings = False new_string = "" for tok in tokenize.generate_tokens(io_obj.readline): token_type = tok[0] token_string = tok[1] start_line, start_col = tok[2] end_line, end_col = tok[3] if start_line > last_lineno: last_col = 0 if token_type != tokenize.OP: if start_col > last_col and token_type not in nl_types: if prev_tok[0] != tokenize.OP: out += (" " * (start_col - last_col)) if token_type == tokenize.STRING: if prev_tok[0] == tokenize.STRING: # Join the strings into one string_type = token_string[0] # '' or "" prev_string_type = prev_tok[1][0] out = out.rstrip(" ") # Remove any spaces we inserted prev if not joining_strings: # Remove prev token and start the new combined string out = out[:(len(out)-len(prev_tok[1]))] prev_string = prev_tok[1].strip(prev_string_type) new_string = ( prev_string + token_string.strip(string_type)) joining_strings = True else: new_string += token_string.strip(string_type) else: if token_string in ('}', ')', ']'): if prev_tok[1] == ',': out = out.rstrip(',') if joining_strings: # NOTE: Using triple quotes so that this logic works with # mixed strings using both single quotes and double quotes. out += "'''" + new_string + "'''" joining_strings = False if token_string == '@': # Decorators need special handling if prev_tok[0] == tokenize.NEWLINE: # Ensure it gets indented properly out += (" " * (start_col - last_col)) if not joining_strings: out += token_string last_col = end_col last_lineno = end_line prev_tok = tok return out
[ "def", "reduce_operators", "(", "source", ")", ":", "io_obj", "=", "io", ".", "StringIO", "(", "source", ")", "prev_tok", "=", "None", "out_tokens", "=", "[", "]", "out", "=", "\"\"", "last_lineno", "=", "-", "1", "last_col", "=", "0", "nl_types", "=", "(", "tokenize", ".", "NL", ",", "tokenize", ".", "NEWLINE", ")", "joining_strings", "=", "False", "new_string", "=", "\"\"", "for", "tok", "in", "tokenize", ".", "generate_tokens", "(", "io_obj", ".", "readline", ")", ":", "token_type", "=", "tok", "[", "0", "]", "token_string", "=", "tok", "[", "1", "]", "start_line", ",", "start_col", "=", "tok", "[", "2", "]", "end_line", ",", "end_col", "=", "tok", "[", "3", "]", "if", "start_line", ">", "last_lineno", ":", "last_col", "=", "0", "if", "token_type", "!=", "tokenize", ".", "OP", ":", "if", "start_col", ">", "last_col", "and", "token_type", "not", "in", "nl_types", ":", "if", "prev_tok", "[", "0", "]", "!=", "tokenize", ".", "OP", ":", "out", "+=", "(", "\" \"", "*", "(", "start_col", "-", "last_col", ")", ")", "if", "token_type", "==", "tokenize", ".", "STRING", ":", "if", "prev_tok", "[", "0", "]", "==", "tokenize", ".", "STRING", ":", "# Join the strings into one", "string_type", "=", "token_string", "[", "0", "]", "# '' or \"\"", "prev_string_type", "=", "prev_tok", "[", "1", "]", "[", "0", "]", "out", "=", "out", ".", "rstrip", "(", "\" \"", ")", "# Remove any spaces we inserted prev", "if", "not", "joining_strings", ":", "# Remove prev token and start the new combined string", "out", "=", "out", "[", ":", "(", "len", "(", "out", ")", "-", "len", "(", "prev_tok", "[", "1", "]", ")", ")", "]", "prev_string", "=", "prev_tok", "[", "1", "]", ".", "strip", "(", "prev_string_type", ")", "new_string", "=", "(", "prev_string", "+", "token_string", ".", "strip", "(", "string_type", ")", ")", "joining_strings", "=", "True", "else", ":", "new_string", "+=", "token_string", ".", "strip", "(", "string_type", ")", "else", ":", "if", "token_string", "in", "(", "'}'", ",", "')'", ",", "']'", ")", ":", "if", "prev_tok", "[", "1", "]", "==", "','", ":", "out", "=", "out", ".", "rstrip", "(", "','", ")", "if", "joining_strings", ":", "# NOTE: Using triple quotes so that this logic works with", "# mixed strings using both single quotes and double quotes.", "out", "+=", "\"'''\"", "+", "new_string", "+", "\"'''\"", "joining_strings", "=", "False", "if", "token_string", "==", "'@'", ":", "# Decorators need special handling", "if", "prev_tok", "[", "0", "]", "==", "tokenize", ".", "NEWLINE", ":", "# Ensure it gets indented properly", "out", "+=", "(", "\" \"", "*", "(", "start_col", "-", "last_col", ")", ")", "if", "not", "joining_strings", ":", "out", "+=", "token_string", "last_col", "=", "end_col", "last_lineno", "=", "end_line", "prev_tok", "=", "tok", "return", "out" ]
37.291667
17.041667
def set_list(self, mutagen_file, values): """Set all values for the field using this style. `values` should be an iterable. """ self.store(mutagen_file, [self.serialize(value) for value in values])
[ "def", "set_list", "(", "self", ",", "mutagen_file", ",", "values", ")", ":", "self", ".", "store", "(", "mutagen_file", ",", "[", "self", ".", "serialize", "(", "value", ")", "for", "value", "in", "values", "]", ")" ]
45
9.6
def is_multisig_script(script, blockchain='bitcoin', **blockchain_opts): """ Is the given script a multisig script? """ if blockchain == 'bitcoin': return btc_is_multisig_script(script, **blockchain_opts) else: raise ValueError('Unknown blockchain "{}"'.format(blockchain))
[ "def", "is_multisig_script", "(", "script", ",", "blockchain", "=", "'bitcoin'", ",", "*", "*", "blockchain_opts", ")", ":", "if", "blockchain", "==", "'bitcoin'", ":", "return", "btc_is_multisig_script", "(", "script", ",", "*", "*", "blockchain_opts", ")", "else", ":", "raise", "ValueError", "(", "'Unknown blockchain \"{}\"'", ".", "format", "(", "blockchain", ")", ")" ]
37.75
16
def _post_deactivate_injection(self):
    """
    Injects functions after the deactivation routine of child classes has been called.

    :return: None
    """
    # Lets be sure that active is really set to false.
    self.active = False
    self.app.signals.send("plugin_deactivate_post", self)
    # After all receivers are handled. We start to clean up signals and receivers of this plugin
    # Attention: This signal clean must not be called via a signal (like in other patterns),
    # because the call order of receivers is not clear and a signal/receiver clean up would prohibit the call
    # of all "later" receivers.
    self.signals.deactivate_plugin_signals()
[ "def", "_post_deactivate_injection", "(", "self", ")", ":", "# Lets be sure that active is really set to false.", "self", ".", "active", "=", "False", "self", ".", "app", ".", "signals", ".", "send", "(", "\"plugin_deactivate_post\"", ",", "self", ")", "# After all receivers are handled. We start to clean up signals and receivers of this plugin", "# Attention: This signal clean must not be called via a signal (like in other patterns),", "# because the call order of receivers is not clear and a signal/receiver clean up would prohibit the call", "# of all \"later\" receivers.", "self", ".", "signals", ".", "deactivate_plugin_signals", "(", ")" ]
54
24.615385
def user_info(self, username): """ Get info of a specific user. :param username: the username of the user to get info about :return: """ request_url = "{}/api/0/user/{}".format(self.instance, username) return_value = self._call_api(request_url) return return_value
[ "def", "user_info", "(", "self", ",", "username", ")", ":", "request_url", "=", "\"{}/api/0/user/{}\"", ".", "format", "(", "self", ".", "instance", ",", "username", ")", "return_value", "=", "self", ".", "_call_api", "(", "request_url", ")", "return", "return_value" ]
29.090909
18.181818
def build_agency(relation, nodes): """Extract agency information.""" # TODO: find out the operator for routes without operator tag. # See: http://wiki.openstreetmap.org/wiki/Key:operator # Quote from the above link: # # If the vast majority of a certain object in an area is operated by a certain # organization and only very few by others then it may be sufficient to only tag the # exceptions. For example, when nearly all roads in an area are managed by a local # authority then it would be sufficient to only tag those that are not with an operator # tag. op = relation.tags.get('operator') agency_url = relation.tags.get('url') or relation.tags.get('contact_website') if not op: return agency_id = int(hashlib.sha256(op.encode('utf8')).hexdigest(), 16) % 10**8 return Agency(agency_id, agency_url, op, '')
[ "def", "build_agency", "(", "relation", ",", "nodes", ")", ":", "# TODO: find out the operator for routes without operator tag.", "# See: http://wiki.openstreetmap.org/wiki/Key:operator", "# Quote from the above link:", "#", "# If the vast majority of a certain object in an area is operated by a certain", "# organization and only very few by others then it may be sufficient to only tag the", "# exceptions. For example, when nearly all roads in an area are managed by a local", "# authority then it would be sufficient to only tag those that are not with an operator", "# tag.", "op", "=", "relation", ".", "tags", ".", "get", "(", "'operator'", ")", "agency_url", "=", "relation", ".", "tags", ".", "get", "(", "'url'", ")", "or", "relation", ".", "tags", ".", "get", "(", "'contact_website'", ")", "if", "not", "op", ":", "return", "agency_id", "=", "int", "(", "hashlib", ".", "sha256", "(", "op", ".", "encode", "(", "'utf8'", ")", ")", ".", "hexdigest", "(", ")", ",", "16", ")", "%", "10", "**", "8", "return", "Agency", "(", "agency_id", ",", "agency_url", ",", "op", ",", "''", ")" ]
43.85
29
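The agency id is a stable hash of the operator tag; this standalone snippet reproduces that derivation with a hypothetical operator name:

import hashlib

op = 'Example Transit Authority'  # hypothetical operator tag
agency_id = int(hashlib.sha256(op.encode('utf8')).hexdigest(), 16) % 10**8
print(agency_id)  # the same operator name always maps to the same id (< 10**8)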
def display_the_graphic_connection(self):
    """
    Attaches the function "display_the_graphic" to the slider.
    A connection cannot pass parameters to the connected function,
    but "display_the_graphic" takes some, so this wrapper supplies them.
    """
    self.display_the_graphic(self.num_line, self.wavelength, self.data_wanted, self.information)
[ "def", "display_the_graphic_connection", "(", "self", ")", ":", "self", ".", "display_the_graphic", "(", "self", ".", "num_line", ",", "self", ".", "wavelength", ",", "self", ".", "data_wanted", ",", "self", ".", "information", ")" ]
62.333333
32
def add_account_certificate(self, account_id, body, **kwargs): # noqa: E501 """Upload new trusted certificate. # noqa: E501 An endpoint for uploading new trusted certificates. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/trusted-certificates -d {\"name\": \"myCert1\", \"description\": \"very important cert\", \"certificate\": \"certificate_data\", \"service\": \"lwm2m\"} -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.add_account_certificate(account_id, body, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str account_id: Account ID. (required) :param TrustedCertificateRootReq body: A trusted certificate object with attributes, signature is optional. (required) :return: TrustedCertificateResp If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.add_account_certificate_with_http_info(account_id, body, **kwargs) # noqa: E501 else: (data) = self.add_account_certificate_with_http_info(account_id, body, **kwargs) # noqa: E501 return data
[ "def", "add_account_certificate", "(", "self", ",", "account_id", ",", "body", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "add_account_certificate_with_http_info", "(", "account_id", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "add_account_certificate_with_http_info", "(", "account_id", ",", "body", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
67
40.5
def distort(value):
    """
    Distorts a string by randomly replacing characters in it.

    :param value: a string to distort.

    :return: a distorted string.
    """
    value = value.lower()

    if (RandomBoolean.chance(1, 5)):
        value = value[0:1].upper() + value[1:]

    if (RandomBoolean.chance(1, 3)):
        value = value + random.choice(_symbols)

    return value
[ "def", "distort", "(", "value", ")", ":", "value", "=", "value", ".", "lower", "(", ")", "if", "(", "RandomBoolean", ".", "chance", "(", "1", ",", "5", ")", ")", ":", "value", "=", "value", "[", "0", ":", "1", "]", ".", "upper", "(", ")", "+", "value", "[", "1", ":", "]", "if", "(", "RandomBoolean", ".", "chance", "(", "1", ",", "3", ")", ")", ":", "value", "=", "value", "+", "random", ".", "choice", "(", "_symbols", ")", "return", "value" ]
24.294118
17.941176
def unify_ips():
    """
    Unifies the currently saved IPs. Unification is based on the last IP segment,
    so if there are e.g. 192.168.128.121 and 192.168.128.122, they will be merged
    to 192.168.128.*.
    This is a little aggressive but the spammers are aggressive, too.
    :return: number of merged IPs
    :rtype: int
    """
    processed_ips = 0
    ips = {}

    # query for the IPs, also includes the starred IPs
    for ip in IP.objects.raw(
        'select distinct a.id, a.seg_0, a.seg_1, a.seg_2 '
        'from ip_assembler_ip a, ip_assembler_ip b '
        'where a.seg_0 = b.seg_0 and a.seg_1 = b.seg_1 and a.seg_2 = b.seg_2 and a.seg_3 != b.seg_3 '
        'order by a.seg_0, a.seg_1, a.seg_2',
    ):
        key = '%d.%d.%d' % (int(ip.seg_0), int(ip.seg_1), int(ip.seg_2))
        if not key in ips:
            ips[key] = []
        ips[key].append(ip)

    for key, ip_list in ips.items():

        # check if a starred ip is in list
        starred_ip = None
        for ip in ip_list:
            if ip.seg_3 == '*':
                starred_ip = ip

        if starred_ip is None:
            IP.objects.create(seg_0=ip_list[0].seg_0, seg_1=ip_list[0].seg_1, seg_2=ip_list[0].seg_2, seg_3='*', )

        # delete the other ips
        for ip in ip_list:
            if ip != starred_ip:
                processed_ips += 1
                ip.delete()

    return processed_ips
[ "def", "unify_ips", "(", ")", ":", "processed_ips", "=", "0", "ips", "=", "{", "}", "# query for the IPs, also includes the starred IPs", "for", "ip", "in", "IP", ".", "objects", ".", "raw", "(", "'select distinct a.id, a.seg_0, a.seg_1, a.seg_2 '", "'from ip_assembler_ip a, ip_assembler_ip b '", "'where a.seg_0 = b.seg_0 and a.seg_1 = b.seg_1 and a.seg_2 = b.seg_2 and a.seg_3 != b.seg_3 '", "'order by a.seg_0, a.seg_1, a.seg_2'", ",", ")", ":", "key", "=", "'%d.%d.%d'", "%", "(", "int", "(", "ip", ".", "seg_0", ")", ",", "int", "(", "ip", ".", "seg_1", ")", ",", "int", "(", "ip", ".", "seg_2", ")", ")", "if", "not", "key", "in", "ips", ":", "ips", "[", "key", "]", "=", "[", "]", "ips", "[", "key", "]", ".", "append", "(", "ip", ")", "for", "key", ",", "ip_list", "in", "ips", ".", "items", "(", ")", ":", "# check if a starred ip is in list", "starred_ip", "=", "None", "for", "ip", "in", "ip_list", ":", "if", "ip", ".", "seg_3", "==", "'*'", ":", "starred_ip", "=", "ip", "if", "starred_ip", "is", "None", ":", "IP", ".", "objects", ".", "create", "(", "seg_0", "=", "ip_list", "[", "0", "]", ".", "seg_0", ",", "seg_1", "=", "ip_list", "[", "0", "]", ".", "seg_1", ",", "seg_2", "=", "ip_list", "[", "0", "]", ".", "seg_2", ",", "seg_3", "=", "'*'", ",", ")", "# delete the other ips", "for", "ip", "in", "ip_list", ":", "if", "ip", "!=", "starred_ip", ":", "processed_ips", "+=", "1", "ip", ".", "delete", "(", ")", "return", "processed_ips" ]
36.756098
19.341463
def set_webhook(self, webhook_path: Optional[str] = None, request_handler: Any = WebhookRequestHandler, route_name: str = DEFAULT_ROUTE_NAME, web_app: Optional[Application] = None): """ Set webhook for bot :param webhook_path: Optional[str] (default: None) :param request_handler: Any (default: WebhookRequestHandler) :param route_name: str Name of webhook handler route (default: 'webhook_handler') :param web_app: Optional[Application] (default: None) :return: """ self._prepare_webhook(webhook_path, request_handler, route_name, web_app) self.loop.run_until_complete(self._startup_webhook())
[ "def", "set_webhook", "(", "self", ",", "webhook_path", ":", "Optional", "[", "str", "]", "=", "None", ",", "request_handler", ":", "Any", "=", "WebhookRequestHandler", ",", "route_name", ":", "str", "=", "DEFAULT_ROUTE_NAME", ",", "web_app", ":", "Optional", "[", "Application", "]", "=", "None", ")", ":", "self", ".", "_prepare_webhook", "(", "webhook_path", ",", "request_handler", ",", "route_name", ",", "web_app", ")", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_startup_webhook", "(", ")", ")" ]
52.538462
28.846154
def send_command( self, command, callback=True, command_type=QRTPacketType.PacketCommand ): """ Sends commands to QTM """ if self.transport is not None: cmd_length = len(command) LOG.debug("S: %s", command) self.transport.write( struct.pack( RTCommand % cmd_length, RTheader.size + cmd_length + 1, command_type.value, command.encode(), b"\0", ) ) future = self.loop.create_future() if callback: self.request_queue.append(future) else: future.set_result(None) return future raise QRTCommandException("Not connected!")
[ "def", "send_command", "(", "self", ",", "command", ",", "callback", "=", "True", ",", "command_type", "=", "QRTPacketType", ".", "PacketCommand", ")", ":", "if", "self", ".", "transport", "is", "not", "None", ":", "cmd_length", "=", "len", "(", "command", ")", "LOG", ".", "debug", "(", "\"S: %s\"", ",", "command", ")", "self", ".", "transport", ".", "write", "(", "struct", ".", "pack", "(", "RTCommand", "%", "cmd_length", ",", "RTheader", ".", "size", "+", "cmd_length", "+", "1", ",", "command_type", ".", "value", ",", "command", ".", "encode", "(", ")", ",", "b\"\\0\"", ",", ")", ")", "future", "=", "self", ".", "loop", ".", "create_future", "(", ")", "if", "callback", ":", "self", ".", "request_queue", ".", "append", "(", "future", ")", "else", ":", "future", ".", "set_result", "(", "None", ")", "return", "future", "raise", "QRTCommandException", "(", "\"Not connected!\"", ")" ]
31.6
14.52
def get_scms_for_path(path):
    """
    Returns all scm's found at the given path. If no scm is recognized,
    an empty list is returned.

    :param path: path to directory which should be checked. May be callable.

    :raises VCSError: if given ``path`` is not a directory
    """
    from vcs.backends import get_backend
    if hasattr(path, '__call__'):
        path = path()
    if not os.path.isdir(path):
        raise VCSError("Given path %r is not a directory" % path)

    result = []
    for key in ALIASES:
        dirname = os.path.join(path, '.' + key)
        if os.path.isdir(dirname):
            result.append(key)
            continue
        # We still need to check if it's not bare repository as
        # bare repos don't have working directories
        try:
            get_backend(key)(path)
            result.append(key)
            continue
        except RepositoryError:
            # Wrong backend
            pass
        except VCSError:
            # No backend at all
            pass
    return result
[ "def", "get_scms_for_path", "(", "path", ")", ":", "from", "vcs", ".", "backends", "import", "get_backend", "if", "hasattr", "(", "path", ",", "'__call__'", ")", ":", "path", "=", "path", "(", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "raise", "VCSError", "(", "\"Given path %r is not a directory\"", "%", "path", ")", "result", "=", "[", "]", "for", "key", "in", "ALIASES", ":", "dirname", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.'", "+", "key", ")", "if", "os", ".", "path", ".", "isdir", "(", "dirname", ")", ":", "result", ".", "append", "(", "key", ")", "continue", "# We still need to check if it's not bare repository as", "# bare repos don't have working directories", "try", ":", "get_backend", "(", "key", ")", "(", "path", ")", "result", ".", "append", "(", "key", ")", "continue", "except", "RepositoryError", ":", "# Wrong backend", "pass", "except", "VCSError", ":", "# No backend at all", "pass", "return", "result" ]
29.588235
17.294118
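A hedged usage sketch; the repository path is hypothetical and the import location of VCSError is an assumption:

from vcs.exceptions import VCSError  # assumed import location

try:
    print(get_scms_for_path('/srv/repos/myproject'))  # e.g. ['git']
except VCSError as exc:
    print('not a directory: %s' % exc)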
def main(): """ Entry point for GNS3 server """ if not sys.platform.startswith("win"): if "--daemon" in sys.argv: daemonize() from gns3server.run import run run()
[ "def", "main", "(", ")", ":", "if", "not", "sys", ".", "platform", ".", "startswith", "(", "\"win\"", ")", ":", "if", "\"--daemon\"", "in", "sys", ".", "argv", ":", "daemonize", "(", ")", "from", "gns3server", ".", "run", "import", "run", "run", "(", ")" ]
19.8
14
def draw_plot(self, metrics, labels=None, ylabel=""):
    """
    metrics: One or more metrics parameters. Each represents the history
        of one metric.
    """
    metrics = metrics if isinstance(metrics, list) else [metrics]
    # Loop through metrics
    title = ""
    for i, m in enumerate(metrics):
        label = labels[i] if labels else m.name
        # TODO: use a standard formatting function for values
        title += (" " if title else "") + "{}: {}".format(label, m.data[-1])
        self.ax.plot(m.formatted_steps, m.data, label=label)
    self.ax.set_title(title)
    self.ax.set_ylabel(ylabel)
    self.ax.legend()
    self.ax.set_xlabel("Steps")
    self.ax.xaxis.set_major_locator(plt.AutoLocator())
[ "def", "draw_plot", "(", "self", ",", "metrics", ",", "labels", "=", "None", ",", "ylabel", "=", "\"\"", ")", ":", "metrics", "=", "metrics", "if", "isinstance", "(", "metrics", ",", "list", ")", "else", "[", "metrics", "]", "# Loop through metrics", "title", "=", "\"\"", "for", "i", ",", "m", "in", "enumerate", "(", "metrics", ")", ":", "label", "=", "labels", "[", "i", "]", "if", "labels", "else", "m", ".", "name", "# TODO: use a standard formating function for values", "title", "+=", "(", "\" \"", "if", "title", "else", "\"\"", ")", "+", "\"{}: {}\"", ".", "format", "(", "label", ",", "m", ".", "data", "[", "-", "1", "]", ")", "self", ".", "ax", ".", "plot", "(", "m", ".", "formatted_steps", ",", "m", ".", "data", ",", "label", "=", "label", ")", "self", ".", "ax", ".", "set_title", "(", "title", ")", "self", ".", "ax", ".", "set_ylabel", "(", "ylabel", ")", "self", ".", "ax", ".", "legend", "(", ")", "self", ".", "ax", ".", "set_xlabel", "(", "\"Steps\"", ")", "self", ".", "ax", ".", "xaxis", ".", "set_major_locator", "(", "plt", ".", "AutoLocator", "(", ")", ")" ]
43.166667
15.5
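An illustrative call, assuming metric objects with the three attributes the method reads (name, data, formatted_steps); SimpleNamespace and the visualizer instance are stand-ins:

from types import SimpleNamespace

loss = SimpleNamespace(name='loss', data=[0.9, 0.5, 0.3],
                       formatted_steps=['0', '100', '200'])
acc = SimpleNamespace(name='acc', data=[0.4, 0.7, 0.8],
                      formatted_steps=['0', '100', '200'])

# visualizer is a hypothetical instance of the class that owns draw_plot.
visualizer.draw_plot([loss, acc], ylabel='value')
# The title becomes "loss: 0.3 acc: 0.8", with one plotted line per metric.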
def setVehicleClass(self, typeID, clazz): """setVehicleClass(string, string) -> None Sets the class of vehicles of this type. """ self._connection._sendStringCmd( tc.CMD_SET_VEHICLETYPE_VARIABLE, tc.VAR_VEHICLECLASS, typeID, clazz)
[ "def", "setVehicleClass", "(", "self", ",", "typeID", ",", "clazz", ")", ":", "self", ".", "_connection", ".", "_sendStringCmd", "(", "tc", ".", "CMD_SET_VEHICLETYPE_VARIABLE", ",", "tc", ".", "VAR_VEHICLECLASS", ",", "typeID", ",", "clazz", ")" ]
38.571429
12.714286
def dispatch(self, request): """ View to handle final steps of OAuth based authentication where the user gets redirected back to from the service provider """ login_done_url = reverse(self.adapter.provider_id + "_callback") client = self._get_client(request, login_done_url) if not client.is_valid(): if 'denied' in request.GET: error = AuthError.CANCELLED else: error = AuthError.UNKNOWN extra_context = dict(oauth_client=client) return render_authentication_error( request, self.adapter.provider_id, error=error, extra_context=extra_context) app = self.adapter.get_provider().get_app(request) try: access_token = client.get_access_token() token = SocialToken( app=app, token=access_token['oauth_token'], # .get() -- e.g. Evernote does not feature a secret token_secret=access_token.get('oauth_token_secret', '')) login = self.adapter.complete_login(request, app, token, response=access_token) login.token = token login.state = SocialLogin.unstash_state(request) return complete_social_login(request, login) except OAuthError as e: return render_authentication_error( request, self.adapter.provider_id, exception=e)
[ "def", "dispatch", "(", "self", ",", "request", ")", ":", "login_done_url", "=", "reverse", "(", "self", ".", "adapter", ".", "provider_id", "+", "\"_callback\"", ")", "client", "=", "self", ".", "_get_client", "(", "request", ",", "login_done_url", ")", "if", "not", "client", ".", "is_valid", "(", ")", ":", "if", "'denied'", "in", "request", ".", "GET", ":", "error", "=", "AuthError", ".", "CANCELLED", "else", ":", "error", "=", "AuthError", ".", "UNKNOWN", "extra_context", "=", "dict", "(", "oauth_client", "=", "client", ")", "return", "render_authentication_error", "(", "request", ",", "self", ".", "adapter", ".", "provider_id", ",", "error", "=", "error", ",", "extra_context", "=", "extra_context", ")", "app", "=", "self", ".", "adapter", ".", "get_provider", "(", ")", ".", "get_app", "(", "request", ")", "try", ":", "access_token", "=", "client", ".", "get_access_token", "(", ")", "token", "=", "SocialToken", "(", "app", "=", "app", ",", "token", "=", "access_token", "[", "'oauth_token'", "]", ",", "# .get() -- e.g. Evernote does not feature a secret", "token_secret", "=", "access_token", ".", "get", "(", "'oauth_token_secret'", ",", "''", ")", ")", "login", "=", "self", ".", "adapter", ".", "complete_login", "(", "request", ",", "app", ",", "token", ",", "response", "=", "access_token", ")", "login", ".", "token", "=", "token", "login", ".", "state", "=", "SocialLogin", ".", "unstash_state", "(", "request", ")", "return", "complete_social_login", "(", "request", ",", "login", ")", "except", "OAuthError", "as", "e", ":", "return", "render_authentication_error", "(", "request", ",", "self", ".", "adapter", ".", "provider_id", ",", "exception", "=", "e", ")" ]
43.236842
13.657895
def addStep(self, callback, *args, **kwargs): ''' Add rollback step with optional arguments. If a rollback is triggered, each step is called in LIFO order. ''' self.steps.append((callback, args, kwargs))
[ "def", "addStep", "(", "self", ",", "callback", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "steps", ".", "append", "(", "(", "callback", ",", "args", ",", "kwargs", ")", ")" ]
36.333333
18.333333
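A minimal self-contained sketch of the LIFO rollback pattern this method participates in; the surrounding Rollback class is illustrative, not the library's actual API:

class Rollback(object):
    def __init__(self):
        self.steps = []

    def addStep(self, callback, *args, **kwargs):
        self.steps.append((callback, args, kwargs))

    def rollback(self):
        # Execute steps in LIFO order, as the docstring describes.
        while self.steps:
            callback, args, kwargs = self.steps.pop()
            callback(*args, **kwargs)

rb = Rollback()
rb.addStep(print, 'undo: remove temp dir')
rb.addStep(print, 'undo: close connection')
rb.rollback()  # prints 'undo: close connection' first, then 'undo: remove temp dir'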
def clean_file(in_file, data, prefix="", bedprep_dir=None, simple=None): """Prepare a clean sorted input BED file without headers """ # Remove non-ascii characters. Used in coverage analysis, to support JSON code in one column # and be happy with sambamba: simple = "iconv -c -f utf-8 -t ascii | sed 's/ //g' |" if simple else "" if in_file: if not bedprep_dir: bedprep_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "bedprep")) # Avoid running multiple times with same prefix if prefix and os.path.basename(in_file).startswith(prefix): return in_file out_file = os.path.join(bedprep_dir, "%s%s" % (prefix, os.path.basename(in_file))) out_file = out_file.replace(".interval_list", ".bed") if out_file.endswith(".gz"): out_file = out_file[:-3] if not utils.file_uptodate(out_file, in_file): check_bed_contigs(in_file, data) check_bed_coords(in_file, data) with file_transaction(data, out_file) as tx_out_file: bcbio_py = sys.executable cat_cmd = "zcat" if in_file.endswith(".gz") else "cat" sort_cmd = get_sort_cmd(os.path.dirname(tx_out_file)) cmd = ("{cat_cmd} {in_file} | grep -v ^track | grep -v ^browser | grep -v ^@ | " "grep -v ^# | {simple} " "{bcbio_py} -c 'from bcbio.variation import bedutils; bedutils.remove_bad()' | " "{sort_cmd} -k1,1 -k2,2n > {tx_out_file}") do.run(cmd.format(**locals()), "Prepare cleaned BED file", data) vcfutils.bgzip_and_index(out_file, data.get("config", {}), remove_orig=False) return out_file
[ "def", "clean_file", "(", "in_file", ",", "data", ",", "prefix", "=", "\"\"", ",", "bedprep_dir", "=", "None", ",", "simple", "=", "None", ")", ":", "# Remove non-ascii characters. Used in coverage analysis, to support JSON code in one column", "# and be happy with sambamba:", "simple", "=", "\"iconv -c -f utf-8 -t ascii | sed 's/ //g' |\"", "if", "simple", "else", "\"\"", "if", "in_file", ":", "if", "not", "bedprep_dir", ":", "bedprep_dir", "=", "utils", ".", "safe_makedir", "(", "os", ".", "path", ".", "join", "(", "data", "[", "\"dirs\"", "]", "[", "\"work\"", "]", ",", "\"bedprep\"", ")", ")", "# Avoid running multiple times with same prefix", "if", "prefix", "and", "os", ".", "path", ".", "basename", "(", "in_file", ")", ".", "startswith", "(", "prefix", ")", ":", "return", "in_file", "out_file", "=", "os", ".", "path", ".", "join", "(", "bedprep_dir", ",", "\"%s%s\"", "%", "(", "prefix", ",", "os", ".", "path", ".", "basename", "(", "in_file", ")", ")", ")", "out_file", "=", "out_file", ".", "replace", "(", "\".interval_list\"", ",", "\".bed\"", ")", "if", "out_file", ".", "endswith", "(", "\".gz\"", ")", ":", "out_file", "=", "out_file", "[", ":", "-", "3", "]", "if", "not", "utils", ".", "file_uptodate", "(", "out_file", ",", "in_file", ")", ":", "check_bed_contigs", "(", "in_file", ",", "data", ")", "check_bed_coords", "(", "in_file", ",", "data", ")", "with", "file_transaction", "(", "data", ",", "out_file", ")", "as", "tx_out_file", ":", "bcbio_py", "=", "sys", ".", "executable", "cat_cmd", "=", "\"zcat\"", "if", "in_file", ".", "endswith", "(", "\".gz\"", ")", "else", "\"cat\"", "sort_cmd", "=", "get_sort_cmd", "(", "os", ".", "path", ".", "dirname", "(", "tx_out_file", ")", ")", "cmd", "=", "(", "\"{cat_cmd} {in_file} | grep -v ^track | grep -v ^browser | grep -v ^@ | \"", "\"grep -v ^# | {simple} \"", "\"{bcbio_py} -c 'from bcbio.variation import bedutils; bedutils.remove_bad()' | \"", "\"{sort_cmd} -k1,1 -k2,2n > {tx_out_file}\"", ")", "do", ".", "run", "(", "cmd", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ",", "\"Prepare cleaned BED file\"", ",", "data", ")", "vcfutils", ".", "bgzip_and_index", "(", "out_file", ",", "data", ".", "get", "(", "\"config\"", ",", "{", "}", ")", ",", "remove_orig", "=", "False", ")", "return", "out_file" ]
57.833333
23.733333