Dataset columns:

    text            string    (lengths 89 to 104k)
    code_tokens     list
    avg_line_len    float64   (7.91 to 980)
    score           float64   (0 to 630)

Each sample below gives the text field (function source), followed by its avg_line_len and score.
def typecast(type_, value):
    """
    Tries to smartly typecast the given value with the given type.

    :param type_: The type to try to use for the given value
    :param value: The value to try and typecast to the given type
    :return: The typecasted value if possible, otherwise just the original value
    """
    # NOTE: does not do any special validation of types before casting
    # will just raise errors on type casting failures
    if is_builtin_type(type_) or is_collections_type(type_) or is_enum_type(type_):
        # FIXME: move to Types enum and TYPE_MAPPING entry
        if is_bytes_type(type_):
            return decode_bytes(value)
        return type_(value)
    elif is_regex_type(type_):
        return typecast(str, value)
    elif is_typing_type(type_):
        try:
            base_type = type_.__extra__
        except AttributeError:
            # NOTE: when handling typing._GenericAlias __extra__ is actually __origin__
            base_type = type_.__origin__
        arg_types = type_.__args__
        if is_array_type(type_):
            if len(arg_types) == 1:
                item_type = arg_types[0]
                return base_type([typecast(item_type, item) for item in value])
            else:
                return base_type(value)
        elif is_object_type(type_):
            if len(arg_types) == 2:
                (key_type, item_type) = arg_types
                return base_type(
                    {
                        typecast(key_type, key): typecast(item_type, item)
                        for (key, item) in value.items()
                    }
                )
            else:
                return base_type(value)
        else:
            return base_type(value)
    else:
        return value
avg_line_len: 37.478261, score: 16.978261
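A self-contained sketch of the same dispatch using the stdlib typing introspection helpers instead of the sample's is_*_type predicates, which belong to that codebase and are not shown here:

import typing

def simple_typecast(type_, value):
    # Resolve generic aliases such as typing.List[int] or typing.Dict[str, int].
    origin = typing.get_origin(type_)
    if origin in (list, set, tuple, frozenset):
        args = typing.get_args(type_)
        if len(args) == 1:
            item_type = args[0]
            return origin(simple_typecast(item_type, item) for item in value)
        return origin(value)
    if origin is dict:
        key_type, item_type = typing.get_args(type_)
        return {simple_typecast(key_type, k): simple_typecast(item_type, v)
                for k, v in value.items()}
    # Plain types: just call the constructor.
    return type_(value)

print(simple_typecast(typing.List[int], ["1", "2"]))      # [1, 2]
print(simple_typecast(typing.Dict[str, int], {1: "2"}))   # {'1': 2}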
def autoconf(self):
    """Implements Munin Plugin Auto-Configuration Option.

    @return: True if plugin can be auto-configured, False otherwise.
    """
    opcinfo = OPCinfo(self._host, self._port, self._user, self._password,
                      self._monpath, self._ssl)
    return opcinfo is not None
avg_line_len: 39.111111, score: 18.222222
def owns_endpoint(self, endpoint):
    """Tests if an endpoint name (not path) belongs to this Api.
    Takes into account the Blueprint name part of the endpoint name.

    :param endpoint: The name of the endpoint being checked
    :return: bool
    """
    if self.blueprint:
        if endpoint.startswith(self.blueprint.name):
            endpoint = endpoint.split(self.blueprint.name + '.', 1)[-1]
        else:
            return False
    return endpoint in self.endpoints
avg_line_len: 36.714286, score: 18.285714
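A standalone illustration of the blueprint-prefix stripping above; the endpoint and blueprint names are invented:

endpoint = "admin.user_list"
blueprint_name = "admin"
if endpoint.startswith(blueprint_name):
    # Drop the "blueprint." prefix, splitting at most once.
    endpoint = endpoint.split(blueprint_name + '.', 1)[-1]
print(endpoint)  # user_list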
def write_array_empty(self, key, value):
    """ write a 0-len array """
    # ugly hack for length 0 axes
    arr = np.empty((1,) * value.ndim)
    self._handle.create_array(self.group, key, arr)
    getattr(self.group, key)._v_attrs.value_type = str(value.dtype)
    getattr(self.group, key)._v_attrs.shape = value.shape
avg_line_len: 42.5, score: 13.875
def subs(self, path):
    """
    Search the strings in a config file for a substitutable value, e.g.
    "morphologies_dir": "$COMPONENT_DIR/morphologies",
    """
    #print_v('Checking for: \n  %s, \n  %s \n  in %s'%(self.substitutes,self.init_substitutes,path))
    if type(path) == int or type(path) == float:
        return path
    for s in self.init_substitutes:
        if path.startswith(s):
            path = path.replace(s, self.init_substitutes[s], 1)
    #print_v('  So far: %s'%path)
    for s in self.substitutes:
        if s in path:
            path = path.replace(s, self.substitutes[s])
    #print_v('  Returning: %s'%path)
    return path
avg_line_len: 42.117647, score: 15.882353
def compose_layer(layer, force=False, **kwargs):
    """Compose a single layer with pixels."""
    from PIL import Image, ImageChops

    assert layer.bbox != (0, 0, 0, 0), 'Layer bbox is (0, 0, 0, 0)'
    image = layer.topil(**kwargs)
    if image is None or force:
        texture = create_fill(layer)
        if texture is not None:
            image = texture

    if image is None:
        return image

    # TODO: Group should have the following too.

    # Apply mask.
    if layer.has_mask() and not layer.mask.disabled:
        mask_bbox = layer.mask.bbox
        if ((mask_bbox[2] - mask_bbox[0]) > 0
                and (mask_bbox[3] - mask_bbox[1]) > 0):
            color = layer.mask.background_color
            offset = (mask_bbox[0] - layer.left, mask_bbox[1] - layer.top)
            mask = Image.new('L', image.size, color=color)
            mask.paste(layer.mask.topil(), offset)
            if image.mode.endswith('A'):
                # What should we do here? There are two alpha channels.
                pass
            image.putalpha(mask)
    elif layer.has_vector_mask() and (force or not layer.has_pixels()):
        mask = draw_vector_mask(layer)
        # TODO: Stroke drawing.
        texture = image
        image = Image.new(image.mode, image.size, 'white')
        image.paste(texture, mask=mask)

    # Apply layer fill effects.
    apply_effect(layer, image)

    # Clip layers.
    if layer.has_clip_layers():
        clip_box = extract_bbox(layer.clip_layers)
        inter_box = intersect(layer.bbox, clip_box)
        if inter_box != (0, 0, 0, 0):
            clip_image = compose(layer.clip_layers, bbox=layer.bbox)
            mask = image.getchannel('A')
            if clip_image.mode.endswith('A'):
                mask = ImageChops.multiply(clip_image.getchannel('A'), mask)
            clip_image.putalpha(mask)
            image = _blend(image, clip_image, (0, 0))

    # Apply opacity.
    if layer.opacity < 255:
        opacity = layer.opacity
        if image.mode.endswith('A'):
            opacity = opacity / 255.
            channels = list(image.split())
            channels[-1] = channels[-1].point(lambda x: int(x * opacity))
            image = Image.merge(image.mode, channels)
        else:
            image.putalpha(opacity)

    return image
avg_line_len: 34.830769, score: 16.6
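The closing opacity trick, scaling an existing alpha channel with point() instead of replacing it via putalpha(), runs on its own with Pillow:

from PIL import Image

img = Image.new('RGBA', (2, 2), (255, 0, 0, 200))  # red, partially transparent
opacity = 128 / 255.
channels = list(img.split())
# Multiply the last band (alpha) by the opacity factor, pixel by pixel.
channels[-1] = channels[-1].point(lambda x: int(x * opacity))
img = Image.merge(img.mode, channels)
print(img.getpixel((0, 0)))  # (255, 0, 0, 100)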
def main():
    """Program entry point.
    """
    global cf_verbose, cf_show_comment, cf_charset
    global cf_extract, cf_test_read, cf_test_unrar
    global cf_test_memory

    psw = None

    # parse args
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'p:C:hvcxtRM')
    except getopt.error as ex:
        print(str(ex), file=sys.stderr)
        sys.exit(1)

    for o, v in opts:
        if o == '-p':
            psw = v
        elif o == '-h':
            xprint(usage)
            return
        elif o == '-v':
            cf_verbose += 1
        elif o == '-c':
            cf_show_comment = 1
        elif o == '-x':
            cf_extract = 1
        elif o == '-t':
            cf_test_read += 1
        elif o == '-T':
            cf_test_unrar = 1
        elif o == '-M':
            cf_test_memory = 1
        elif o == '-C':
            cf_charset = v
        else:
            raise Exception("unhandled switch: " + o)

    args2 = []
    for a in args:
        if a[0] == "@":
            for ln in open(a[1:], 'r'):
                fn = ln[:-1]
                args2.append(fn)
        else:
            args2.append(a)
    args = args2

    if not args:
        xprint(usage)

    # pypy .readinto()+memoryview() is buggy
    #if cf_test_read > 1 and hasattr(sys, 'pypy_version_info'):
    #    cf_test_read = 1

    for fn in args:
        test(fn, psw)
avg_line_len: 22.931034, score: 19.137931
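A reminder of the getopt convention used above; a ':' after a letter in the option string means that switch takes a value:

import getopt

opts, args = getopt.getopt(['-p', 'secret', '-vv', 'a.rar'], 'p:C:hvcxtRM')
print(opts)  # [('-p', 'secret'), ('-v', ''), ('-v', '')]
print(args)  # ['a.rar']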
def from_json_path(path: str, check_version: bool = True) -> BELGraph:
    """Build a graph from a file containing Node-Link JSON."""
    with open(os.path.expanduser(path)) as f:
        return from_json_file(f, check_version=check_version)
avg_line_len: 59.5, score: 14
def SlideShapeFactory(shape_elm, parent):
    """
    Return an instance of the appropriate shape proxy class for *shape_elm*
    on a slide.
    """
    if shape_elm.has_ph_elm:
        return _SlidePlaceholderFactory(shape_elm, parent)
    return BaseShapeFactory(shape_elm, parent)
avg_line_len: 34.625, score: 12.125
def delete_sequence_rule(self, sequence_rule_id):
    """Deletes a ``SequenceRule``.

    arg:    sequence_rule_id (osid.id.Id): the ``Id`` of the
            ``SequenceRule`` to remove
    raise:  NotFound - ``sequence_rule_id`` not found
    raise:  NullArgument - ``sequence_rule_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.resource.ResourceAdminSession.delete_resource_template
    collection = JSONClientValidated('assessment_authoring',
                                     collection='SequenceRule',
                                     runtime=self._runtime)
    if not isinstance(sequence_rule_id, ABCId):
        raise errors.InvalidArgument('the argument is not a valid OSID Id')
    sequence_rule_map = collection.find_one(
        dict({'_id': ObjectId(sequence_rule_id.get_identifier())},
             **self._view_filter()))
    objects.SequenceRule(osid_object_map=sequence_rule_map,
                         runtime=self._runtime,
                         proxy=self._proxy)._delete()
    collection.delete_one({'_id': ObjectId(sequence_rule_id.get_identifier())})
avg_line_len: 51.8, score: 22.72
def find_enclosing_bracket_left(self, left_ch, right_ch, start_pos=None):
    """
    Find the left bracket enclosing current position. Return the relative
    position to the cursor position.

    When `start_pos` is given, don't look past the position.
    """
    if self.current_char == left_ch:
        return 0

    if start_pos is None:
        start_pos = 0
    else:
        start_pos = max(0, start_pos)

    stack = 1

    # Look backward.
    for i in range(self.cursor_position - 1, start_pos - 1, -1):
        c = self.text[i]

        if c == right_ch:
            stack += 1
        elif c == left_ch:
            stack -= 1

        if stack == 0:
            return i - self.cursor_position
avg_line_len: 27.321429, score: 19.892857
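The same backward stack scan, stripped of the document/cursor object so it runs on a plain string; the function name and sample input are illustrative:

def enclosing_left_bracket(text, cursor, left_ch='(', right_ch=')'):
    # Walk left from the cursor, counting unmatched right brackets.
    stack = 1
    for i in range(cursor - 1, -1, -1):
        if text[i] == right_ch:
            stack += 1
        elif text[i] == left_ch:
            stack -= 1
        if stack == 0:
            return i - cursor  # relative offset, as in the sample
    return None

print(enclosing_left_bracket("f(a, (b))", 7))  # -2: the '(' just before 'b'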
def to_igraph(self, attribute="weight", **kwargs):
    """Convert to an igraph Graph

    Uses the igraph.Graph.Weighted_Adjacency constructor

    Parameters
    ----------
    attribute : str, optional (default: "weight")

    kwargs : additional arguments for igraph.Graph.Weighted_Adjacency
    """
    try:
        import igraph as ig
    except ImportError:
        raise ImportError("Please install igraph with "
                          "`pip install --user python-igraph`.")
    try:
        W = self.W
    except AttributeError:
        # not a pygsp graph
        W = self.K.copy()
        W = utils.set_diagonal(W, 0)
    return ig.Graph.Weighted_Adjacency(utils.to_dense(W).tolist(),
                                       attr=attribute, **kwargs)
avg_line_len: 35.608696, score: 18.73913
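A minimal run of the wrapped constructor; this assumes the python-igraph package is installed and uses the mode names accepted by recent igraph releases:

import igraph as ig

W = [[0, 2, 0],
     [2, 0, 1],
     [0, 1, 0]]
# Build an undirected graph from the symmetric weight matrix.
g = ig.Graph.Weighted_Adjacency(W, mode="undirected", attr="weight")
print(g.es["weight"])  # [2, 1]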
def DbGetDeviceInfo(self, argin):
    """ Returns info from DbImportDevice and started/stopped dates.

    :param argin: Device name
    :type: tango.DevString
    :return:
        Str[0] = Device name
        Str[1] = CORBA IOR
        Str[2] = Device version
        Str[3] = Device Server name
        Str[4] = Device Server process host name
        Str[5] = Started date (or ? if not set)
        Str[6] = Stopped date (or ? if not set)
        Str[7] = Device class

        Lg[0] = Device exported flag
        Lg[1] = Device Server process PID (or -1 if not set)
    :rtype: tango.DevVarLongStringArray
    """
    self._log.debug("In DbGetDeviceInfo()")
    ret, dev_name, dfm = check_device_name(argin)
    if not ret:
        th_exc(DB_IncorrectDeviceName,
               "device name (" + argin + ") syntax error (should be [tango:][//instance/]domain/family/member)",
               "DataBase::DbGetDeviceAlias()")
    return self.db.get_device_info(dev_name)
avg_line_len: 39.32, score: 14.68
def get_CrossCatClient(client_type, **kwargs):
    """Helper which instantiates the appropriate Engine and returns a Client"""
    client = None
    if client_type == 'local':
        import crosscat.LocalEngine as LocalEngine
        le = LocalEngine.LocalEngine(**kwargs)
        client = CrossCatClient(le)
    elif client_type == 'multiprocessing':
        import crosscat.MultiprocessingEngine as MultiprocessingEngine
        me = MultiprocessingEngine.MultiprocessingEngine(**kwargs)
        client = CrossCatClient(me)
    else:
        raise Exception('unknown client_type: %s' % client_type)
    return client
avg_line_len: 33.722222, score: 20.111111
def transform_lattice(self, lattice):
    # type: (Lattice) -> Lattice
    """
    Takes a Lattice object and transforms it.

    :param lattice: Lattice
    :return:
    """
    return Lattice(np.matmul(lattice.matrix, self.P))
avg_line_len: 31.125, score: 8.125
def plotlyviz(
    scomplex,
    colorscale=None,
    title="Kepler Mapper",
    graph_layout="kk",
    color_function=None,
    color_function_name=None,
    dashboard=False,
    graph_data=False,
    factor_size=3,
    edge_linewidth=1.5,
    node_linecolor="rgb(200,200,200)",
    width=600,
    height=500,
    bgcolor="rgba(240, 240, 240, 0.95)",
    left=10,
    bottom=35,
    summary_height=300,
    summary_width=600,
    summary_left=20,
    summary_right=20,
    hist_left=25,
    hist_right=25,
    member_textbox_width=800,
    filename=None,
):
    """
    Visualizations and dashboards for kmapper graphs using Plotly. This
    method is suitable for use in Jupyter notebooks.

    The generated FigureWidget can be updated (by performing a restyle or
    relayout). For example, let us add a title to the colorbar (the name of
    the color function, if any), and set the title font size. To perform
    these updates faster, Plotly 3.+ provides a context manager that batches
    up all data and layout updates.

    To display more info on the generated kmapper-graph, define two more
    FigureWidget(s): the global node distribution figure, and a dummy figure
    that displays info on the algorithms involved in getting the graph from
    data, as well as sklearn class instances.

    A FigureWidget has event listeners for hovering, clicking or selecting.
    Using the first one for `fw_graph` we define, via the function
    `hovering_widgets()`, widgets that display the node distribution, when
    the node is hovered over, and two textboxes for the cluster size and the
    member ids/labels of the hovered node members.

    Parameters
    -----------
    scomplex: dict
        Simplicial complex is the output from the KeplerMapper `map` method.
    title: str
        Title of output graphic
    graph_layout: igraph layout
        recommended 'kk' (kamada-kawai) or 'fr' (fruchterman-reingold)
    colorscale:
        Plotly colorscale(colormap) to color graph nodes
    dashboard: bool, default is False
        If true, display complete dashboard of node information
    graph_data: bool, default is False
        If true, display graph metadata
    factor_size: double, default is 3
        a factor for the node size
    edge_linewidth: double, default is 1.5
    node_linecolor: color str, default is "rgb(200,200,200)"
    width: int, default is 600
    height: int, default is 500
    bgcolor: color str, default is "rgba(240, 240, 240, 0.95)"
    left: int, default is 10
    bottom: int, default is 35
    summary_height: int, default is 300
    summary_width: int, default is 600
    summary_left: int, default is 20
    summary_right: int, default is 20
    hist_left: int, default is 25
    hist_right: int, default is 25
    member_textbox_width: int, default is 800
    filename: str, default is None
        if filename is given, the graphic will be saved to that file.

    Returns
    ---------
    result: plotly.FigureWidget
        A FigureWidget that can be shown or edited. See the Plotly Demo
        notebook for examples of use.

    """
    if not colorscale:
        colorscale = default_colorscale

    kmgraph, mapper_summary, n_color_distribution = get_mapper_graph(
        scomplex,
        colorscale=colorscale,
        color_function=color_function,
        color_function_name=color_function_name,
    )

    annotation = get_kmgraph_meta(mapper_summary)

    plgraph_data = plotly_graph(
        kmgraph,
        graph_layout=graph_layout,
        colorscale=colorscale,
        factor_size=factor_size,
        edge_linewidth=edge_linewidth,
        node_linecolor=node_linecolor,
    )

    layout = plot_layout(
        title=title,
        width=width,
        height=height,
        annotation_text=annotation,
        bgcolor=bgcolor,
        left=left,
        bottom=bottom,
    )

    result = go.FigureWidget(data=plgraph_data, layout=layout)

    if color_function_name:
        with result.batch_update():
            result.data[1].marker.colorbar.title = color_function_name
            result.data[1].marker.colorbar.titlefont.size = 10

    if dashboard or graph_data:
        fw_hist = node_hist_fig(n_color_distribution, left=hist_left,
                                right=hist_right)
        fw_summary = summary_fig(
            mapper_summary,
            width=summary_width,
            height=summary_height,
            left=summary_left,
            right=summary_right,
        )

        fw_graph = result
        result = hovering_widgets(
            kmgraph, fw_graph, member_textbox_width=member_textbox_width
        )

        if graph_data:
            result = ipw.VBox([fw_graph, ipw.HBox([fw_summary, fw_hist])])

    if filename:
        pio.write_image(result, filename)

    return result
avg_line_len: 32.880795, score: 23.993377
def serial_number(self, serial_number):
    """
    Sets the serial_number of this DeviceDataPostRequest.
    The serial number of the device.

    :param serial_number: The serial_number of this DeviceDataPostRequest.
    :type: str
    """
    if serial_number is not None and len(serial_number) > 64:
        raise ValueError("Invalid value for `serial_number`, length must be less than or equal to `64`")

    self._serial_number = serial_number
avg_line_len: 39.5, score: 21.5
def recv(self, timeout=None):
    """Receive, optionally with *timeout* in seconds.
    """
    if timeout:
        timeout *= 1000.

    for sub in list(self.subscribers) + self._hooks:
        self.poller.register(sub, POLLIN)
    self._loop = True
    try:
        while self._loop:
            sleep(0)
            try:
                socks = dict(self.poller.poll(timeout=timeout))
                if socks:
                    for sub in self.subscribers:
                        if sub in socks and socks[sub] == POLLIN:
                            m__ = Message.decode(sub.recv_string(NOBLOCK))
                            if not self._filter or self._filter(m__):
                                if self._translate:
                                    url = urlsplit(self.sub_addr[sub])
                                    host = url[1].split(":")[0]
                                    m__.sender = (m__.sender.split("@")[0]
                                                  + "@" + host)
                                yield m__
                    for sub in self._hooks:
                        if sub in socks and socks[sub] == POLLIN:
                            m__ = Message.decode(sub.recv_string(NOBLOCK))
                            self._hooks_cb[sub](m__)
                else:
                    # timeout
                    yield None
            except ZMQError as err:
                LOGGER.exception("Receive failed: %s", str(err))
    finally:
        for sub in list(self.subscribers) + self._hooks:
            self.poller.unregister(sub)
avg_line_len: 44.131579, score: 19.078947
def register_listener(self, listener, interesting, active):
    """Register an event listener.

    To avoid system overload, the VirtualBox server process checks if
    passive event listeners call :py:func:`IEventSource.get_event`
    frequently enough. In the current implementation, if more than 500
    pending events are detected for a passive event listener, it is
    forcefully unregistered by the system, and further
    :py:func:`get_event` calls will return @c VBOX_E_OBJECT_NOT_FOUND.

    in listener of type :class:`IEventListener`
        Listener to register.

    in interesting of type :class:`VBoxEventType`
        Event types listener is interested in. One can use wildcards like -
        :py:attr:`VBoxEventType.any_p` to specify wildcards, matching more
        than one event.

    in active of type bool
        Which mode this listener is operating in.
        In active mode, :py:func:`IEventListener.handle_event` is called
        directly. In passive mode, an internal event queue is created for
        this IEventListener. For each event coming in, it is added to
        queues for all interested registered passive listeners. It is then
        up to the external code to call the listener's
        :py:func:`IEventListener.handle_event` method. When done with an
        event, the external code must call :py:func:`event_processed`.

    """
    if not isinstance(listener, IEventListener):
        raise TypeError("listener can only be an instance of type IEventListener")
    if not isinstance(interesting, list):
        raise TypeError("interesting can only be an instance of type list")
    for a in interesting[:10]:
        if not isinstance(a, VBoxEventType):
            raise TypeError(
                "array can only contain objects of type VBoxEventType")
    if not isinstance(active, bool):
        raise TypeError("active can only be an instance of type bool")
    self._call("registerListener",
               in_p=[listener, interesting, active])
avg_line_len: 52.95, score: 27.225
def from_cli(cls, opts):
    """Loads a config file from the given options, with overrides and
    deletes applied.
    """
    # read configuration file
    logging.info("Reading configuration file")

    if opts.config_overrides is not None:
        overrides = [override.split(":")
                     for override in opts.config_overrides]
    else:
        overrides = None

    if opts.config_delete is not None:
        deletes = [delete.split(":") for delete in opts.config_delete]
    else:
        deletes = None

    return cls(opts.config_files, overrides, deleteTuples=deletes)
avg_line_len: 39.5625, score: 14.1875
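The override strings are colon-separated triples; a standalone look at the split, with invented values:

config_overrides = ["workflow:start-time:1126259462", "engine:nprocs:4"]
overrides = [override.split(":") for override in config_overrides]
print(overrides)
# [['workflow', 'start-time', '1126259462'], ['engine', 'nprocs', '4']]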
def load(cls, context_id, persistence_engine=None):
    """Load and instantiate a Context from the persistence_engine."""
    if not persistence_engine:
        from furious.config import get_default_persistence_engine
        persistence_engine = get_default_persistence_engine()

    if not persistence_engine:
        raise RuntimeError(
            'Specify a valid persistence_engine to load the context.')

    return persistence_engine.load_context(context_id)
avg_line_len: 44.454545, score: 19.818182
def rgbmap_cb(self, rgbmap, channel):
    """
    This method is called when the RGBMap is changed.  We update
    the ColorBar to match.
    """
    if not self.gui_up:
        return
    fitsimage = channel.fitsimage
    if fitsimage != self.fv.getfocus_fitsimage():
        return False
    self.change_cbar(self.fv, channel)
avg_line_len: 32.545455, score: 10
def _set(self, data, version):
    """serialize and set data to self.path."""
    self.zk.set(self.path, json.dumps(data), version)
avg_line_len: 34.25, score: 16.75
def matches(self, string, fuzzy=90, fname_match=True, fuzzy_fragment=None, guess=False):
    '''Return this :class:`Concept`'s matches against ``string``, sorted.'''
    matches = []
    for item in self.examples:
        m = best_match_from_list(string, self.examples[item], fuzzy,
                                 fname_match, fuzzy_fragment, guess)
        if m:
            match = ConceptMatch(self)
            match.concept = self
            match.string = string
            match.item = item
            match.examples = m[0]
            match.type = m[2]
            match.amount = m[3]
            matches.append(match)
    return sort_matches(matches)
avg_line_len: 41.3125, score: 15.9375
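best_match_from_list is not shown here; as a rough stand-in, a 0-100 fuzzy score like the fuzzy=90 threshold can be computed with the stdlib difflib (an analogy, not the sample's actual matcher):

import difflib

def fuzzy_score(a, b):
    # 0-100 similarity, comparable to fuzzywuzzy-style ratios.
    return int(100 * difflib.SequenceMatcher(None, a.lower(), b.lower()).ratio())

examples = ["T1-weighted", "T1w", "anatomical"]
string = "t1 weighted"
best = max(examples, key=lambda e: fuzzy_score(string, e))
print(best, fuzzy_score(string, best))  # T1-weighted 90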
def set_cookie(self, name: str, value: str, *,
               expires: Optional[str]=None,
               domain: Optional[str]=None,
               max_age: Optional[Union[int, str]]=None,
               path: str='/',
               secure: Optional[str]=None,
               httponly: Optional[str]=None,
               version: Optional[str]=None) -> None:
    """Set or update response cookie.

    Sets new cookie or updates existent with new value.
    Also updates only those params which are not None.
    """
    old = self._cookies.get(name)
    if old is not None and old.coded_value == '':
        # deleted cookie
        self._cookies.pop(name, None)

    self._cookies[name] = value
    c = self._cookies[name]

    if expires is not None:
        c['expires'] = expires
    elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
        del c['expires']

    if domain is not None:
        c['domain'] = domain

    if max_age is not None:
        c['max-age'] = str(max_age)
    elif 'max-age' in c:
        del c['max-age']

    c['path'] = path

    if secure is not None:
        c['secure'] = secure
    if httponly is not None:
        c['httponly'] = httponly
    if version is not None:
        c['version'] = version
avg_line_len: 31.162791, score: 14.883721
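The _cookies container is used like the stdlib http.cookies.SimpleCookie, whose morsels accept the same attribute keys:

from http.cookies import SimpleCookie

cookies = SimpleCookie()
cookies['session'] = 'abc123'
cookies['session']['path'] = '/'
cookies['session']['max-age'] = str(3600)
cookies['session']['httponly'] = True
print(cookies.output())
# e.g. Set-Cookie: session=abc123; HttpOnly; Max-Age=3600; Path=/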
def validate_positive_float(option, value):
    """Validates that 'value' is a float, or can be converted to one, and is
    positive.
    """
    errmsg = "%s must be an integer or float" % (option,)
    try:
        value = float(value)
    except ValueError:
        raise ValueError(errmsg)
    except TypeError:
        raise TypeError(errmsg)

    # float('inf') doesn't work in 2.4 or 2.5 on Windows, so just cap floats at
    # one billion - this is a reasonable approximation for infinity
    if not 0 < value < 1e9:
        raise ValueError("%s must be greater than 0 and "
                         "less than one billion" % (option,))
    return value
avg_line_len: 36, score: 17.944444
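Since the definition above has no external dependencies, its boundary behavior can be exercised directly alongside it:

print(validate_positive_float('timeout', '2.5'))  # 2.5
try:
    validate_positive_float('timeout', -1)
except ValueError as e:
    print(e)  # timeout must be greater than 0 and less than one billion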
def _set_view(self):
    """Assign a view to current graph"""
    if self.logarithmic:
        view_class = PolarThetaLogView
    else:
        view_class = PolarThetaView

    self.view = view_class(
        self.width - self.margin_box.x,
        self.height - self.margin_box.y,
        self._box
    )
avg_line_len: 29.363636, score: 17.909091
def merge(self, po_file, source_files):
    """Collect all entries from the source files and merge them into po_file.

    :param string po_file: Path of the po file to write.
    :param list source_files: List of paths of all source files to process.
    """
    # Create a temporary file to write pot file
    pot_file = tempfile.NamedTemporaryFile(mode='wb', prefix='rookout_', delete=False)
    pot_filename = pot_file.name
    slog.info('Create POT file [%s].', pot_filename)
    xargs = [self._xgettext,
             "--package-name=main",
             "--package-version=0.1",
             "--default-domain=main",
             "--from-code=UTF-8",
             "-C", "-k_",
             "--output", pot_filename]
    txt = subprocess.check_output(xargs + source_files,
                                  stderr=subprocess.STDOUT,
                                  universal_newlines=True)
    if len(txt) > 0:
        raise ChildProcessError(txt)
    slog.info('Start merge [%s] to [%s].', pot_filename, po_file)
    xargs = [self._msgmerge, "-U", po_file, pot_filename]
    txt = subprocess.check_output(xargs, universal_newlines=True)
    slog.info(txt)
    pot_file.close()
    os.remove(pot_filename)
avg_line_len: 39.724138, score: 12.586207
def annotate_rule_violation(self, rule: ValidationRule) -> None:
    """
    Takes note of a rule validation failure by collecting its error message.

    :param rule: Rule that failed validation.
    :type rule: ValidationRule
    :return: None
    """
    if self.errors.get(rule.label) is None:
        self.errors[rule.label] = []
    self.errors[rule.label].append(rule.get_error_message())
avg_line_len: 38.272727, score: 15.363636
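The get-then-create guard is the classic dict.setdefault pattern, shown standalone with invented labels:

errors = {}
for label, message in [("age", "must be positive"), ("age", "must be an int")]:
    # Create the list on first use, then append either way.
    errors.setdefault(label, []).append(message)
print(errors)  # {'age': ['must be positive', 'must be an int']}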
def appendtextindex(table, index_or_dirname, indexname=None, merge=True,
                    optimize=False):
    """
    Load all rows from `table` into a Whoosh index, adding them to any
    existing data in the index.

    Keyword arguments:

    table
        A table container with the data to be loaded.
    index_or_dirname
        Either an instance of `whoosh.index.Index` or a string containing
        the directory path where the index is to be stored.
    indexname
        String containing the name of the index, if multiple indexes are
        stored in the same directory.
    merge
        Merge small segments during commit?
    optimize
        Merge all segments together?

    """
    import whoosh.index

    # deal with polymorphic argument
    if isinstance(index_or_dirname, string_types):
        dirname = index_or_dirname
        index = whoosh.index.open_dir(dirname, indexname=indexname,
                                      readonly=False)
        needs_closing = True
    elif isinstance(index_or_dirname, whoosh.index.Index):
        index = index_or_dirname
        needs_closing = False
    else:
        raise ArgumentError('expected string or index, found %r'
                            % index_or_dirname)

    writer = index.writer()
    try:
        for d in dicts(table):
            writer.add_document(**d)
        writer.commit(merge=merge, optimize=optimize)
    except Exception:
        writer.cancel()
        raise
    finally:
        if needs_closing:
            index.close()
avg_line_len: 29.098039, score: 20.980392
def execute(self):
    """Output environment name."""
    # Disable other runway logging so the only response is the env name
    logging.getLogger('runway').setLevel(logging.ERROR)

    # This may be invoked from a module directory in an environment;
    # account for that here if necessary
    if not os.path.isfile('runway.yml'):
        self.env_root = os.path.dirname(os.getcwd())
        self.runway_config_path = os.path.join(self.env_root, 'runway.yml')

    print(get_env(
        self.env_root,
        self.runway_config.get('ignore_git_branch', False)
    ))
avg_line_len: 40.333333, score: 22.333333
def __update_interval(self):
    """ highlight the current line """
    self.update()
    self.after_id = self.text.after(250, self.__update_interval)
avg_line_len: 39.75, score: 14.75
def draw(self, X, y, **kwargs):
    """
    Called from the fit method, this method creates the parallel
    coordinates canvas and draws each instance and vertical lines on it.

    Parameters
    ----------
    X : ndarray of shape n x m
        A matrix of n instances with m features

    y : ndarray of length n
        An array or series of target or class values

    kwargs : dict
        Pass generic arguments to the drawing method
    """
    if self.fast:
        return self.draw_classes(X, y, **kwargs)
    return self.draw_instances(X, y, **kwargs)
avg_line_len: 30.25, score: 19.75
def update(self, params, ignore_set=False, overwrite=False):
    """Set instance values from dictionary.

    :param dict params: Click context params.
    :param bool ignore_set: Skip already-set values instead of raising AttributeError.
    :param bool overwrite: Allow overwriting already-set values.
    """
    log = logging.getLogger(__name__)
    valid = {i[0] for i in self}
    for key, value in params.items():
        if not hasattr(self, key):
            raise AttributeError("'{}' object has no attribute '{}'".format(self.__class__.__name__, key))
        if key not in valid:
            message = "'{}' object does not support item assignment on '{}'"
            raise AttributeError(message.format(self.__class__.__name__, key))
        if key in self._already_set:
            if ignore_set:
                log.debug('%s already set in config, skipping.', key)
                continue
            if not overwrite:
                message = "'{}' object does not support item re-assignment on '{}'"
                raise AttributeError(message.format(self.__class__.__name__, key))
        setattr(self, key, value)
        self._already_set.add(key)
avg_line_len: 51.541667, score: 19.791667
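A compact standalone run of the same hasattr/setattr guard; the class and parameter values are invented, and unknown keys are skipped here rather than raised on:

class Config:
    retries = 3
    timeout = 10

cfg = Config()
params = {"timeout": 30, "verbose": True}
for key, value in params.items():
    if not hasattr(cfg, key):
        print("skipping unknown key:", key)  # skipping unknown key: verbose
        continue
    setattr(cfg, key, value)
print(cfg.timeout)  # 30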
def _hexdecode(hexstring):
    """Convert a hex encoded string to a byte string.

    For example '4A' will return 'J', and '04' will return ``'\\x04'``
    (which has length 1).

    Args:
        hexstring (str): Can be for example 'A3' or 'A3B4'. Must be of even
        length. Allowed characters are '0' to '9', 'a' to 'f' and 'A' to 'F'
        (not space).

    Returns:
        A string of half the length, with characters corresponding to all
        0-255 values for each byte.

    Raises:
        TypeError, ValueError

    """
    # Note: For Python3 the appropriate would be: raise TypeError(new_error_message) from err
    # but the Python2 interpreter will indicate SyntaxError.
    # Thus we need to live with this warning in Python3:
    # 'During handling of the above exception, another exception occurred'

    _checkString(hexstring, description='hexstring')

    if len(hexstring) % 2 != 0:
        raise ValueError('The input hexstring must be of even length. Given: {!r}'.format(hexstring))

    if sys.version_info[0] > 2:
        by = bytes(hexstring, 'latin1')
        try:
            return str(binascii.unhexlify(by), encoding='latin1')
        except binascii.Error as err:
            new_error_message = 'Hexdecode reported an error: {!s}. Input hexstring: {}'.format(err.args[0], hexstring)
            raise TypeError(new_error_message)
    else:
        try:
            return hexstring.decode('hex')
        except TypeError as err:
            raise TypeError('Hexdecode reported an error: {}. Input hexstring: {}'.format(err.message, hexstring))
[ "def", "_hexdecode", "(", "hexstring", ")", ":", "# Note: For Python3 the appropriate would be: raise TypeError(new_error_message) from err", "# but the Python2 interpreter will indicate SyntaxError.", "# Thus we need to live with this warning in Python3:", "# 'During handling of the above exception, another exception occurred'", "_checkString", "(", "hexstring", ",", "description", "=", "'hexstring'", ")", "if", "len", "(", "hexstring", ")", "%", "2", "!=", "0", ":", "raise", "ValueError", "(", "'The input hexstring must be of even length. Given: {!r}'", ".", "format", "(", "hexstring", ")", ")", "if", "sys", ".", "version_info", "[", "0", "]", ">", "2", ":", "by", "=", "bytes", "(", "hexstring", ",", "'latin1'", ")", "try", ":", "return", "str", "(", "binascii", ".", "unhexlify", "(", "by", ")", ",", "encoding", "=", "'latin1'", ")", "except", "binascii", ".", "Error", "as", "err", ":", "new_error_message", "=", "'Hexdecode reported an error: {!s}. Input hexstring: {}'", ".", "format", "(", "err", ".", "args", "[", "0", "]", ",", "hexstring", ")", "raise", "TypeError", "(", "new_error_message", ")", "else", ":", "try", ":", "return", "hexstring", ".", "decode", "(", "'hex'", ")", "except", "TypeError", "as", "err", ":", "raise", "TypeError", "(", "'Hexdecode reported an error: {}. Input hexstring: {}'", ".", "format", "(", "err", ".", "message", ",", "hexstring", ")", ")" ]
39.205128
29.974359
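For reference, the Python-3-only branch of this conversion can be written as a short standalone sketch (the function name is illustrative):

import binascii

def hexdecode_py3(hexstring):
    # Equivalent of the Python 3 branch above: latin1 maps bytes 0-255
    # one-to-one onto the first 256 code points.
    if len(hexstring) % 2 != 0:
        raise ValueError('The input hexstring must be of even length. Given: {!r}'.format(hexstring))
    try:
        return binascii.unhexlify(hexstring.encode('latin1')).decode('latin1')
    except binascii.Error as err:
        raise TypeError('Hexdecode reported an error: {!s}. Input hexstring: {}'.format(err, hexstring))

print(hexdecode_py3('4A'))        # J
print(repr(hexdecode_py3('04')))  # '\x04'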
async def async_get_image_names(self): """ Parse web server camera view for camera image names """ cookies = self.get_session_cookie() try: async with aiohttp.ClientSession(cookies=cookies) as session: resp = await session.get( self._base_url ) t = await resp.text() match = re.findall('(?:\w|\d|")/(.*?)\.(?:mjpg|jpg)', t) if len(match) == 0: raise XeomaError('Unable to find any camera image names') image_names = set(match) results = [] for image_name in image_names: match = re.search( image_name + '\.(?:mjpg|jpg).*?user=(.*?)&', t ) if match and len(match.group(1)) > 0: d = base64.b64decode(unquote(match.group(1))) \ .decode('ASCII') creds = d.split(':') if len(creds) < 2: raise XeomaError('Error parsing image credentials') results.append((image_name, creds[0], creds[1])) else: results.append((image_name, None, None)) return results except asyncio.TimeoutError as e: raise XeomaError("Unable to connect to Xeoma web server")
[ "async", "def", "async_get_image_names", "(", "self", ")", ":", "cookies", "=", "self", ".", "get_session_cookie", "(", ")", "try", ":", "async", "with", "aiohttp", ".", "ClientSession", "(", "cookies", "=", "cookies", ")", "as", "session", ":", "resp", "=", "await", "session", ".", "get", "(", "self", ".", "_base_url", ")", "t", "=", "await", "resp", ".", "text", "(", ")", "match", "=", "re", ".", "findall", "(", "'(?:\\w|\\d|\")/(.*?).(?:mjpg|jpg)'", ",", "t", ")", "if", "len", "(", "match", ")", "==", "0", ":", "raise", "XeomaError", "(", "'Unable to find any camera image names'", ")", "image_names", "=", "set", "(", "match", ")", "results", "=", "[", "]", "for", "image_name", "in", "image_names", ":", "match", "=", "re", ".", "search", "(", "image_name", "+", "'\\.(?:mjpg|jpg).*?user=(.*?)&'", ",", "t", ")", "if", "match", "and", "len", "(", "match", ".", "group", "(", "1", ")", ")", ">", "0", ":", "d", "=", "base64", ".", "b64decode", "(", "unquote", "(", "match", ".", "group", "(", "1", ")", ")", ")", ".", "decode", "(", "'ASCII'", ")", "creds", "=", "d", ".", "split", "(", "':'", ")", "if", "len", "(", "creds", ")", "<", "2", ":", "raise", "XeomaError", "(", "'Error parsing image credentials'", ")", "results", ".", "append", "(", "(", "image_name", ",", "creds", "[", "0", "]", ",", "creds", "[", "1", "]", ")", ")", "else", ":", "results", ".", "append", "(", "(", "image_name", ",", "None", ",", "None", ")", ")", "return", "results", "except", "asyncio", ".", "TimeoutError", "as", "e", ":", "raise", "XeomaError", "(", "\"Unable to connect to Xeoma web server\"", ")" ]
43.757576
15.515152
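The non-obvious step above is decoding the per-image credentials; a self-contained sketch (with a made-up credential string) shows the same round trip:

import base64
from urllib.parse import quote, unquote

# Build a sample value like the one the page embeds in the user= parameter
encoded = quote(base64.b64encode(b'admin:secret').decode('ascii'))

# Reverse it exactly as the method does: unquote, base64-decode, split on ':'
decoded = base64.b64decode(unquote(encoded)).decode('ascii')
user, password = decoded.split(':', 1)
print(user, password)  # admin secret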
def _write_to_graph(self): """Write the coverage results to a graph""" traces = [] for byte_code, trace_data in self.coverage.items(): traces += [list(trace_data.keys()), list(trace_data.values()), "r--"] plt.plot(*traces) plt.axis([0, self.end - self.begin, 0, 100]) plt.xlabel("Duration (seconds)") plt.ylabel("Coverage (percentage)") plt.savefig("{}.png".format(self.name))
[ "def", "_write_to_graph", "(", "self", ")", ":", "traces", "=", "[", "]", "for", "byte_code", ",", "trace_data", "in", "self", ".", "coverage", ".", "items", "(", ")", ":", "traces", "+=", "[", "list", "(", "trace_data", ".", "keys", "(", ")", ")", ",", "list", "(", "trace_data", ".", "values", "(", ")", ")", ",", "\"r--\"", "]", "plt", ".", "plot", "(", "*", "traces", ")", "plt", ".", "axis", "(", "[", "0", ",", "self", ".", "end", "-", "self", ".", "begin", ",", "0", ",", "100", "]", ")", "plt", ".", "xlabel", "(", "\"Duration (seconds)\"", ")", "plt", ".", "ylabel", "(", "\"Coverage (percentage)\"", ")", "plt", ".", "savefig", "(", "\"{}.png\"", ".", "format", "(", "self", ".", "name", ")", ")" ]
36.916667
17.666667
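A self-contained matplotlib sketch of the same plotting pattern, with invented coverage data and the Agg backend so it runs headless:

import matplotlib
matplotlib.use('Agg')  # render to file without a display
import matplotlib.pyplot as plt

coverage = {
    'contract_a': {0: 10.0, 5: 40.0, 10: 75.0},
    'contract_b': {0: 5.0, 5: 25.0, 10: 60.0},
}
traces = []
for trace_data in coverage.values():
    # x values, y values, then the shared line style, exactly as above
    traces += [list(trace_data.keys()), list(trace_data.values()), 'r--']
plt.plot(*traces)
plt.axis([0, 10, 0, 100])
plt.xlabel('Duration (seconds)')
plt.ylabel('Coverage (percentage)')
plt.savefig('coverage.png')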
def find_proc_date(header): """Search the HISTORY fields of a header looking for the FLIPS processing date. """ import re for h in header.ascardlist(): if h.key=="HISTORY": g=h.value if 'FLIPS 1.0' in g: result=re.search('imred: FLIPS 1.0 - \S{3} (.*) - ([\s\d]\d:\d\d:\d\d)\s*$',g) if result: date=result.group(1) time=result.group(2) datetime=date+" "+time return datetime return None
[ "def", "find_proc_date", "(", "header", ")", ":", "import", "string", ",", "re", "for", "h", "in", "header", ".", "ascardlist", "(", ")", ":", "if", "h", ".", "key", "==", "\"HISTORY\"", ":", "g", "=", "h", ".", "value", "if", "(", "string", ".", "find", "(", "g", ",", "'FLIPS 1.0 -:'", ")", ")", ":", "result", "=", "re", ".", "search", "(", "'imred: FLIPS 1.0 - \\S{3} (.*) - ([\\s\\d]\\d:\\d\\d:\\d\\d)\\s*$'", ",", "g", ")", "if", "result", ":", "date", "=", "result", ".", "group", "(", "1", ")", "time", "=", "result", ".", "group", "(", "2", ")", "datetime", "=", "date", "+", "\" \"", "+", "time", "return", "datetime", "return", "None" ]
35.25
12.4375
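A quick standalone check of the HISTORY regex; the sample card value is invented to match the documented FLIPS format:

import re

g = 'imred: FLIPS 1.0 - Mon Apr 18 2001 - 13:15:21'
result = re.search(r'imred: FLIPS 1.0 - \S{3} (.*) - ([\s\d]\d:\d\d:\d\d)\s*$', g)
if result:
    print(result.group(1), result.group(2))  # Apr 18 2001 13:15:21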
async def save(self): """ Persists the model to the database. If the model holds no primary key, a new one will automatically be created by RethinkDB. Otherwise it will overwrite the current model persisted to the database. """ if hasattr(self, "before_save"): self.before_save() query = r.table(self.table_name) if self._state.get("id"): query = query \ .get(self._state.get("id")) \ .update(self.__db_repr, return_changes=True) else: query = query \ .insert(self.__db_repr, return_changes=True) resp = await query.run(await conn.get()) try: changes = resp["changes"] if len(changes) > 0: self.wrap(resp["changes"][0]["new_val"]) except KeyError: raise UnexpectedDbResponse() if resp["skipped"] > 0: raise UnexpectedDbResponse( "Model with id `%s` not found in the database." % self._state.get("id")) return self
[ "async", "def", "save", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "\"before_save\"", ")", ":", "self", ".", "before_save", "(", ")", "query", "=", "r", ".", "table", "(", "self", ".", "table_name", ")", "if", "self", ".", "_state", ".", "get", "(", "\"id\"", ")", ":", "query", "=", "query", ".", "get", "(", "self", ".", "_state", ".", "get", "(", "\"id\"", ")", ")", ".", "update", "(", "self", ".", "__db_repr", ",", "return_changes", "=", "True", ")", "else", ":", "query", "=", "query", ".", "insert", "(", "self", ".", "__db_repr", ",", "return_changes", "=", "True", ")", "resp", "=", "await", "query", ".", "run", "(", "await", "conn", ".", "get", "(", ")", ")", "try", ":", "changes", "=", "resp", "[", "\"changes\"", "]", "if", "len", "(", "changes", ")", ">", "0", ":", "self", ".", "wrap", "(", "resp", "[", "\"changes\"", "]", "[", "0", "]", "[", "\"new_val\"", "]", ")", "except", "KeyError", ":", "raise", "UnexpectedDbResponse", "(", ")", "if", "resp", "[", "\"skipped\"", "]", ">", "0", ":", "raise", "UnexpectedDbResponse", "(", "\"Model with id `%s` not found in the database.\"", "%", "self", ".", "_state", ".", "get", "(", "\"id\"", ")", ")", "return", "self" ]
29.861111
19.083333
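The branching on resp is the subtle part; a self-contained sketch with invented payloads mirroring RethinkDB's return_changes shape shows how each outcome is interpreted:

# Response shapes mirror RethinkDB's return_changes payload; values invented.
resp_insert = {'changes': [{'new_val': {'id': 'abc123', 'name': 'alice'}}],
               'skipped': 0}
resp_missing = {'changes': [], 'skipped': 1}

def interpret(resp):
    changes = resp['changes']
    if len(changes) > 0:
        return ('wrap', changes[0]['new_val'])   # refresh model state
    if resp['skipped'] > 0:
        return ('error', 'model not found')      # UnexpectedDbResponse above
    return ('no-op', None)

print(interpret(resp_insert))   # ('wrap', {'id': 'abc123', 'name': 'alice'})
print(interpret(resp_missing))  # ('error', 'model not found')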
def setup_tempdir(dir, models, wav, alphabet, lm_binary, trie, binaries): r''' Copy models, libs and binary to a directory (new one if dir is None) ''' if dir is None: dir = tempfile.mkdtemp(suffix='dsbench') sorted_models = all_files(models=models) if binaries is None: maybe_download_binaries(dir) else: print('Using local binaries: %s' % (binaries)) shutil.copy2(binaries, dir) extract_native_client_tarball(dir) filenames = map(lambda x: os.path.join(dir, os.path.basename(x)), sorted_models) missing_models = filter(lambda x: not os.path.isfile(x), filenames) if len(missing_models) > 0: # If we have a ZIP file, directly extract it to the proper path if is_zip_file(models): print('Extracting %s to %s' % (models[0], dir)) zipfile.ZipFile(models[0]).extractall(path=dir) print('Extracted %s.' % models[0]) else: # If one model is missing, let's copy everything again. Be safe. for f in sorted_models: print('Copying %s to %s' % (f, dir)) shutil.copy2(f, dir) for extra_file in [ wav, alphabet, lm_binary, trie ]: if extra_file and not os.path.isfile(os.path.join(dir, os.path.basename(extra_file))): print('Copying %s to %s' % (extra_file, dir)) shutil.copy2(extra_file, dir) if ssh_conn: copy_tree(dir) return dir, sorted_models
[ "def", "setup_tempdir", "(", "dir", ",", "models", ",", "wav", ",", "alphabet", ",", "lm_binary", ",", "trie", ",", "binaries", ")", ":", "if", "dir", "is", "None", ":", "dir", "=", "tempfile", ".", "mkdtemp", "(", "suffix", "=", "'dsbench'", ")", "sorted_models", "=", "all_files", "(", "models", "=", "models", ")", "if", "binaries", "is", "None", ":", "maybe_download_binaries", "(", "dir", ")", "else", ":", "print", "(", "'Using local binaries: %s'", "%", "(", "binaries", ")", ")", "shutil", ".", "copy2", "(", "binaries", ",", "dir", ")", "extract_native_client_tarball", "(", "dir", ")", "filenames", "=", "map", "(", "lambda", "x", ":", "os", ".", "path", ".", "join", "(", "dir", ",", "os", ".", "path", ".", "basename", "(", "x", ")", ")", ",", "sorted_models", ")", "missing_models", "=", "filter", "(", "lambda", "x", ":", "not", "os", ".", "path", ".", "isfile", "(", "x", ")", ",", "filenames", ")", "if", "len", "(", "missing_models", ")", ">", "0", ":", "# If we have a ZIP file, directly extract it to the proper path", "if", "is_zip_file", "(", "models", ")", ":", "print", "(", "'Extracting %s to %s'", "%", "(", "models", "[", "0", "]", ",", "dir", ")", ")", "zipfile", ".", "ZipFile", "(", "models", "[", "0", "]", ")", ".", "extractall", "(", "path", "=", "dir", ")", "print", "(", "'Extracted %s.'", "%", "models", "[", "0", "]", ")", "else", ":", "# If one model is missing, let's copy everything again. Be safe.", "for", "f", "in", "sorted_models", ":", "print", "(", "'Copying %s to %s'", "%", "(", "f", ",", "dir", ")", ")", "shutil", ".", "copy2", "(", "f", ",", "dir", ")", "for", "extra_file", "in", "[", "wav", ",", "alphabet", ",", "lm_binary", ",", "trie", "]", ":", "if", "extra_file", "and", "not", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "dir", ",", "os", ".", "path", ".", "basename", "(", "extra_file", ")", ")", ")", ":", "print", "(", "'Copying %s to %s'", "%", "(", "extra_file", ",", "dir", ")", ")", "shutil", ".", "copy2", "(", "extra_file", ",", "dir", ")", "if", "ssh_conn", ":", "copy_tree", "(", "dir", ")", "return", "dir", ",", "sorted_models" ]
38.078947
21.815789
def hil_state_encode(self, time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc): ''' DEPRECATED PACKET! Suffers from missing airspeed fields and singularities due to Euler angles. Please use HIL_STATE_QUATERNION instead. Sent from simulation to autopilot. This packet is useful for high throughput applications such as hardware in the loop simulations. time_usec : Timestamp (microseconds since UNIX epoch or microseconds since system boot) (uint64_t) roll : Roll angle (rad) (float) pitch : Pitch angle (rad) (float) yaw : Yaw angle (rad) (float) rollspeed : Body frame roll / phi angular speed (rad/s) (float) pitchspeed : Body frame pitch / theta angular speed (rad/s) (float) yawspeed : Body frame yaw / psi angular speed (rad/s) (float) lat : Latitude, expressed as * 1E7 (int32_t) lon : Longitude, expressed as * 1E7 (int32_t) alt : Altitude in meters, expressed as * 1000 (millimeters) (int32_t) vx : Ground X Speed (Latitude), expressed as m/s * 100 (int16_t) vy : Ground Y Speed (Longitude), expressed as m/s * 100 (int16_t) vz : Ground Z Speed (Altitude), expressed as m/s * 100 (int16_t) xacc : X acceleration (mg) (int16_t) yacc : Y acceleration (mg) (int16_t) zacc : Z acceleration (mg) (int16_t) ''' return MAVLink_hil_state_message(time_usec, roll, pitch, yaw, rollspeed, pitchspeed, yawspeed, lat, lon, alt, vx, vy, vz, xacc, yacc, zacc)
[ "def", "hil_state_encode", "(", "self", ",", "time_usec", ",", "roll", ",", "pitch", ",", "yaw", ",", "rollspeed", ",", "pitchspeed", ",", "yawspeed", ",", "lat", ",", "lon", ",", "alt", ",", "vx", ",", "vy", ",", "vz", ",", "xacc", ",", "yacc", ",", "zacc", ")", ":", "return", "MAVLink_hil_state_message", "(", "time_usec", ",", "roll", ",", "pitch", ",", "yaw", ",", "rollspeed", ",", "pitchspeed", ",", "yawspeed", ",", "lat", ",", "lon", ",", "alt", ",", "vx", ",", "vy", ",", "vz", ",", "xacc", ",", "yacc", ",", "zacc", ")" ]
77.481481
46.518519
def length(self, t0=0, t1=1, error=LENGTH_ERROR, min_depth=LENGTH_MIN_DEPTH): """Calculate the length of the path up to a certain position""" if t0 == 0 and t1 == 1: if self._length_info['bpoints'] == self.bpoints() \ and self._length_info['error'] >= error \ and self._length_info['min_depth'] >= min_depth: return self._length_info['length'] # using scipy.integrate.quad is quick if _quad_available: s = quad(lambda tau: abs(self.derivative(tau)), t0, t1, epsabs=error, limit=1000)[0] else: s = segment_length(self, t0, t1, self.point(t0), self.point(t1), error, min_depth, 0) if t0 == 0 and t1 == 1: self._length_info['length'] = s self._length_info['bpoints'] = self.bpoints() self._length_info['error'] = error self._length_info['min_depth'] = min_depth return self._length_info['length'] else: return s
[ "def", "length", "(", "self", ",", "t0", "=", "0", ",", "t1", "=", "1", ",", "error", "=", "LENGTH_ERROR", ",", "min_depth", "=", "LENGTH_MIN_DEPTH", ")", ":", "if", "t0", "==", "0", "and", "t1", "==", "1", ":", "if", "self", ".", "_length_info", "[", "'bpoints'", "]", "==", "self", ".", "bpoints", "(", ")", "and", "self", ".", "_length_info", "[", "'error'", "]", ">=", "error", "and", "self", ".", "_length_info", "[", "'min_depth'", "]", ">=", "min_depth", ":", "return", "self", ".", "_length_info", "[", "'length'", "]", "# using scipy.integrate.quad is quick", "if", "_quad_available", ":", "s", "=", "quad", "(", "lambda", "tau", ":", "abs", "(", "self", ".", "derivative", "(", "tau", ")", ")", ",", "t0", ",", "t1", ",", "epsabs", "=", "error", ",", "limit", "=", "1000", ")", "[", "0", "]", "else", ":", "s", "=", "segment_length", "(", "self", ",", "t0", ",", "t1", ",", "self", ".", "point", "(", "t0", ")", ",", "self", ".", "point", "(", "t1", ")", ",", "error", ",", "min_depth", ",", "0", ")", "if", "t0", "==", "0", "and", "t1", "==", "1", ":", "self", ".", "_length_info", "[", "'length'", "]", "=", "s", "self", ".", "_length_info", "[", "'bpoints'", "]", "=", "self", ".", "bpoints", "(", ")", "self", ".", "_length_info", "[", "'error'", "]", "=", "error", "self", ".", "_length_info", "[", "'min_depth'", "]", "=", "min_depth", "return", "self", ".", "_length_info", "[", "'length'", "]", "else", ":", "return", "s" ]
44.416667
18.541667
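The quadrature idea generalizes to any parametric curve; a self-contained sketch for a quarter circle of radius 1 (exact length pi/2) mirrors the quad call above:

import numpy as np
from scipy.integrate import quad

def derivative(t):
    # Derivative of the curve t -> exp(i*t*pi/2), a quarter circle in the plane
    return 1j * (np.pi / 2) * np.exp(1j * t * np.pi / 2)

s, _ = quad(lambda tau: abs(derivative(tau)), 0, 1, epsabs=1e-12, limit=1000)
print(s, np.pi / 2)  # both ~1.5707963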
def get_immediate_children(self): """ Return all direct subsidiaries of this company. Excludes subsidiaries of subsidiaries. """ ownership = Ownership.objects.filter(parent=self) subsidiaries = Company.objects.filter(child__in=ownership).distinct() return subsidiaries
[ "def", "get_immediate_children", "(", "self", ")", ":", "ownership", "=", "Ownership", ".", "objects", ".", "filter", "(", "parent", "=", "self", ")", "subsidiaries", "=", "Company", ".", "objects", ".", "filter", "(", "child__in", "=", "ownership", ")", ".", "distinct", "(", ")", "return", "subsidiaries" ]
39.5
11.75
def log_fault_exc_str (exc, # pylint: disable=W0613 message = "", level = logging.CRITICAL, traceback = False): """Make a StringIO of the usual traceback information, followed by a listing of all the local variables in each frame. """ return log_fault_info_str (sys.exc_info (), message = message, level = level, traceback = traceback)
[ "def", "log_fault_exc_str", "(", "exc", ",", "# pylint: disable=W0613", "message", "=", "\"\"", ",", "level", "=", "logging", ".", "CRITICAL", ",", "traceback", "=", "False", ")", ":", "return", "log_fault_info_str", "(", "sys", ".", "exc_info", "(", ")", ",", "message", "=", "message", ",", "level", "=", "level", ",", "traceback", "=", "traceback", ")" ]
51.375
12
def maybe_automatically_publish_drafts_on_save(sender, instance, **kwargs): """ If automatic publishing is enabled, immediately publish a draft copy after it has been saved. """ # Skip processing if auto-publishing is not enabled if not is_automatic_publishing_enabled(sender): return # Skip missing or unpublishable instances if not instance or not hasattr(instance, 'publishing_linked'): return # Ignore saves of published copies if instance.is_published: return # Ignore saves of already-published draft copies if not instance.is_dirty: return # Immediately publish saved draft copy instance.publish()
[ "def", "maybe_automatically_publish_drafts_on_save", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "# Skip processing if auto-publishing is not enabled", "if", "not", "is_automatic_publishing_enabled", "(", "sender", ")", ":", "return", "# Skip missing or unpublishable instances", "if", "not", "instance", "or", "not", "hasattr", "(", "instance", ",", "'publishing_linked'", ")", ":", "return", "# Ignore saves of published copies", "if", "instance", ".", "is_published", ":", "return", "# Ignore saves of already-published draft copies", "if", "not", "instance", ".", "is_dirty", ":", "return", "# Immediately publish saved draft copy", "instance", ".", "publish", "(", ")" ]
35.473684
16.210526
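The receiver follows Django's post_save signature, so registering it would plausibly look like the sketch below (the actual wiring in the source app is not shown and is an assumption):

from django.db.models.signals import post_save

# Connect globally; a real app would likely restrict to specific senders
# and pass a dispatch_uid to avoid duplicate registration.
post_save.connect(maybe_automatically_publish_drafts_on_save)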
def set_session(self, cookies): """ sets the cookies (should be a dictionary) """ self.__session.cookies = requests.utils.cookiejar_from_dict(cookies)
[ "def", "set_session", "(", "self", ",", "cookies", ")", ":", "self", ".", "__session", ".", "cookies", "=", "requests", ".", "utils", ".", "cookiejar_from_dict", "(", "cookies", ")" ]
35.6
10.8
def from_random(self, power, gm, r0, omega=None, function='geoid', lmax=None, normalization='4pi', csphase=1, exact_power=False): """ Initialize the class of gravitational potential spherical harmonic coefficients as random variables with a given spectrum. Usage ----- x = SHGravCoeffs.from_random(power, gm, r0, [omega, function, lmax, normalization, csphase, exact_power]) Returns ------- x : SHGravCoeffs class instance. Parameters ---------- power : ndarray, shape (L+1) numpy array of shape (L+1) that specifies the expected power per degree l, where L is the maximum spherical harmonic bandwidth. gm : float The gravitational constant times the mass that is associated with the gravitational potential coefficients. r0 : float The reference radius of the spherical harmonic coefficients. omega : float, optional, default = None The angular rotation rate of the body. function : str, optional, default = 'geoid' The type of input power spectrum: 'potential' for the gravitational potential, 'geoid' for the geoid, 'radial' for the radial gravity, or 'total' for the total gravity field. lmax : int, optional, default = len(power) - 1 The maximum spherical harmonic degree l of the output coefficients. The coefficients will be set to zero for degrees greater than L. normalization : str, optional, default = '4pi' '4pi', 'ortho', 'schmidt', or 'unnorm' for geodesy 4pi normalized, orthonormalized, Schmidt semi-normalized, or unnormalized coefficients, respectively. csphase : int, optional, default = 1 Condon-Shortley phase convention: 1 to exclude the phase factor, or -1 to include it. exact_power : bool, optional, default = False The total variance of the coefficients is set exactly to the input power. The distribution of power at degree l amongst the angular orders is random, but the total power is fixed. Description ----------- This routine returns a random realization of spherical harmonic gravitational potential coefficients obtained from a normal distribution. The variance of each coefficient at degree l is equal to the total power at degree l divided by the number of coefficients at that degree (2l+1). These coefficients are then divided by a prefactor that depends upon the function being used to calculate the spectrum: gm/r0 for the gravitational potential, r0 for the geoid, and (l+1)*gm/(r**2) for the radial gravity. The power spectrum of the random realization can be fixed exactly to the input spectrum by setting exact_power to True. Note that the degree 0 term is set to 1, and the degree-1 terms are set to 0. """ if type(normalization) != str: raise ValueError('normalization must be a string. ' 'Input type was {:s}' .format(str(type(normalization)))) if function.lower() not in ('potential', 'geoid', 'radial', 'total'): raise ValueError( "function must be of type 'potential', " "'geoid', 'radial', or 'total'. Provided value was {:s}" .format(repr(function)) ) if normalization.lower() not in ('4pi', 'ortho', 'schmidt', 'unnorm'): raise ValueError( "The input normalization must be '4pi', 'ortho', 'schmidt', " "or 'unnorm'. Provided value was {:s}" .format(repr(normalization)) ) if csphase != 1 and csphase != -1: raise ValueError( "csphase must be 1 or -1. 
Input value was {:s}" .format(repr(csphase)) ) if lmax is None: nl = len(power) lmax = nl - 1 else: if lmax <= len(power) - 1: nl = lmax + 1 else: nl = len(power) degrees = _np.arange(nl) if normalization.lower() == 'unnorm' and nl - 1 > 85: _warnings.warn("Calculations using unnormalized coefficients " "are stable only for degrees less than or equal " "to 85. lmax for the coefficients will be set to " "85. Input value was {:d}.".format(nl-1), category=RuntimeWarning) nl = 85 + 1 lmax = 85 # Create coefficients with unit variance, which returns an expected # total power per degree of (2l+1) for 4pi normalized harmonics. coeffs = _np.empty((2, nl, nl)) for l in degrees: coeffs[:2, l, :l+1] = _np.random.normal(size=(2, l+1)) if exact_power: power_per_l = _spectrum(coeffs, normalization='4pi', unit='per_l') coeffs *= _np.sqrt( power[0:nl] / power_per_l)[_np.newaxis, :, _np.newaxis] else: coeffs *= _np.sqrt( power[0:nl] / (2 * degrees + 1))[_np.newaxis, :, _np.newaxis] if normalization.lower() == '4pi': pass elif normalization.lower() == 'ortho': coeffs = _convert(coeffs, normalization_in='4pi', normalization_out='ortho') elif normalization.lower() == 'schmidt': coeffs = _convert(coeffs, normalization_in='4pi', normalization_out='schmidt') elif normalization.lower() == 'unnorm': coeffs = _convert(coeffs, normalization_in='4pi', normalization_out='unnorm') if function.lower() == 'potential': coeffs /= (gm / r0) elif function.lower() == 'geoid': coeffs /= r0 elif function.lower() == 'radial': for l in degrees: coeffs[:, l, :l+1] /= (gm * (l + 1) / r0**2) elif function.lower() == 'total': for l in degrees: coeffs[:, l, :l+1] /= (gm / r0**2) * _np.sqrt((l + 1) * (2 * l + 1)) if lmax > nl - 1: coeffs = _np.pad(coeffs, ((0, 0), (0, lmax - nl + 1), (0, lmax - nl + 1)), 'constant') coeffs[0, 0, 0] = 1.0 coeffs[:, 1, :] = 0.0 clm = SHGravRealCoeffs(coeffs, gm=gm, r0=r0, omega=omega, normalization=normalization.lower(), csphase=csphase) return clm
[ "def", "from_random", "(", "self", ",", "power", ",", "gm", ",", "r0", ",", "omega", "=", "None", ",", "function", "=", "'geoid'", ",", "lmax", "=", "None", ",", "normalization", "=", "'4pi'", ",", "csphase", "=", "1", ",", "exact_power", "=", "False", ")", ":", "if", "type", "(", "normalization", ")", "!=", "str", ":", "raise", "ValueError", "(", "'normalization must be a string. '", "'Input type was {:s}'", ".", "format", "(", "str", "(", "type", "(", "normalization", ")", ")", ")", ")", "if", "function", ".", "lower", "(", ")", "not", "in", "(", "'potential'", ",", "'geoid'", ",", "'radial'", ",", "'total'", ")", ":", "raise", "ValueError", "(", "\"function must be of type 'potential', \"", "\"'geoid', 'radial', or 'total'. Provided value was {:s}\"", ".", "format", "(", "repr", "(", "function", ")", ")", ")", "if", "normalization", ".", "lower", "(", ")", "not", "in", "(", "'4pi'", ",", "'ortho'", ",", "'schmidt'", ",", "'unnorm'", ")", ":", "raise", "ValueError", "(", "\"The input normalization must be '4pi', 'ortho', 'schmidt', \"", "\"or 'unnorm'. Provided value was {:s}\"", ".", "format", "(", "repr", "(", "normalization", ")", ")", ")", "if", "csphase", "!=", "1", "and", "csphase", "!=", "-", "1", ":", "raise", "ValueError", "(", "\"csphase must be 1 or -1. Input value was {:s}\"", ".", "format", "(", "repr", "(", "csphase", ")", ")", ")", "if", "lmax", "is", "None", ":", "nl", "=", "len", "(", "power", ")", "lmax", "=", "nl", "-", "1", "else", ":", "if", "lmax", "<=", "len", "(", "power", ")", "-", "1", ":", "nl", "=", "lmax", "+", "1", "else", ":", "nl", "=", "len", "(", "power", ")", "degrees", "=", "_np", ".", "arange", "(", "nl", ")", "if", "normalization", ".", "lower", "(", ")", "==", "'unnorm'", "and", "nl", "-", "1", ">", "85", ":", "_warnings", ".", "warn", "(", "\"Calculations using unnormalized coefficients \"", "\"are stable only for degrees less than or equal \"", "\"to 85. lmax for the coefficients will be set to \"", "\"85. 
Input value was {:d}.\"", ".", "format", "(", "nl", "-", "1", ")", ",", "category", "=", "RuntimeWarning", ")", "nl", "=", "85", "+", "1", "lmax", "=", "85", "# Create coefficients with unit variance, which returns an expected", "# total power per degree of (2l+1) for 4pi normalized harmonics.", "coeffs", "=", "_np", ".", "empty", "(", "(", "2", ",", "nl", ",", "nl", ")", ")", "for", "l", "in", "degrees", ":", "coeffs", "[", ":", "2", ",", "l", ",", ":", "l", "+", "1", "]", "=", "_np", ".", "random", ".", "normal", "(", "size", "=", "(", "2", ",", "l", "+", "1", ")", ")", "if", "exact_power", ":", "power_per_l", "=", "_spectrum", "(", "coeffs", ",", "normalization", "=", "'4pi'", ",", "unit", "=", "'per_l'", ")", "coeffs", "*=", "_np", ".", "sqrt", "(", "power", "[", "0", ":", "nl", "]", "/", "power_per_l", ")", "[", "_np", ".", "newaxis", ",", ":", ",", "_np", ".", "newaxis", "]", "else", ":", "coeffs", "*=", "_np", ".", "sqrt", "(", "power", "[", "0", ":", "nl", "]", "/", "(", "2", "*", "degrees", "+", "1", ")", ")", "[", "_np", ".", "newaxis", ",", ":", ",", "_np", ".", "newaxis", "]", "if", "normalization", ".", "lower", "(", ")", "==", "'4pi'", ":", "pass", "elif", "normalization", ".", "lower", "(", ")", "==", "'ortho'", ":", "coeffs", "=", "_convert", "(", "coeffs", ",", "normalization_in", "=", "'4pi'", ",", "normalization_out", "=", "'ortho'", ")", "elif", "normalization", ".", "lower", "(", ")", "==", "'schmidt'", ":", "coeffs", "=", "_convert", "(", "coeffs", ",", "normalization_in", "=", "'4pi'", ",", "normalization_out", "=", "'schmidt'", ")", "elif", "normalization", ".", "lower", "(", ")", "==", "'unnorm'", ":", "coeffs", "=", "_convert", "(", "coeffs", ",", "normalization_in", "=", "'4pi'", ",", "normalization_out", "=", "'unnorm'", ")", "if", "function", ".", "lower", "(", ")", "==", "'potential'", ":", "coeffs", "/=", "(", "gm", "/", "r0", ")", "elif", "function", ".", "lower", "(", ")", "==", "'geoid'", ":", "coeffs", "/=", "r0", "elif", "function", ".", "lower", "(", ")", "==", "'radial'", ":", "for", "l", "in", "degrees", ":", "coeffs", "[", ":", ",", "l", ",", ":", "l", "+", "1", "]", "/=", "(", "gm", "*", "(", "l", "+", "1", ")", "/", "r0", "**", "2", ")", "elif", "function", ".", "lower", "(", ")", "==", "'total'", ":", "for", "l", "in", "degrees", ":", "coeffs", "[", ":", ",", "l", ",", ":", "l", "+", "1", "]", "/=", "(", "gm", "/", "r0", "**", "2", ")", "*", "_np", ".", "sqrt", "(", "(", "l", "+", "1", ")", "*", "(", "2", "*", "l", "+", "1", ")", ")", "if", "lmax", ">", "nl", "-", "1", ":", "coeffs", "=", "_np", ".", "pad", "(", "coeffs", ",", "(", "(", "0", ",", "0", ")", ",", "(", "0", ",", "lmax", "-", "nl", "+", "1", ")", ",", "(", "0", ",", "lmax", "-", "nl", "+", "1", ")", ")", ",", "'constant'", ")", "coeffs", "[", "0", ",", "0", ",", "0", "]", "=", "1.0", "coeffs", "[", ":", ",", "1", ",", ":", "]", "=", "0.0", "clm", "=", "SHGravRealCoeffs", "(", "coeffs", ",", "gm", "=", "gm", ",", "r0", "=", "r0", ",", "omega", "=", "omega", ",", "normalization", "=", "normalization", ".", "lower", "(", ")", ",", "csphase", "=", "csphase", ")", "return", "clm" ]
43.770701
22.356688
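A hedged usage sketch with a power-law input spectrum; the gm/r0 values are merely Earth-like placeholders, and pyshtools is assumed to provide SHGravCoeffs:

import numpy as np
import pyshtools as pysh  # assumed provider of SHGravCoeffs

lmax = 100
power = np.empty(lmax + 1)
power[0] = 0.0                                  # degree 0 is reset to 1 anyway
power[1:] = 1.0 / np.arange(1, lmax + 1) ** 2   # simple power-law spectrum

clm = pysh.SHGravCoeffs.from_random(power, gm=3.986004418e14, r0=6.378137e6,
                                    function='geoid')
print(clm.lmax)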
def check_load_privatekey_callback(self): """ Call the function with an encrypted PEM and a passphrase callback. """ for i in xrange(self.iterations * 10): load_privatekey( FILETYPE_PEM, self.ENCRYPTED_PEM, lambda *args: "hello, secret")
[ "def", "check_load_privatekey_callback", "(", "self", ")", ":", "for", "i", "in", "xrange", "(", "self", ".", "iterations", "*", "10", ")", ":", "load_privatekey", "(", "FILETYPE_PEM", ",", "self", ".", "ENCRYPTED_PEM", ",", "lambda", "*", "args", ":", "\"hello, secret\"", ")" ]
41.571429
13.285714
def on_menu_save_interpretation(self, event): ''' save interpretations to a redo file ''' thellier_gui_redo_file = open( os.path.join(self.WD, "thellier_GUI.redo"), 'w') #-------------------------------------------------- # write interpretations to thellier_GUI.redo #-------------------------------------------------- spec_list = list(self.Data.keys()) spec_list.sort() redo_specimens_list = [] for sp in spec_list: if 'saved' not in self.Data[sp]['pars']: continue if not self.Data[sp]['pars']['saved']: continue redo_specimens_list.append(sp) thellier_gui_redo_file.write("%s %.0f %.0f\n" % ( sp, self.Data[sp]['pars']['measurement_step_min'], self.Data[sp]['pars']['measurement_step_max'])) dlg1 = wx.MessageDialog( self, caption="Saved:", message="File thellier_GUI.redo is saved in MagIC working folder", style=wx.OK) result = self.show_dlg(dlg1) if result == wx.ID_OK: dlg1.Destroy() thellier_gui_redo_file.close() return thellier_gui_redo_file.close() self.close_warning = False
[ "def", "on_menu_save_interpretation", "(", "self", ",", "event", ")", ":", "thellier_gui_redo_file", "=", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "WD", ",", "\"thellier_GUI.redo\"", ")", ",", "'w'", ")", "#--------------------------------------------------", "# write interpretations to thellier_GUI.redo", "#--------------------------------------------------", "spec_list", "=", "list", "(", "self", ".", "Data", ".", "keys", "(", ")", ")", "spec_list", ".", "sort", "(", ")", "redo_specimens_list", "=", "[", "]", "for", "sp", "in", "spec_list", ":", "if", "'saved'", "not", "in", "self", ".", "Data", "[", "sp", "]", "[", "'pars'", "]", ":", "continue", "if", "not", "self", ".", "Data", "[", "sp", "]", "[", "'pars'", "]", "[", "'saved'", "]", ":", "continue", "redo_specimens_list", ".", "append", "(", "sp", ")", "thellier_gui_redo_file", ".", "write", "(", "\"%s %.0f %.0f\\n\"", "%", "(", "sp", ",", "self", ".", "Data", "[", "sp", "]", "[", "'pars'", "]", "[", "'measurement_step_min'", "]", ",", "self", ".", "Data", "[", "sp", "]", "[", "'pars'", "]", "[", "'measurement_step_max'", "]", ")", ")", "dlg1", "=", "wx", ".", "MessageDialog", "(", "self", ",", "caption", "=", "\"Saved:\"", ",", "message", "=", "\"File thellier_GUI.redo is saved in MagIC working folder\"", ",", "style", "=", "wx", ".", "OK", ")", "result", "=", "self", ".", "show_dlg", "(", "dlg1", ")", "if", "result", "==", "wx", ".", "ID_OK", ":", "dlg1", ".", "Destroy", "(", ")", "thellier_gui_redo_file", ".", "close", "(", ")", "return", "thellier_gui_redo_file", ".", "close", "(", ")", "self", ".", "close_warning", "=", "False" ]
37.666667
19.121212
def CMYK_to_CMY(cobj, *args, **kwargs): """ Converts CMYK to CMY. NOTE: CMYK and CMY values range from 0.0 to 1.0 """ cmy_c = cobj.cmyk_c * (1.0 - cobj.cmyk_k) + cobj.cmyk_k cmy_m = cobj.cmyk_m * (1.0 - cobj.cmyk_k) + cobj.cmyk_k cmy_y = cobj.cmyk_y * (1.0 - cobj.cmyk_k) + cobj.cmyk_k return CMYColor(cmy_c, cmy_m, cmy_y)
[ "def", "CMYK_to_CMY", "(", "cobj", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "cmy_c", "=", "cobj", ".", "cmyk_c", "*", "(", "1.0", "-", "cobj", ".", "cmyk_k", ")", "+", "cobj", ".", "cmyk_k", "cmy_m", "=", "cobj", ".", "cmyk_m", "*", "(", "1.0", "-", "cobj", ".", "cmyk_k", ")", "+", "cobj", ".", "cmyk_k", "cmy_y", "=", "cobj", ".", "cmyk_y", "*", "(", "1.0", "-", "cobj", ".", "cmyk_k", ")", "+", "cobj", ".", "cmyk_k", "return", "CMYColor", "(", "cmy_c", ",", "cmy_m", ",", "cmy_y", ")" ]
31.454545
14.909091
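The conversion is easy to verify by hand; a standalone sketch on plain floats (values invented):

def cmyk_to_cmy(c, m, y, k):
    # Fold the key (black) channel into each of C, M, Y
    return (c * (1.0 - k) + k,
            m * (1.0 - k) + k,
            y * (1.0 - k) + k)

print(cmyk_to_cmy(0.2, 0.4, 0.6, 0.5))  # (0.6, 0.7, 0.8)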
def element_should_be_visible(self, locator, loglevel='INFO'): """Verifies that element identified with locator is visible. Key attributes for arbitrary elements are `id` and `name`. See `introduction` for details about locating elements. New in AppiumLibrary 1.4.5 """ if not self._element_find(locator, True, True).is_displayed(): self.log_source(loglevel) raise AssertionError("Element '%s' should be visible " "but was not" % locator)
[ "def", "element_should_be_visible", "(", "self", ",", "locator", ",", "loglevel", "=", "'INFO'", ")", ":", "if", "not", "self", ".", "_element_find", "(", "locator", ",", "True", ",", "True", ")", ".", "is_displayed", "(", ")", ":", "self", ".", "log_source", "(", "loglevel", ")", "raise", "AssertionError", "(", "\"Element '%s' should be visible \"", "\"but did not\"", "%", "locator", ")" ]
46.75
18.166667
def click_on_label(step, label): """ Click on a label """ with AssertContextManager(step): elem = world.browser.find_element_by_xpath(str( '//label[normalize-space(text()) = "%s"]' % label)) elem.click()
[ "def", "click_on_label", "(", "step", ",", "label", ")", ":", "with", "AssertContextManager", "(", "step", ")", ":", "elem", "=", "world", ".", "browser", ".", "find_element_by_xpath", "(", "str", "(", "'//label[normalize-space(text()) = \"%s\"]'", "%", "label", ")", ")", "elem", ".", "click", "(", ")" ]
26.666667
14.444444
def send(token, title, **kwargs): """ Site: https://boxcar.io/ API: http://help.boxcar.io/knowledgebase/topics/48115-boxcar-api Desc: Best app for system administrators """ headers = { "Content-type": "application/x-www-form-urlencoded", "User-Agent": "DBMail/%s" % get_version(), } data = { "user_credentials": token, "notification[title]": from_unicode(title), "notification[sound]": "notifier-2" } for k, v in kwargs.items(): data['notification[%s]' % k] = from_unicode(v) http = HTTPSConnection(kwargs.pop("api_url", "new.boxcar.io")) http.request( "POST", "/api/notifications", headers=headers, body=urlencode(data)) response = http.getresponse() if response.status != 201: raise BoxcarError(response.reason) return True
[ "def", "send", "(", "token", ",", "title", ",", "*", "*", "kwargs", ")", ":", "headers", "=", "{", "\"Content-type\"", ":", "\"application/x-www-form-urlencoded\"", ",", "\"User-Agent\"", ":", "\"DBMail/%s\"", "%", "get_version", "(", ")", ",", "}", "data", "=", "{", "\"user_credentials\"", ":", "token", ",", "\"notification[title]\"", ":", "from_unicode", "(", "title", ")", ",", "\"notification[sound]\"", ":", "\"notifier-2\"", "}", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "data", "[", "'notification[%s]'", "%", "k", "]", "=", "from_unicode", "(", "v", ")", "http", "=", "HTTPSConnection", "(", "kwargs", ".", "pop", "(", "\"api_url\"", ",", "\"new.boxcar.io\"", ")", ")", "http", ".", "request", "(", "\"POST\"", ",", "\"/api/notifications\"", ",", "headers", "=", "headers", ",", "body", "=", "urlencode", "(", "data", ")", ")", "response", "=", "http", ".", "getresponse", "(", ")", "if", "response", ".", "status", "!=", "201", ":", "raise", "BoxcarError", "(", "response", ".", "reason", ")", "return", "True" ]
28
17.666667
def graph(data, src_lat, src_lon, dest_lat, dest_lon, linewidth=1, alpha=220, color='hot'): """Create a graph drawing a line between each pair of (src_lat, src_lon) and (dest_lat, dest_lon) :param data: data access object :param src_lat: field name of source latitude :param src_lon: field name of source longitude :param dest_lat: field name of destination latitude :param dest_lon: field name of destination longitude :param linewidth: line width :param alpha: color alpha :param color: color or colormap """ from geoplotlib.layers import GraphLayer _global_config.layers.append(GraphLayer(data, src_lat, src_lon, dest_lat, dest_lon, linewidth, alpha, color))
[ "def", "graph", "(", "data", ",", "src_lat", ",", "src_lon", ",", "dest_lat", ",", "dest_lon", ",", "linewidth", "=", "1", ",", "alpha", "=", "220", ",", "color", "=", "'hot'", ")", ":", "from", "geoplotlib", ".", "layers", "import", "GraphLayer", "_global_config", ".", "layers", ".", "append", "(", "GraphLayer", "(", "data", ",", "src_lat", ",", "src_lon", ",", "dest_lat", ",", "dest_lon", ",", "linewidth", ",", "alpha", ",", "color", ")", ")" ]
49.785714
17.642857
def dump_json(self, pretty=True): """ Return a string representation of this CloudFormation template. """ # Build template t = {} t['AWSTemplateFormatVersion'] = '2010-09-09' if self.description is not None: t['Description'] = self.description self.elements.dump_to_template_obj(self, t) return _CustomJSONEncoder(indent=2 if pretty else None, sort_keys=False).encode(t)
[ "def", "dump_json", "(", "self", ",", "pretty", "=", "True", ")", ":", "# Build template", "t", "=", "{", "}", "t", "[", "'AWSTemplateFormatVersion'", "]", "=", "'2010-09-09'", "if", "self", ".", "description", "is", "not", "None", ":", "t", "[", "'Description'", "]", "=", "self", ".", "description", "self", ".", "elements", ".", "dump_to_template_obj", "(", "self", ",", "t", ")", "return", "_CustomJSONEncoder", "(", "indent", "=", "2", "if", "pretty", "else", "None", ",", "sort_keys", "=", "False", ")", ".", "encode", "(", "t", ")" ]
37.307692
15.461538
def render(self, data, chart_type, chart_package='corechart', options=None, div_id="chart", head=""): """Render the data in HTML template.""" if not self.is_valid_name(div_id): raise ValueError( "Name {} is invalid. Only letters, numbers, '_', and '-' are permitted ".format( div_id)) return Template(head + self.template).render( div_id=div_id.replace(" ", "_"), data=json.dumps( data, indent=4).replace("'", "\\'").replace('"', "'"), chart_type=chart_type, chart_package=chart_package, options=json.dumps( options, indent=4).replace("'", "\\'").replace('"', "'"))
[ "def", "render", "(", "self", ",", "data", ",", "chart_type", ",", "chart_package", "=", "'corechart'", ",", "options", "=", "None", ",", "div_id", "=", "\"chart\"", ",", "head", "=", "\"\"", ")", ":", "if", "not", "self", ".", "is_valid_name", "(", "div_id", ")", ":", "raise", "ValueError", "(", "\"Name {} is invalid. Only letters, numbers, '_', and '-' are permitted \"", ".", "format", "(", "div_id", ")", ")", "return", "Template", "(", "head", "+", "self", ".", "template", ")", ".", "render", "(", "div_id", "=", "div_id", ".", "replace", "(", "\" \"", ",", "\"_\"", ")", ",", "data", "=", "json", ".", "dumps", "(", "data", ",", "indent", "=", "4", ")", ".", "replace", "(", "\"'\"", ",", "\"\\\\'\"", ")", ".", "replace", "(", "'\"'", ",", "\"'\"", ")", ",", "chart_type", "=", "chart_type", ",", "chart_package", "=", "chart_package", ",", "options", "=", "json", ".", "dumps", "(", "options", ",", "indent", "=", "4", ")", ".", "replace", "(", "\"'\"", ",", "\"\\\\'\"", ")", ".", "replace", "(", "'\"'", ",", "\"'\"", ")", ")" ]
38.095238
15.47619
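The double .replace() is the subtle part: it rewrites the dumped JSON so it can be embedded in a single-quoted JavaScript context. An isolated sketch with invented data:

import json

data = {"label": 'He said "hi"'}
escaped = json.dumps(data, indent=4).replace("'", "\\'").replace('"', "'")
print(escaped)  # single-quoted JSON, safe inside a '...'-delimited JS string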
def _world_from_cwl(fn_name, fnargs, work_dir): """Reconstitute a bcbio world data object from flattened CWL-compatible inputs. Converts the flat CWL representation into a nested bcbio world dictionary. Handles single sample inputs (returning a single world object) and multi-sample runs (returning a list of individual samples to get processed together). """ parallel = None output_cwl_keys = None runtime = {} out = [] data = {} passed_keys = [] for fnarg in fnargs: key, val = fnarg.split("=") # extra values pulling in nested indexes if key == "ignore": continue if key == "sentinel_parallel": parallel = val continue if key == "sentinel_runtime": runtime = dict(tz.partition(2, val.split(","))) continue if key == "sentinel_outputs": output_cwl_keys = _parse_output_keys(val) continue if key == "sentinel_inputs": input_order = collections.OrderedDict([x.split(":") for x in val.split(",")]) continue else: assert key not in passed_keys, "Multiple keys should be handled via JSON records" passed_keys.append(key) key = key.split("__") data = _update_nested(key, _convert_value(val), data) if data: out.append(_finalize_cwl_in(data, work_dir, passed_keys, output_cwl_keys, runtime)) # Read inputs from standard files instead of command line assert os.path.exists(os.path.join(work_dir, "cwl.inputs.json")) out, input_files = _read_from_cwlinput(os.path.join(work_dir, "cwl.inputs.json"), work_dir, runtime, parallel, input_order, output_cwl_keys) if parallel in ["single-parallel", "single-merge", "multi-parallel", "multi-combined", "multi-batch", "batch-split", "batch-parallel", "batch-merge", "batch-single"]: out = [out] else: assert len(out) == 1, "%s\n%s" % (pprint.pformat(out), pprint.pformat(fnargs)) return out, parallel, output_cwl_keys, input_files
[ "def", "_world_from_cwl", "(", "fn_name", ",", "fnargs", ",", "work_dir", ")", ":", "parallel", "=", "None", "output_cwl_keys", "=", "None", "runtime", "=", "{", "}", "out", "=", "[", "]", "data", "=", "{", "}", "passed_keys", "=", "[", "]", "for", "fnarg", "in", "fnargs", ":", "key", ",", "val", "=", "fnarg", ".", "split", "(", "\"=\"", ")", "# extra values pulling in nested indexes", "if", "key", "==", "\"ignore\"", ":", "continue", "if", "key", "==", "\"sentinel_parallel\"", ":", "parallel", "=", "val", "continue", "if", "key", "==", "\"sentinel_runtime\"", ":", "runtime", "=", "dict", "(", "tz", ".", "partition", "(", "2", ",", "val", ".", "split", "(", "\",\"", ")", ")", ")", "continue", "if", "key", "==", "\"sentinel_outputs\"", ":", "output_cwl_keys", "=", "_parse_output_keys", "(", "val", ")", "continue", "if", "key", "==", "\"sentinel_inputs\"", ":", "input_order", "=", "collections", ".", "OrderedDict", "(", "[", "x", ".", "split", "(", "\":\"", ")", "for", "x", "in", "val", ".", "split", "(", "\",\"", ")", "]", ")", "continue", "else", ":", "assert", "key", "not", "in", "passed_keys", ",", "\"Multiple keys should be handled via JSON records\"", "passed_keys", ".", "append", "(", "key", ")", "key", "=", "key", ".", "split", "(", "\"__\"", ")", "data", "=", "_update_nested", "(", "key", ",", "_convert_value", "(", "val", ")", ",", "data", ")", "if", "data", ":", "out", ".", "append", "(", "_finalize_cwl_in", "(", "data", ",", "work_dir", ",", "passed_keys", ",", "output_cwl_keys", ",", "runtime", ")", ")", "# Read inputs from standard files instead of command line", "assert", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"cwl.inputs.json\"", ")", ")", "out", ",", "input_files", "=", "_read_from_cwlinput", "(", "os", ".", "path", ".", "join", "(", "work_dir", ",", "\"cwl.inputs.json\"", ")", ",", "work_dir", ",", "runtime", ",", "parallel", ",", "input_order", ",", "output_cwl_keys", ")", "if", "parallel", "in", "[", "\"single-parallel\"", ",", "\"single-merge\"", ",", "\"multi-parallel\"", ",", "\"multi-combined\"", ",", "\"multi-batch\"", ",", "\"batch-split\"", ",", "\"batch-parallel\"", ",", "\"batch-merge\"", ",", "\"batch-single\"", "]", ":", "out", "=", "[", "out", "]", "else", ":", "assert", "len", "(", "out", ")", "==", "1", ",", "\"%s\\n%s\"", "%", "(", "pprint", ".", "pformat", "(", "out", ")", ",", "pprint", ".", "pformat", "(", "fnargs", ")", ")", "return", "out", ",", "parallel", ",", "output_cwl_keys", ",", "input_files" ]
42.06
24.78
def call_proxy(self, engine, payload, method, analyze_json_error_param, retry_request_substr_variants, stream=False): """ :param engine: Target system/engine :param payload: Request payload data :param method: string May be one of native_call | tsv | json_newline :param analyze_json_error_param: Whether to analyze the error parameter in the proxy response :param retry_request_substr_variants: List of substrings that trigger a retry when present in the response :param stream: :return: """ return self.__api_proxy_call(engine, payload, method, analyze_json_error_param, retry_request_substr_variants, stream)
[ "def", "call_proxy", "(", "self", ",", "engine", ",", "payload", ",", "method", ",", "analyze_json_error_param", ",", "retry_request_substr_variants", ",", "stream", "=", "False", ")", ":", "return", "self", ".", "__api_proxy_call", "(", "engine", ",", "payload", ",", "method", ",", "analyze_json_error_param", ",", "retry_request_substr_variants", ",", "stream", ")" ]
55.923077
29.461538
def _parse_arguments(): """Parses command line arguments. Returns: A Namespace of parsed arguments. """ # Handle version flag and exit if it was passed. param_util.handle_version_flag() parser = provider_base.create_parser(sys.argv[0]) parser.add_argument( '--version', '-v', default=False, help='Print the dsub version and exit.') parser.add_argument( '--jobs', '-j', nargs='*', help='A list of jobs IDs on which to check status') parser.add_argument( '--names', '-n', nargs='*', help='A list of job names on which to check status') parser.add_argument( '--tasks', '-t', nargs='*', help='A list of task IDs on which to check status') parser.add_argument( '--attempts', nargs='*', help='A list of task attempts on which to check status') parser.add_argument( '--users', '-u', nargs='*', default=[], help="""Lists only those jobs which were submitted by the list of users. Use "*" to list jobs of any user.""") parser.add_argument( '--status', '-s', nargs='*', default=['RUNNING'], choices=['RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED', '*'], help="""Lists only those jobs which match the specified status(es). Choose from {'RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED'}. Use "*" to list jobs of any status.""", metavar='STATUS') parser.add_argument( '--age', help="""List only those jobs newer than the specified age. Ages can be listed using a number followed by a unit. Supported units are s (seconds), m (minutes), h (hours), d (days), w (weeks). For example: '7d' (7 days). Bare numbers are treated as UTC.""") parser.add_argument( '--label', nargs='*', action=param_util.ListParamAction, default=[], help='User labels to match. Tasks returned must match all labels.', metavar='KEY=VALUE') parser.add_argument( '--poll-interval', default=10, type=int, help='Polling interval (in seconds) for checking job status ' 'when --wait is set.') parser.add_argument( '--wait', action='store_true', help='Wait until jobs have all completed.') parser.add_argument( '--limit', default=0, type=int, help='The maximum number of tasks to list. The default is unlimited.') parser.add_argument( '--format', choices=['text', 'json', 'yaml', 'provider-json'], help='Set the output format.') output_style = parser.add_mutually_exclusive_group() output_style.add_argument( '--full', '-f', action='store_true', help='Display output with full task information' ' and input parameters.') output_style.add_argument( '--summary', action='store_true', help='Display a summary of the results, grouped by (job, status).') # Shared arguments between the "google" and "google-v2" providers google_common = parser.add_argument_group( title='google-common', description='Options common to the "google" and "google-v2" providers') google_common.add_argument( '--project', help='Cloud project ID in which to find and delete the job(s)') return provider_base.parse_args( parser, { 'google': ['project'], 'google-v2': ['project'], 'test-fails': [], 'local': [], }, sys.argv[1:])
[ "def", "_parse_arguments", "(", ")", ":", "# Handle version flag and exit if it was passed.", "param_util", ".", "handle_version_flag", "(", ")", "parser", "=", "provider_base", ".", "create_parser", "(", "sys", ".", "argv", "[", "0", "]", ")", "parser", ".", "add_argument", "(", "'--version'", ",", "'-v'", ",", "default", "=", "False", ",", "help", "=", "'Print the dsub version and exit.'", ")", "parser", ".", "add_argument", "(", "'--jobs'", ",", "'-j'", ",", "nargs", "=", "'*'", ",", "help", "=", "'A list of jobs IDs on which to check status'", ")", "parser", ".", "add_argument", "(", "'--names'", ",", "'-n'", ",", "nargs", "=", "'*'", ",", "help", "=", "'A list of job names on which to check status'", ")", "parser", ".", "add_argument", "(", "'--tasks'", ",", "'-t'", ",", "nargs", "=", "'*'", ",", "help", "=", "'A list of task IDs on which to check status'", ")", "parser", ".", "add_argument", "(", "'--attempts'", ",", "nargs", "=", "'*'", ",", "help", "=", "'A list of task attempts on which to check status'", ")", "parser", ".", "add_argument", "(", "'--users'", ",", "'-u'", ",", "nargs", "=", "'*'", ",", "default", "=", "[", "]", ",", "help", "=", "\"\"\"Lists only those jobs which were submitted by the list of users.\n Use \"*\" to list jobs of any user.\"\"\"", ")", "parser", ".", "add_argument", "(", "'--status'", ",", "'-s'", ",", "nargs", "=", "'*'", ",", "default", "=", "[", "'RUNNING'", "]", ",", "choices", "=", "[", "'RUNNING'", ",", "'SUCCESS'", ",", "'FAILURE'", ",", "'CANCELED'", ",", "'*'", "]", ",", "help", "=", "\"\"\"Lists only those jobs which match the specified status(es).\n Choose from {'RUNNING', 'SUCCESS', 'FAILURE', 'CANCELED'}.\n Use \"*\" to list jobs of any status.\"\"\"", ",", "metavar", "=", "'STATUS'", ")", "parser", ".", "add_argument", "(", "'--age'", ",", "help", "=", "\"\"\"List only those jobs newer than the specified age. Ages can be\n listed using a number followed by a unit. Supported units are\n s (seconds), m (minutes), h (hours), d (days), w (weeks).\n For example: '7d' (7 days). Bare numbers are treated as UTC.\"\"\"", ")", "parser", ".", "add_argument", "(", "'--label'", ",", "nargs", "=", "'*'", ",", "action", "=", "param_util", ".", "ListParamAction", ",", "default", "=", "[", "]", ",", "help", "=", "'User labels to match. Tasks returned must match all labels.'", ",", "metavar", "=", "'KEY=VALUE'", ")", "parser", ".", "add_argument", "(", "'--poll-interval'", ",", "default", "=", "10", ",", "type", "=", "int", ",", "help", "=", "'Polling interval (in seconds) for checking job status '", "'when --wait is set.'", ")", "parser", ".", "add_argument", "(", "'--wait'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Wait until jobs have all completed.'", ")", "parser", ".", "add_argument", "(", "'--limit'", ",", "default", "=", "0", ",", "type", "=", "int", ",", "help", "=", "'The maximum number of tasks to list. 
The default is unlimited.'", ")", "parser", ".", "add_argument", "(", "'--format'", ",", "choices", "=", "[", "'text'", ",", "'json'", ",", "'yaml'", ",", "'provider-json'", "]", ",", "help", "=", "'Set the output format.'", ")", "output_style", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "output_style", ".", "add_argument", "(", "'--full'", ",", "'-f'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Display output with full task information'", "' and input parameters.'", ")", "output_style", ".", "add_argument", "(", "'--summary'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Display a summary of the results, grouped by (job, status).'", ")", "# Shared arguments between the \"google\" and \"google-v2\" providers", "google_common", "=", "parser", ".", "add_argument_group", "(", "title", "=", "'google-common'", ",", "description", "=", "'Options common to the \"google\" and \"google-v2\" providers'", ")", "google_common", ".", "add_argument", "(", "'--project'", ",", "help", "=", "'Cloud project ID in which to find and delete the job(s)'", ")", "return", "provider_base", ".", "parse_args", "(", "parser", ",", "{", "'google'", ":", "[", "'project'", "]", ",", "'google-v2'", ":", "[", "'project'", "]", ",", "'test-fails'", ":", "[", "]", ",", "'local'", ":", "[", "]", ",", "}", ",", "sys", ".", "argv", "[", "1", ":", "]", ")" ]
31.849057
19.745283
def InputSplines(seq_length, n_bases=10, name=None, **kwargs): """Input placeholder for array returned by `encodeSplines` Wrapper for: `keras.layers.Input((seq_length, n_bases), name=name, **kwargs)` """ return Input((seq_length, n_bases), name=name, **kwargs)
[ "def", "InputSplines", "(", "seq_length", ",", "n_bases", "=", "10", ",", "name", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "Input", "(", "(", "seq_length", ",", "n_bases", ")", ",", "name", "=", "name", ",", "*", "*", "kwargs", ")" ]
45.333333
20.5
def remove_child_repository(self, repository_id, child_id): """Removes a child from a repository. arg: repository_id (osid.id.Id): the ``Id`` of a repository arg: child_id (osid.id.Id): the ``Id`` of the new child raise: NotFound - ``repository_id`` not a parent of ``child_id`` raise: NullArgument - ``repository_id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.BinHierarchyDesignSession.remove_child_bin_template if self._catalog_session is not None: return self._catalog_session.remove_child_catalog(catalog_id=repository_id, child_id=child_id) return self._hierarchy_session.remove_child(id_=repository_id, child_id=child_id)
[ "def", "remove_child_repository", "(", "self", ",", "repository_id", ",", "child_id", ")", ":", "# Implemented from template for", "# osid.resource.BinHierarchyDesignSession.remove_child_bin_template", "if", "self", ".", "_catalog_session", "is", "not", "None", ":", "return", "self", ".", "_catalog_session", ".", "remove_child_catalog", "(", "catalog_id", "=", "repository_id", ",", "child_id", "=", "child_id", ")", "return", "self", ".", "_hierarchy_session", ".", "remove_child", "(", "id_", "=", "repository_id", ",", "child_id", "=", "child_id", ")" ]
50.894737
23.631579
def chartItems(self): """ Returns the chart items that are found within this scene. :return [<XChartWidgetItem>, ..] """ from projexui.widgets.xchartwidget import XChartWidgetItem return filter(lambda x: isinstance(x, XChartWidgetItem), self.items())
[ "def", "chartItems", "(", "self", ")", ":", "from", "projexui", ".", "widgets", ".", "xchartwidget", "import", "XChartWidgetItem", "return", "filter", "(", "lambda", "x", ":", "isinstance", "(", "x", ",", "XChartWidgetItem", ")", ",", "self", ".", "items", "(", ")", ")" ]
38.875
18.125
def add_state_group(self, name, *states): """ Add a group of managed states. Groups can be specified directly in the :class:`~coaster.utils.classes.LabeledEnum`. This method is only useful for grouping a conditional state with existing states. It cannot be used to form a group of groups. :param str name: Name of this group :param states: :class:`ManagedState` instances to be grouped together """ # See `_add_state_internal` for explanation of the following if hasattr(self, name): raise AttributeError( "State group name %s conflicts with existing attribute in the state manager" % name) mstate = ManagedStateGroup(name, self, states) self.states[name] = mstate setattr(self, name, mstate) setattr(self, 'is_' + name.lower(), mstate)
[ "def", "add_state_group", "(", "self", ",", "name", ",", "*", "states", ")", ":", "# See `_add_state_internal` for explanation of the following", "if", "hasattr", "(", "self", ",", "name", ")", ":", "raise", "AttributeError", "(", "\"State group name %s conflicts with existing attribute in the state manager\"", "%", "name", ")", "mstate", "=", "ManagedStateGroup", "(", "name", ",", "self", ",", "states", ")", "self", ".", "states", "[", "name", "]", "=", "mstate", "setattr", "(", "self", ",", "name", ",", "mstate", ")", "setattr", "(", "self", ",", "'is_'", "+", "name", ".", "lower", "(", ")", ",", "mstate", ")" ]
47.777778
18.555556
def run(self): """ this is the actual execution of the ReadProbes thread: continuously read values from the probes """ if self.probes is None: self._stop = True while True: if self._stop: break self.probes_values = { instrument_name: {probe_name: probe_instance.value for probe_name, probe_instance in probe.items()} for instrument_name, probe in self.probes.items() } self.updateProgress.emit(1) self.msleep(int(1e3*self.refresh_interval))
[ "def", "run", "(", "self", ")", ":", "if", "self", ".", "probes", "is", "None", ":", "self", ".", "_stop", "=", "True", "while", "True", ":", "if", "self", ".", "_stop", ":", "break", "self", ".", "probes_values", "=", "{", "instrument_name", ":", "{", "probe_name", ":", "probe_instance", ".", "value", "for", "probe_name", ",", "probe_instance", "in", "probe", ".", "items", "(", ")", "}", "for", "instrument_name", ",", "probe", "in", "self", ".", "probes", ".", "items", "(", ")", "}", "self", ".", "updateProgress", ".", "emit", "(", "1", ")", "self", ".", "msleep", "(", "int", "(", "1e3", "*", "self", ".", "refresh_interval", ")", ")" ]
30.45
23.15
def two_gaussian(freq, freq0_1, freq0_2, sigma1, sigma2, amp1, amp2, offset, drift): """ A two-Gaussian model. This is simply the sum of two gaussian functions in some part of the spectrum. Each individual gaussian has its own peak frequency, sigma, and amp, but they share common offset and drift parameters. """ return (gaussian(freq, freq0_1, sigma1, amp1, offset, drift) + gaussian(freq, freq0_2, sigma2, amp2, offset, drift))
[ "def", "two_gaussian", "(", "freq", ",", "freq0_1", ",", "freq0_2", ",", "sigma1", ",", "sigma2", ",", "amp1", ",", "amp2", ",", "offset", ",", "drift", ")", ":", "return", "(", "gaussian", "(", "freq", ",", "freq0_1", ",", "sigma1", ",", "amp1", ",", "offset", ",", "drift", ")", "+", "gaussian", "(", "freq", ",", "freq0_2", ",", "sigma2", ",", "amp2", ",", "offset", ",", "drift", ")", ")" ]
39.333333
22
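A minimal runnable sketch of the model above. The `gaussian` helper is not shown in this record, so its form here (a peak on a linear baseline, matching the signature) is an assumption; note that with this assumed form the shared offset/drift baseline enters the sum twice, which the package's real helper may handle differently.

    import numpy as np

    def gaussian(freq, freq0, sigma, amp, offset, drift):
        # assumed form: Gaussian peak plus linear baseline (offset + drift * freq)
        return amp * np.exp(-((freq - freq0) ** 2) / (2.0 * sigma ** 2)) + offset + drift * freq

    def two_gaussian(freq, freq0_1, freq0_2, sigma1, sigma2, amp1, amp2, offset, drift):
        # sum of two peaks sharing the baseline parameters, as in the record above
        return (gaussian(freq, freq0_1, sigma1, amp1, offset, drift) +
                gaussian(freq, freq0_2, sigma2, amp2, offset, drift))

    freq = np.linspace(0.0, 10.0, 201)
    spectrum = two_gaussian(freq, 3.0, 7.0, 0.5, 0.8, 1.0, 0.6, 0.1, 0.0)
    print(spectrum.shape)  # (201,)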
def mainloop(self): """ The main loop. """ if not self.args: self.parser.error("No metafiles given, nothing to do!") if 1 < sum(bool(i) for i in (self.options.no_ssl, self.options.reannounce, self.options.reannounce_all)): self.parser.error("Conflicting options --no-ssl, --reannounce and --reannounce-all!") # Set filter criteria for metafiles filter_url_prefix = None if self.options.reannounce: # <scheme>://<netloc>/<path>?<query> filter_url_prefix = urlparse.urlsplit(self.options.reannounce, allow_fragments=False) filter_url_prefix = urlparse.urlunsplit(( filter_url_prefix.scheme, filter_url_prefix.netloc, '/', '', '' # bogus pylint: disable=E1103 )) self.LOG.info("Filtering for metafiles with announce URL prefix %r..." % filter_url_prefix) if self.options.reannounce_all: self.options.reannounce = self.options.reannounce_all else: # When changing the announce URL w/o changing the domain, don't change the info hash! self.options.no_cross_seed = True # Resolve tracker alias, if URL doesn't look like an URL if self.options.reannounce and not urlparse.urlparse(self.options.reannounce).scheme: tracker_alias, idx = self.options.reannounce, "0" if '.' in tracker_alias: tracker_alias, idx = tracker_alias.split('.', 1) try: idx = int(idx, 10) _, tracker_url = config.lookup_announce_alias(tracker_alias) self.options.reannounce = tracker_url[idx] except (KeyError, IndexError, TypeError, ValueError) as exc: raise error.UserError("Unknown tracker alias or bogus URL %r (%s)!" % ( self.options.reannounce, exc)) # go through given files bad = 0 changed = 0 for filename in self.args: try: # Read and remember current content metainfo = bencode.bread(filename) old_metainfo = bencode.bencode(metainfo) except (EnvironmentError, KeyError, bencode.BencodeError) as exc: self.LOG.warning("Skipping bad metafile %r (%s: %s)" % (filename, type(exc).__name__, exc)) bad += 1 else: # Check metafile integrity try: metafile.check_meta(metainfo) except ValueError as exc: self.LOG.warn("Metafile %r failed integrity check: %s" % (filename, exc,)) if not self.options.no_skip: continue # Skip any metafiles that don't meet the pre-conditions if filter_url_prefix and not metainfo['announce'].startswith(filter_url_prefix): self.LOG.warn("Skipping metafile %r not tracked by %r!" % (filename, filter_url_prefix,)) continue # Keep resume info safe libtorrent_resume = {} if "libtorrent_resume" in metainfo: try: libtorrent_resume["bitfield"] = metainfo["libtorrent_resume"]["bitfield"] except KeyError: pass # nothing to remember libtorrent_resume["files"] = copy.deepcopy(metainfo["libtorrent_resume"]["files"]) # Change private flag? if self.options.make_private and not metainfo["info"].get("private", 0): self.LOG.info("Setting private flag...") metainfo["info"]["private"] = 1 if self.options.make_public and metainfo["info"].get("private", 0): self.LOG.info("Clearing private flag...") del metainfo["info"]["private"] # Remove non-standard keys? if self.options.clean or self.options.clean_all or self.options.clean_xseed: metafile.clean_meta(metainfo, including_info=not self.options.clean, logger=self.LOG.info) # Restore resume info? if self.options.clean_xseed: if libtorrent_resume: self.LOG.info("Restoring key 'libtorrent_resume'...") metainfo.setdefault("libtorrent_resume", {}) metainfo["libtorrent_resume"].update(libtorrent_resume) else: self.LOG.warn("No resume information found!") # Clean rTorrent data? if self.options.clean_rtorrent: for key in self.RT_RESUMT_KEYS: if key in metainfo: self.LOG.info("Removing key %r..." % (key,)) del metainfo[key] # Change announce URL? if self.options.reannounce: metainfo['announce'] = self.options.reannounce if "announce-list" in metainfo: del metainfo["announce-list"] if not self.options.no_cross_seed: # Enforce unique hash per tracker metainfo["info"]["x_cross_seed"] = hashlib.md5(self.options.reannounce).hexdigest() if self.options.no_ssl: # We're assuming here the same (default) port is used metainfo['announce'] = (metainfo['announce'] .replace("https://", "http://").replace(":443/", ":80/")) # Change comment or creation date? if self.options.comment is not None: if self.options.comment: metainfo["comment"] = self.options.comment elif "comment" in metainfo: del metainfo["comment"] if self.options.bump_date: metainfo["creation date"] = int(time.time()) if self.options.no_date and "creation date" in metainfo: del metainfo["creation date"] # Add fast-resume data? if self.options.hashed: try: metafile.add_fast_resume(metainfo, self.options.hashed.replace("{}", metainfo["info"]["name"])) except EnvironmentError as exc: self.fatal("Error making fast-resume data (%s)" % (exc,)) raise # Set specific keys? metafile.assign_fields(metainfo, self.options.set) replace_fields(metainfo, self.options.regex) # Write new metafile, if changed new_metainfo = bencode.bencode(metainfo) if new_metainfo != old_metainfo: if self.options.output_directory: filename = os.path.join(self.options.output_directory, os.path.basename(filename)) self.LOG.info("Writing %r..." % filename) if not self.options.dry_run: bencode.bwrite(filename, metainfo) if "libtorrent_resume" in metainfo: # Also write clean version filename = filename.replace(".torrent", "-no-resume.torrent") del metainfo["libtorrent_resume"] self.LOG.info("Writing %r..." % filename) bencode.bwrite(filename, metainfo) else: self.LOG.info("Changing %r..." % filename) if not self.options.dry_run: # Write to temporary file tempname = os.path.join( os.path.dirname(filename), '.' + os.path.basename(filename), ) self.LOG.debug("Writing %r..." % tempname) bencode.bwrite(tempname, metainfo) # Replace existing file if os.name != "posix": # cannot rename to existing target on WIN32 os.remove(filename) try: os.rename(tempname, filename) except EnvironmentError as exc: # TODO: Try to write directly, keeping a backup! raise error.LoggableError("Can't rename tempfile %r to %r (%s)" % ( tempname, filename, exc )) changed += 1 # Print summary if changed: self.LOG.info("%s %d metafile(s)." % ( "Would've changed" if self.options.dry_run else "Changed", changed )) if bad: self.LOG.warn("Skipped %d bad metafile(s)!" % (bad))
[ "def", "mainloop", "(", "self", ")", ":", "if", "not", "self", ".", "args", ":", "self", ".", "parser", ".", "error", "(", "\"No metafiles given, nothing to do!\"", ")", "if", "1", "<", "sum", "(", "bool", "(", "i", ")", "for", "i", "in", "(", "self", ".", "options", ".", "no_ssl", ",", "self", ".", "options", ".", "reannounce", ",", "self", ".", "options", ".", "reannounce_all", ")", ")", ":", "self", ".", "parser", ".", "error", "(", "\"Conflicting options --no-ssl, --reannounce and --reannounce-all!\"", ")", "# Set filter criteria for metafiles", "filter_url_prefix", "=", "None", "if", "self", ".", "options", ".", "reannounce", ":", "# <scheme>://<netloc>/<path>?<query>", "filter_url_prefix", "=", "urlparse", ".", "urlsplit", "(", "self", ".", "options", ".", "reannounce", ",", "allow_fragments", "=", "False", ")", "filter_url_prefix", "=", "urlparse", ".", "urlunsplit", "(", "(", "filter_url_prefix", ".", "scheme", ",", "filter_url_prefix", ".", "netloc", ",", "'/'", ",", "''", ",", "''", "# bogus pylint: disable=E1103", ")", ")", "self", ".", "LOG", ".", "info", "(", "\"Filtering for metafiles with announce URL prefix %r...\"", "%", "filter_url_prefix", ")", "if", "self", ".", "options", ".", "reannounce_all", ":", "self", ".", "options", ".", "reannounce", "=", "self", ".", "options", ".", "reannounce_all", "else", ":", "# When changing the announce URL w/o changing the domain, don't change the info hash!", "self", ".", "options", ".", "no_cross_seed", "=", "True", "# Resolve tracker alias, if URL doesn't look like an URL", "if", "self", ".", "options", ".", "reannounce", "and", "not", "urlparse", ".", "urlparse", "(", "self", ".", "options", ".", "reannounce", ")", ".", "scheme", ":", "tracker_alias", ",", "idx", "=", "self", ".", "options", ".", "reannounce", ",", "\"0\"", "if", "'.'", "in", "tracker_alias", ":", "tracker_alias", ",", "idx", "=", "tracker_alias", ".", "split", "(", "'.'", ",", "1", ")", "try", ":", "idx", "=", "int", "(", "idx", ",", "10", ")", "_", ",", "tracker_url", "=", "config", ".", "lookup_announce_alias", "(", "tracker_alias", ")", "self", ".", "options", ".", "reannounce", "=", "tracker_url", "[", "idx", "]", "except", "(", "KeyError", ",", "IndexError", ",", "TypeError", ",", "ValueError", ")", "as", "exc", ":", "raise", "error", ".", "UserError", "(", "\"Unknown tracker alias or bogus URL %r (%s)!\"", "%", "(", "self", ".", "options", ".", "reannounce", ",", "exc", ")", ")", "# go through given files", "bad", "=", "0", "changed", "=", "0", "for", "filename", "in", "self", ".", "args", ":", "try", ":", "# Read and remember current content", "metainfo", "=", "bencode", ".", "bread", "(", "filename", ")", "old_metainfo", "=", "bencode", ".", "bencode", "(", "metainfo", ")", "except", "(", "EnvironmentError", ",", "KeyError", ",", "bencode", ".", "BencodeError", ")", "as", "exc", ":", "self", ".", "LOG", ".", "warning", "(", "\"Skipping bad metafile %r (%s: %s)\"", "%", "(", "filename", ",", "type", "(", "exc", ")", ".", "__name__", ",", "exc", ")", ")", "bad", "+=", "1", "else", ":", "# Check metafile integrity", "try", ":", "metafile", ".", "check_meta", "(", "metainfo", ")", "except", "ValueError", "as", "exc", ":", "self", ".", "LOG", ".", "warn", "(", "\"Metafile %r failed integrity check: %s\"", "%", "(", "filename", ",", "exc", ",", ")", ")", "if", "not", "self", ".", "options", ".", "no_skip", ":", "continue", "# Skip any metafiles that don't meet the pre-conditions", "if", "filter_url_prefix", "and", "not", "metainfo", "[", "'announce'", "]", ".", 
"startswith", "(", "filter_url_prefix", ")", ":", "self", ".", "LOG", ".", "warn", "(", "\"Skipping metafile %r no tracked by %r!\"", "%", "(", "filename", ",", "filter_url_prefix", ",", ")", ")", "continue", "# Keep resume info safe", "libtorrent_resume", "=", "{", "}", "if", "\"libtorrent_resume\"", "in", "metainfo", ":", "try", ":", "libtorrent_resume", "[", "\"bitfield\"", "]", "=", "metainfo", "[", "\"libtorrent_resume\"", "]", "[", "\"bitfield\"", "]", "except", "KeyError", ":", "pass", "# nothing to remember", "libtorrent_resume", "[", "\"files\"", "]", "=", "copy", ".", "deepcopy", "(", "metainfo", "[", "\"libtorrent_resume\"", "]", "[", "\"files\"", "]", ")", "# Change private flag?", "if", "self", ".", "options", ".", "make_private", "and", "not", "metainfo", "[", "\"info\"", "]", ".", "get", "(", "\"private\"", ",", "0", ")", ":", "self", ".", "LOG", ".", "info", "(", "\"Setting private flag...\"", ")", "metainfo", "[", "\"info\"", "]", "[", "\"private\"", "]", "=", "1", "if", "self", ".", "options", ".", "make_public", "and", "metainfo", "[", "\"info\"", "]", ".", "get", "(", "\"private\"", ",", "0", ")", ":", "self", ".", "LOG", ".", "info", "(", "\"Clearing private flag...\"", ")", "del", "metainfo", "[", "\"info\"", "]", "[", "\"private\"", "]", "# Remove non-standard keys?", "if", "self", ".", "options", ".", "clean", "or", "self", ".", "options", ".", "clean_all", "or", "self", ".", "options", ".", "clean_xseed", ":", "metafile", ".", "clean_meta", "(", "metainfo", ",", "including_info", "=", "not", "self", ".", "options", ".", "clean", ",", "logger", "=", "self", ".", "LOG", ".", "info", ")", "# Restore resume info?", "if", "self", ".", "options", ".", "clean_xseed", ":", "if", "libtorrent_resume", ":", "self", ".", "LOG", ".", "info", "(", "\"Restoring key 'libtorrent_resume'...\"", ")", "metainfo", ".", "setdefault", "(", "\"libtorrent_resume\"", ",", "{", "}", ")", "metainfo", "[", "\"libtorrent_resume\"", "]", ".", "update", "(", "libtorrent_resume", ")", "else", ":", "self", ".", "LOG", ".", "warn", "(", "\"No resume information found!\"", ")", "# Clean rTorrent data?", "if", "self", ".", "options", ".", "clean_rtorrent", ":", "for", "key", "in", "self", ".", "RT_RESUMT_KEYS", ":", "if", "key", "in", "metainfo", ":", "self", ".", "LOG", ".", "info", "(", "\"Removing key %r...\"", "%", "(", "key", ",", ")", ")", "del", "metainfo", "[", "key", "]", "# Change announce URL?", "if", "self", ".", "options", ".", "reannounce", ":", "metainfo", "[", "'announce'", "]", "=", "self", ".", "options", ".", "reannounce", "if", "\"announce-list\"", "in", "metainfo", ":", "del", "metainfo", "[", "\"announce-list\"", "]", "if", "not", "self", ".", "options", ".", "no_cross_seed", ":", "# Enforce unique hash per tracker", "metainfo", "[", "\"info\"", "]", "[", "\"x_cross_seed\"", "]", "=", "hashlib", ".", "md5", "(", "self", ".", "options", ".", "reannounce", ")", ".", "hexdigest", "(", ")", "if", "self", ".", "options", ".", "no_ssl", ":", "# We're assuming here the same (default) port is used", "metainfo", "[", "'announce'", "]", "=", "(", "metainfo", "[", "'announce'", "]", ".", "replace", "(", "\"https://\"", ",", "\"http://\"", ")", ".", "replace", "(", "\":443/\"", ",", "\":80/\"", ")", ")", "# Change comment or creation date?", "if", "self", ".", "options", ".", "comment", "is", "not", "None", ":", "if", "self", ".", "options", ".", "comment", ":", "metainfo", "[", "\"comment\"", "]", "=", "self", ".", "options", ".", "comment", "elif", "\"comment\"", "in", "metainfo", ":", 
"del", "metainfo", "[", "\"comment\"", "]", "if", "self", ".", "options", ".", "bump_date", ":", "metainfo", "[", "\"creation date\"", "]", "=", "int", "(", "time", ".", "time", "(", ")", ")", "if", "self", ".", "options", ".", "no_date", "and", "\"creation date\"", "in", "metainfo", ":", "del", "metainfo", "[", "\"creation date\"", "]", "# Add fast-resume data?", "if", "self", ".", "options", ".", "hashed", ":", "try", ":", "metafile", ".", "add_fast_resume", "(", "metainfo", ",", "self", ".", "options", ".", "hashed", ".", "replace", "(", "\"{}\"", ",", "metainfo", "[", "\"info\"", "]", "[", "\"name\"", "]", ")", ")", "except", "EnvironmentError", "as", "exc", ":", "self", ".", "fatal", "(", "\"Error making fast-resume data (%s)\"", "%", "(", "exc", ",", ")", ")", "raise", "# Set specific keys?", "metafile", ".", "assign_fields", "(", "metainfo", ",", "self", ".", "options", ".", "set", ")", "replace_fields", "(", "metainfo", ",", "self", ".", "options", ".", "regex", ")", "# Write new metafile, if changed", "new_metainfo", "=", "bencode", ".", "bencode", "(", "metainfo", ")", "if", "new_metainfo", "!=", "old_metainfo", ":", "if", "self", ".", "options", ".", "output_directory", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "self", ".", "options", ".", "output_directory", ",", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "self", ".", "LOG", ".", "info", "(", "\"Writing %r...\"", "%", "filename", ")", "if", "not", "self", ".", "options", ".", "dry_run", ":", "bencode", ".", "bwrite", "(", "filename", ",", "metainfo", ")", "if", "\"libtorrent_resume\"", "in", "metainfo", ":", "# Also write clean version", "filename", "=", "filename", ".", "replace", "(", "\".torrent\"", ",", "\"-no-resume.torrent\"", ")", "del", "metainfo", "[", "\"libtorrent_resume\"", "]", "self", ".", "LOG", ".", "info", "(", "\"Writing %r...\"", "%", "filename", ")", "bencode", ".", "bwrite", "(", "filename", ",", "metainfo", ")", "else", ":", "self", ".", "LOG", ".", "info", "(", "\"Changing %r...\"", "%", "filename", ")", "if", "not", "self", ".", "options", ".", "dry_run", ":", "# Write to temporary file", "tempname", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ",", "'.'", "+", "os", ".", "path", ".", "basename", "(", "filename", ")", ",", ")", "self", ".", "LOG", ".", "debug", "(", "\"Writing %r...\"", "%", "tempname", ")", "bencode", ".", "bwrite", "(", "tempname", ",", "metainfo", ")", "# Replace existing file", "if", "os", ".", "name", "!=", "\"posix\"", ":", "# cannot rename to existing target on WIN32", "os", ".", "remove", "(", "filename", ")", "try", ":", "os", ".", "rename", "(", "tempname", ",", "filename", ")", "except", "EnvironmentError", "as", "exc", ":", "# TODO: Try to write directly, keeping a backup!", "raise", "error", ".", "LoggableError", "(", "\"Can't rename tempfile %r to %r (%s)\"", "%", "(", "tempname", ",", "filename", ",", "exc", ")", ")", "changed", "+=", "1", "# Print summary", "if", "changed", ":", "self", ".", "LOG", ".", "info", "(", "\"%s %d metafile(s).\"", "%", "(", "\"Would've changed\"", "if", "self", ".", "options", ".", "dry_run", "else", "\"Changed\"", ",", "changed", ")", ")", "if", "bad", ":", "self", ".", "LOG", ".", "warn", "(", "\"Skipped %d bad metafile(s)!\"", "%", "(", "bad", ")", ")" ]
48.304813
24.331551
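The --no-ssl branch of mainloop() above is plain string substitution; shown standalone with a made-up tracker URL (it assumes the default HTTPS port, as the code's own comment notes):

    announce = 'https://tracker.example.com:443/announce'
    # the same two replace() calls used in mainloop()
    print(announce.replace('https://', 'http://').replace(':443/', ':80/'))
    # -> http://tracker.example.com:80/announce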
def do_failures(self, arg): """Prints a list of test cases that failed for the current unit test and analysis group settings. To check failures only on specific output files, pass the list of files to check as arguments. """ usable, filename, append = self._redirect_split(arg) a = self.tests[self.active] args = self.curargs splitargs = usable.split() if len(splitargs) > 0: tfilter = splitargs[0] else: tfilter = "*" outfiles = None if len(splitargs) > 1: outfiles = splitargs[1:len(splitargs)] result = a.failures(outfiles, args["threshold"], tfilter) self._redirect_output(result, filename, append, msg.info)
[ "def", "do_failures", "(", "self", ",", "arg", ")", ":", "usable", ",", "filename", ",", "append", "=", "self", ".", "_redirect_split", "(", "arg", ")", "a", "=", "self", ".", "tests", "[", "self", ".", "active", "]", "args", "=", "self", ".", "curargs", "splitargs", "=", "usable", ".", "split", "(", ")", "if", "len", "(", "splitargs", ")", ">", "0", ":", "tfilter", "=", "splitargs", "[", "0", "]", "else", ":", "tfilter", "=", "\"*\"", "outfiles", "=", "None", "if", "len", "(", "splitargs", ")", ">", "1", ":", "outfiles", "=", "splitargs", "[", "1", ":", "len", "(", "splitargs", ")", "]", "result", "=", "a", ".", "failures", "(", "outfiles", ",", "args", "[", "\"threshold\"", "]", ",", "tfilter", ")", "self", ".", "_redirect_output", "(", "result", ",", "filename", ",", "append", ",", "msg", ".", "info", ")" ]
37.05
16.65
def apply_weight_drop(block, local_param_regex, rate, axes=(), weight_dropout_mode='training'): """Apply weight drop to the parameter of a block. Parameters ---------- block : Block or HybridBlock The block whose parameter is to be applied weight-drop. local_param_regex : str The regex for parameter names used in the self.params.get(), such as 'weight'. rate : float Fraction of the input units to drop. Must be a number between 0 and 1. axes : tuple of int, default () The axes on which dropout mask is shared. If empty, regular dropout is applied. weight_dropout_mode : {'training', 'always'}, default 'training' Whether the weight dropout should be applied only at training time, or always be applied. Examples -------- >>> net = gluon.rnn.LSTM(10, num_layers=2, bidirectional=True) >>> gluonnlp.model.apply_weight_drop(net, r'.*h2h_weight', 0.5) >>> net.collect_params() lstm0_ ( Parameter lstm0_l0_i2h_weight (shape=(40, 0), dtype=<class 'numpy.float32'>) WeightDropParameter lstm0_l0_h2h_weight (shape=(40, 10), dtype=<class 'numpy.float32'>, \ rate=0.5, mode=training) Parameter lstm0_l0_i2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_l0_h2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_r0_i2h_weight (shape=(40, 0), dtype=<class 'numpy.float32'>) WeightDropParameter lstm0_r0_h2h_weight (shape=(40, 10), dtype=<class 'numpy.float32'>, \ rate=0.5, mode=training) Parameter lstm0_r0_i2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_r0_h2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_l1_i2h_weight (shape=(40, 20), dtype=<class 'numpy.float32'>) WeightDropParameter lstm0_l1_h2h_weight (shape=(40, 10), dtype=<class 'numpy.float32'>, \ rate=0.5, mode=training) Parameter lstm0_l1_i2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_l1_h2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_r1_i2h_weight (shape=(40, 20), dtype=<class 'numpy.float32'>) WeightDropParameter lstm0_r1_h2h_weight (shape=(40, 10), dtype=<class 'numpy.float32'>, \ rate=0.5, mode=training) Parameter lstm0_r1_i2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) Parameter lstm0_r1_h2h_bias (shape=(40,), dtype=<class 'numpy.float32'>) ) >>> ones = mx.nd.ones((3, 4, 5)) >>> net.initialize() >>> with mx.autograd.train_mode(): ... net(ones).max().asscalar() != net(ones).max().asscalar() True """ if not rate: return existing_params = _find_params(block, local_param_regex) for (local_param_name, param), \ (ref_params_list, ref_reg_params_list) in existing_params.items(): dropped_param = WeightDropParameter(param, rate, weight_dropout_mode, axes) for ref_params in ref_params_list: ref_params[param.name] = dropped_param for ref_reg_params in ref_reg_params_list: ref_reg_params[local_param_name] = dropped_param if hasattr(block, local_param_name): local_attr = getattr(block, local_param_name) if local_attr == param: local_attr = dropped_param elif isinstance(local_attr, (list, tuple)): if isinstance(local_attr, tuple): local_attr = list(local_attr) for i, v in enumerate(local_attr): if v == param: local_attr[i] = dropped_param elif isinstance(local_attr, dict): for k, v in local_attr.items(): if v == param: local_attr[k] = dropped_param else: continue if local_attr: super(Block, block).__setattr__(local_param_name, local_attr)
[ "def", "apply_weight_drop", "(", "block", ",", "local_param_regex", ",", "rate", ",", "axes", "=", "(", ")", ",", "weight_dropout_mode", "=", "'training'", ")", ":", "if", "not", "rate", ":", "return", "existing_params", "=", "_find_params", "(", "block", ",", "local_param_regex", ")", "for", "(", "local_param_name", ",", "param", ")", ",", "(", "ref_params_list", ",", "ref_reg_params_list", ")", "in", "existing_params", ".", "items", "(", ")", ":", "dropped_param", "=", "WeightDropParameter", "(", "param", ",", "rate", ",", "weight_dropout_mode", ",", "axes", ")", "for", "ref_params", "in", "ref_params_list", ":", "ref_params", "[", "param", ".", "name", "]", "=", "dropped_param", "for", "ref_reg_params", "in", "ref_reg_params_list", ":", "ref_reg_params", "[", "local_param_name", "]", "=", "dropped_param", "if", "hasattr", "(", "block", ",", "local_param_name", ")", ":", "local_attr", "=", "getattr", "(", "block", ",", "local_param_name", ")", "if", "local_attr", "==", "param", ":", "local_attr", "=", "dropped_param", "elif", "isinstance", "(", "local_attr", ",", "(", "list", ",", "tuple", ")", ")", ":", "if", "isinstance", "(", "local_attr", ",", "tuple", ")", ":", "local_attr", "=", "list", "(", "local_attr", ")", "for", "i", ",", "v", "in", "enumerate", "(", "local_attr", ")", ":", "if", "v", "==", "param", ":", "local_attr", "[", "i", "]", "=", "dropped_param", "elif", "isinstance", "(", "local_attr", ",", "dict", ")", ":", "for", "k", ",", "v", "in", "local_attr", ":", "if", "v", "==", "param", ":", "local_attr", "[", "k", "]", "=", "dropped_param", "else", ":", "continue", "if", "local_attr", ":", "super", "(", "Block", ",", "block", ")", ".", "__setattr__", "(", "local_param_name", ",", "local_attr", ")" ]
49.886076
24.594937
def play(self, start_pos=0, end_pos=0, count=0): """Play internal color pattern :param start_pos: pattern line to start from :param end_pos: pattern line to end at :param count: number of times to play, 0=play forever """ if self.dev is None: return '' buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0] return self.write(buf)
[ "def", "play", "(", "self", ",", "start_pos", "=", "0", ",", "end_pos", "=", "0", ",", "count", "=", "0", ")", ":", "if", "(", "self", ".", "dev", "==", "None", ")", ":", "return", "''", "buf", "=", "[", "REPORT_ID", ",", "ord", "(", "'p'", ")", ",", "1", ",", "int", "(", "start_pos", ")", ",", "int", "(", "end_pos", ")", ",", "int", "(", "count", ")", ",", "0", ",", "0", ",", "0", "]", "return", "self", ".", "write", "(", "buf", ")" ]
46.555556
12
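The feature report built by play() can be inspected without any hardware; REPORT_ID = 1 is an assumption here (the real constant is defined by the module):

    REPORT_ID = 1  # assumed value; the real code takes it from the module
    start_pos, end_pos, count = 0, 15, 3
    buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
    print(buf)  # [1, 112, 1, 0, 15, 3, 0, 0, 0] -- the 9-byte report written to the device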
def new_branch(self, branch, parent, parent_turn, parent_tick): """Declare that the ``branch`` is descended from ``parent`` at ``parent_turn``, ``parent_tick`` """ return self.sql('branches_insert', branch, parent, parent_turn, parent_tick, parent_turn, parent_tick)
[ "def", "new_branch", "(", "self", ",", "branch", ",", "parent", ",", "parent_turn", ",", "parent_tick", ")", ":", "return", "self", ".", "sql", "(", "'branches_insert'", ",", "branch", ",", "parent", ",", "parent_turn", ",", "parent_tick", ",", "parent_turn", ",", "parent_tick", ")" ]
49
22.166667
def _profile_package(self): """Runs statistical profiler on a package.""" with _StatProfiler() as prof: prof.base_frame = inspect.currentframe() try: runpy.run_path(self._run_object, run_name='__main__') except SystemExit: pass call_tree = prof.call_tree return { 'objectName': self._object_name, 'sampleInterval': _SAMPLE_INTERVAL, 'runTime': prof.run_time, 'callStats': call_tree, 'totalSamples': call_tree.get('sampleCount', 0), 'timestamp': int(time.time()) }
[ "def", "_profile_package", "(", "self", ")", ":", "with", "_StatProfiler", "(", ")", "as", "prof", ":", "prof", ".", "base_frame", "=", "inspect", ".", "currentframe", "(", ")", "try", ":", "runpy", ".", "run_path", "(", "self", ".", "_run_object", ",", "run_name", "=", "'__main__'", ")", "except", "SystemExit", ":", "pass", "call_tree", "=", "prof", ".", "call_tree", "return", "{", "'objectName'", ":", "self", ".", "_object_name", ",", "'sampleInterval'", ":", "_SAMPLE_INTERVAL", ",", "'runTime'", ":", "prof", ".", "run_time", ",", "'callStats'", ":", "call_tree", ",", "'totalSamples'", ":", "call_tree", ".", "get", "(", "'sampleCount'", ",", "0", ")", ",", "'timestamp'", ":", "int", "(", "time", ".", "time", "(", ")", ")", "}" ]
34.833333
14
def validate_course_run_id(self, value): """ Validates that the course run id is part of the Enterprise Customer's catalog. """ enterprise_customer = self.context.get('enterprise_customer') if not enterprise_customer.catalog_contains_course(value): raise serializers.ValidationError( 'The course run id {course_run_id} is not in the catalog ' 'for Enterprise Customer {enterprise_customer}'.format( course_run_id=value, enterprise_customer=enterprise_customer.name, ) ) return value
[ "def", "validate_course_run_id", "(", "self", ",", "value", ")", ":", "enterprise_customer", "=", "self", ".", "context", ".", "get", "(", "'enterprise_customer'", ")", "if", "not", "enterprise_customer", ".", "catalog_contains_course", "(", "value", ")", ":", "raise", "serializers", ".", "ValidationError", "(", "'The course run id {course_run_id} is not in the catalog '", "'for Enterprise Customer {enterprise_customer}'", ".", "format", "(", "course_run_id", "=", "value", ",", "enterprise_customer", "=", "enterprise_customer", ".", "name", ",", ")", ")", "return", "value" ]
39.3125
21.6875
def getOption(self, name): """ Get the current value of the specified option. If the option does not exist, returns None. Args: name: Option name. Returns: Value of the option. Raises: InvalidArgument: if the option name is not valid. """ try: value = lock_and_call( lambda: self._impl.getOption(name).value(), self._lock ) except RuntimeError: return None else: try: return int(value) except ValueError: try: return float(value) except ValueError: return value
[ "def", "getOption", "(", "self", ",", "name", ")", ":", "try", ":", "value", "=", "lock_and_call", "(", "lambda", ":", "self", ".", "_impl", ".", "getOption", "(", "name", ")", ".", "value", "(", ")", ",", "self", ".", "_lock", ")", "except", "RuntimeError", ":", "return", "None", "else", ":", "try", ":", "return", "int", "(", "value", ")", "except", "ValueError", ":", "try", ":", "return", "float", "(", "value", ")", "except", "ValueError", ":", "return", "value" ]
25.310345
18
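The int-then-float-then-string fallback in getOption() is easy to verify in isolation; a self-contained sketch of just that coercion chain:

    def coerce(value):
        # mirrors getOption()'s fallback: int first, then float, then the raw string
        try:
            return int(value)
        except ValueError:
            try:
                return float(value)
            except ValueError:
                return value

    print(coerce('42'), coerce('1e-6'), coerce('cplex'))  # 42 1e-06 cplex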
def check_outdated(package, version): """ Given the name of a package on PyPI and a version (both strings), checks if the given version is the latest version of the package available. Returns a 2-tuple (is_outdated, latest_version) where is_outdated is a boolean which is True if the given version is earlier than the latest version, which is the string latest_version. Attempts to cache on disk the HTTP call it makes for 24 hours. If this somehow fails the exception is converted to a warning (OutdatedCacheFailedWarning) and the function continues normally. """ from pkg_resources import parse_version parsed_version = parse_version(version) latest = None with utils.cache_file(package, 'r') as f: content = f.read() if content: # in case cache_file fails and so f is a dummy file latest, cache_dt = json.loads(content) if not utils.cache_is_valid(cache_dt): latest = None def get_latest(): url = 'https://pypi.python.org/pypi/%s/json' % package response = utils.get_url(url) return json.loads(response)['info']['version'] if latest is None: latest = get_latest() parsed_latest = parse_version(latest) if parsed_version > parsed_latest: # Probably a stale cached value latest = get_latest() parsed_latest = parse_version(latest) if parsed_version > parsed_latest: raise ValueError('Version %s is greater than the latest version on PyPI: %s' % (version, latest)) is_latest = parsed_version == parsed_latest assert is_latest or parsed_version < parsed_latest with utils.cache_file(package, 'w') as f: data = [latest, utils.format_date(datetime.now())] json.dump(data, f) return not is_latest, latest
[ "def", "check_outdated", "(", "package", ",", "version", ")", ":", "from", "pkg_resources", "import", "parse_version", "parsed_version", "=", "parse_version", "(", "version", ")", "latest", "=", "None", "with", "utils", ".", "cache_file", "(", "package", ",", "'r'", ")", "as", "f", ":", "content", "=", "f", ".", "read", "(", ")", "if", "content", ":", "# in case cache_file fails and so f is a dummy file", "latest", ",", "cache_dt", "=", "json", ".", "loads", "(", "content", ")", "if", "not", "utils", ".", "cache_is_valid", "(", "cache_dt", ")", ":", "latest", "=", "None", "def", "get_latest", "(", ")", ":", "url", "=", "'https://pypi.python.org/pypi/%s/json'", "%", "package", "response", "=", "utils", ".", "get_url", "(", "url", ")", "return", "json", ".", "loads", "(", "response", ")", "[", "'info'", "]", "[", "'version'", "]", "if", "latest", "is", "None", ":", "latest", "=", "get_latest", "(", ")", "parsed_latest", "=", "parse_version", "(", "latest", ")", "if", "parsed_version", ">", "parsed_latest", ":", "# Probably a stale cached value", "latest", "=", "get_latest", "(", ")", "parsed_latest", "=", "parse_version", "(", "latest", ")", "if", "parsed_version", ">", "parsed_latest", ":", "raise", "ValueError", "(", "'Version %s is greater than the latest version on PyPI: %s'", "%", "(", "version", ",", "latest", ")", ")", "is_latest", "=", "parsed_version", "==", "parsed_latest", "assert", "is_latest", "or", "parsed_version", "<", "parsed_latest", "with", "utils", ".", "cache_file", "(", "package", ",", "'w'", ")", "as", "f", ":", "data", "=", "[", "latest", ",", "utils", ".", "format_date", "(", "datetime", ".", "now", "(", ")", ")", "]", "json", ".", "dump", "(", "data", ",", "f", ")", "return", "not", "is_latest", ",", "latest" ]
33.833333
20.907407
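A usage sketch for check_outdated(); the package name and version below are arbitrary, and a real HTTP call to PyPI is made whenever the on-disk cache is cold or stale:

    is_outdated, latest = check_outdated('requests', '2.0.0')
    if is_outdated:
        print('a newer release is available: %s' % latest)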
def get_attribute_information_profile( url: str, profile: Optional[Tuple[str]]=None, categories: Optional[Tuple[str]]=None) -> Dict: """ Get the information content for a list of phenotypes and the annotation sufficiency simple and categorical scores if categories are provided Ref: https://zenodo.org/record/834091#.W8ZnCxhlCV4 Note that the simple score varies slightly from the pub in that it uses max_max_ic instead of mean_max_ic If no arguments are passed this function returns the system (loaded cohort) stats :raises JSONDecodeError: If the response body does not contain valid json. """ owlsim_url = url + 'getAttributeInformationProfile' params = { 'a': profile, 'r': categories } return requests.get(owlsim_url, params=params, timeout=TIMEOUT).json()
[ "def", "get_attribute_information_profile", "(", "url", ":", "str", ",", "profile", ":", "Optional", "[", "Tuple", "[", "str", "]", "]", "=", "None", ",", "categories", ":", "Optional", "[", "Tuple", "[", "str", "]", "]", "=", "None", ")", "->", "Dict", ":", "owlsim_url", "=", "url", "+", "'getAttributeInformationProfile'", "params", "=", "{", "'a'", ":", "profile", ",", "'r'", ":", "categories", "}", "return", "requests", ".", "get", "(", "owlsim_url", ",", "params", "=", "params", ",", "timeout", "=", "TIMEOUT", ")", ".", "json", "(", ")" ]
35.166667
18.75
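A hedged usage sketch; the OWLSim endpoint and the HPO identifiers below are placeholders, network access is required, and the base URL is assumed to end with '/' since the code concatenates the path directly:

    url = 'https://owlsim.example.org/'      # hypothetical endpoint
    profile = ('HP:0000739', 'HP:0001257')   # hypothetical phenotype IDs
    result = get_attribute_information_profile(url, profile=profile)
    print(result)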
def setOutputPoint(self, point): """ Sets the scene space point for where this connection should draw \ its output from. This value will only be used if no output \ node is defined. :param point | <QPointF> """ self._outputPoint = point self.setPath(self.rebuild())
[ "def", "setOutputPoint", "(", "self", ",", "point", ")", ":", "self", ".", "_outputPoint", "=", "point", "self", ".", "setPath", "(", "self", ".", "rebuild", "(", ")", ")" ]
34
13.2
def _list(self): """List all the objects saved in the namespace. :param search_from: TBI :param search_to: TBI :param offset: TBI :param limit: max number of values to be shows. :return: list with transactions. """ all = self.driver.instance.metadata.get(search=self.namespace) list = [] for id in all: try: if not self._get(id['id']) in list: list.append(self._get(id['id'])) except Exception: pass return list
[ "def", "_list", "(", "self", ")", ":", "all", "=", "self", ".", "driver", ".", "instance", ".", "metadata", ".", "get", "(", "search", "=", "self", ".", "namespace", ")", "list", "=", "[", "]", "for", "id", "in", "all", ":", "try", ":", "if", "not", "self", ".", "_get", "(", "id", "[", "'id'", "]", ")", "in", "list", ":", "list", ".", "append", "(", "self", ".", "_get", "(", "id", "[", "'id'", "]", ")", ")", "except", "Exception", ":", "pass", "return", "list" ]
29.421053
17
def run(self, node): """ Captures the use of locals() in render function. """ if self.get_call_name(node) != 'render': return issues = [] for arg in node.args: if isinstance(arg, ast.Call) and arg.func.id == 'locals': issues.append( DJ03( lineno=node.lineno, col=node.col_offset, ) ) return issues
[ "def", "run", "(", "self", ",", "node", ")", ":", "if", "self", ".", "get_call_name", "(", "node", ")", "!=", "'render'", ":", "return", "issues", "=", "[", "]", "for", "arg", "in", "node", ".", "args", ":", "if", "isinstance", "(", "arg", ",", "ast", ".", "Call", ")", "and", "arg", ".", "func", ".", "id", "==", "'locals'", ":", "issues", ".", "append", "(", "DJ03", "(", "lineno", "=", "node", ".", "lineno", ",", "col", "=", "node", ".", "col_offset", ",", ")", ")", "return", "issues" ]
30.125
13.75
def get_filename(self, prefix, url): """ Creates a file path of the form: current-working-directory/prefix/cleaned-url.txt :param prefix: The prefix from the .get() and .put() methods. :param url: The url of the request. :return: The created path. """ return '{}.txt'.format(os.path.join(os.getcwd(), prefix, self.clean_url(url)))
[ "def", "get_filename", "(", "self", ",", "prefix", ",", "url", ")", ":", "return", "'{}.txt'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "prefix", ",", "self", ".", "clean_url", "(", "url", ")", ")", ")" ]
42.111111
19.666667
def compute_percentile(self, percentage): """ Returns a Position object that represents the point percentage% of the way through the larger task, as specified by this query. @param percentage a number between 0 and 100. """ all_patches = self.get_all_patches() return all_patches[ int(len(all_patches) * percentage / 100) ].start_position
[ "def", "compute_percentile", "(", "self", ",", "percentage", ")", ":", "all_patches", "=", "self", ".", "get_all_patches", "(", ")", "return", "all_patches", "[", "int", "(", "len", "(", "all_patches", ")", "*", "percentage", "/", "100", ")", "]", ".", "start_position" ]
36.545455
14.363636
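The index arithmetic in compute_percentile(), shown standalone: with 200 patches and percentage=25 the selected index is int(200 * 25 / 100) = 50.

    all_patches = list(range(200))  # stand-in for get_all_patches()
    percentage = 25
    print(int(len(all_patches) * percentage / 100))  # 50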
def get_filtered_dfs(lib, expr): """ Main: Get all data frames that match the given expression :return dict: Filenames and data frames (filtered) """ logger_dataframes.info("enter get_filtered_dfs") dfs = {} tt = None # Process all lipds files or one lipds file? specific_files = _check_expr_filename(expr) # Determine the table type wanted if "chron" in expr: tt = "chron" elif "paleo" in expr: tt = "paleo" # Get all filenames of target type. if tt: if specific_files: # The user has specified a single LiPD file to get data frames from. for file in specific_files: if file in lib: lo_meta = lib[file].get_metadata() lo_dfs = lib[file].get_dfs() # Only start a search if this lipds file has data frames available. Otherwise, pointless. if lo_dfs: # Get list of all matching filenames filenames = _match_dfs_expr(lo_meta, expr, tt) # Update our output data frames dictionary dfs.update(_match_filenames_w_dfs(filenames, lo_dfs)) else: print("Unable to find LiPD file in Library: {}".format(file)) # Process all LiPD files in the library. A file has not been specified in the expression. else: # Loop once on each lipds object in the library for ln, lo in lib.items(): # Get the lo_meta = lo.get_metadata() lo_dfs = lo.get_dfs() # Only start a search if this lipds file has data frames available. Otherwise, pointless. if lo_dfs: # Get list of all matching filenames filenames = _match_dfs_expr(lo_meta, expr, tt) # Update our output data frames dictionary dfs.update(_match_filenames_w_dfs(filenames, lo_dfs)) logger_dataframes.info("exit get_filtered_dfs") return dfs
[ "def", "get_filtered_dfs", "(", "lib", ",", "expr", ")", ":", "logger_dataframes", ".", "info", "(", "\"enter get_filtered_dfs\"", ")", "dfs", "=", "{", "}", "tt", "=", "None", "# Process all lipds files or one lipds file?", "specific_files", "=", "_check_expr_filename", "(", "expr", ")", "# Determine the table type wanted", "if", "\"chron\"", "in", "expr", ":", "tt", "=", "\"chron\"", "elif", "\"paleo\"", "in", "expr", ":", "tt", "=", "\"paleo\"", "# Get all filenames of target type.", "if", "tt", ":", "if", "specific_files", ":", "# The user has specified a single LiPD file to get data frames from.", "for", "file", "in", "specific_files", ":", "if", "file", "in", "lib", ":", "lo_meta", "=", "lib", "[", "file", "]", ".", "get_metadata", "(", ")", "lo_dfs", "=", "lib", "[", "file", "]", ".", "get_dfs", "(", ")", "# Only start a search if this lipds file has data frames available. Otherwise, pointless.", "if", "lo_dfs", ":", "# Get list of all matching filenames", "filenames", "=", "_match_dfs_expr", "(", "lo_meta", ",", "expr", ",", "tt", ")", "# Update our output data frames dictionary", "dfs", ".", "update", "(", "_match_filenames_w_dfs", "(", "filenames", ",", "lo_dfs", ")", ")", "else", ":", "print", "(", "\"Unable to find LiPD file in Library: {}\"", ".", "format", "(", "file", ")", ")", "# Process all LiPD files in the library. A file has not been specified in the expression.", "else", ":", "# Loop once on each lipds object in the library", "for", "ln", ",", "lo", "in", "lib", ".", "items", "(", ")", ":", "# Get the", "lo_meta", "=", "lo", ".", "get_metadata", "(", ")", "lo_dfs", "=", "lo", ".", "get_dfs", "(", ")", "# Only start a search if this lipds file has data frames available. Otherwise, pointless.", "if", "lo_dfs", ":", "# Get list of all matching filenames", "filenames", "=", "_match_dfs_expr", "(", "lo_meta", ",", "expr", ",", "tt", ")", "# Update our output data frames dictionary", "dfs", ".", "update", "(", "_match_filenames_w_dfs", "(", "filenames", ",", "lo_dfs", ")", ")", "logger_dataframes", ".", "info", "(", "\"exit get_filtered_dfs\"", ")", "return", "dfs" ]
37.527273
23.054545
def get_next_question(self, question_id, answered=None, reverse=False, honor_sequential=True): """Inspects question map to return the next available question. if answered == False: only return next unanswered question if answered == True: only return next answered question if answered in None: return next question whether answered or not if reverse == True: go backwards - effectively get_previous_question if honor_sequential == True: only return questions if section or part is set to sequential items """ self._update_questions() # Make sure questions list is current question_map = self._get_question_map(question_id) # will raise NotFound() questions = list(self._my_map['questions']) if reverse: questions = questions[::-1] error_text = ' previous ' else: if 'missingResponse' in question_map: if self._is_question_sequential(question_map) and honor_sequential: raise errors.IllegalState('Next question is not yet available') error_text = ' next ' if questions[-1] == question_map: raise errors.IllegalState('No ' + error_text + ' questions available') index = questions.index(question_map) + 1 for question_map in questions[index:]: latest_question_response = question_map['responses'][0] question_answered = False # take missingResponse == UNANSWERED or NULL_RESPONSE as an unanswered question if 'missingResponse' not in latest_question_response: question_answered = True if answered is None or question_answered == answered: return self.get_question(question_map=question_map) raise errors.IllegalState('No ' + error_text + ' question matching parameters was found')
[ "def", "get_next_question", "(", "self", ",", "question_id", ",", "answered", "=", "None", ",", "reverse", "=", "False", ",", "honor_sequential", "=", "True", ")", ":", "self", ".", "_update_questions", "(", ")", "# Make sure questions list is current", "question_map", "=", "self", ".", "_get_question_map", "(", "question_id", ")", "# will raise NotFound()", "questions", "=", "list", "(", "self", ".", "_my_map", "[", "'questions'", "]", ")", "if", "reverse", ":", "questions", "=", "questions", "[", ":", ":", "-", "1", "]", "error_text", "=", "' previous '", "else", ":", "if", "'missingResponse'", "in", "question_map", ":", "if", "self", ".", "_is_question_sequential", "(", "question_map", ")", "and", "honor_sequential", ":", "raise", "errors", ".", "IllegalState", "(", "'Next question is not yet available'", ")", "error_text", "=", "' next '", "if", "questions", "[", "-", "1", "]", "==", "question_map", ":", "raise", "errors", ".", "IllegalState", "(", "'No '", "+", "error_text", "+", "' questions available'", ")", "index", "=", "questions", ".", "index", "(", "question_map", ")", "+", "1", "for", "question_map", "in", "questions", "[", "index", ":", "]", ":", "latest_question_response", "=", "question_map", "[", "'responses'", "]", "[", "0", "]", "question_answered", "=", "False", "# take missingResponse == UNANSWERED or NULL_RESPONSE as an unanswered question", "if", "'missingResponse'", "not", "in", "latest_question_response", ":", "question_answered", "=", "True", "if", "answered", "is", "None", "or", "question_answered", "==", "answered", ":", "return", "self", ".", "get_question", "(", "question_map", "=", "question_map", ")", "raise", "errors", ".", "IllegalState", "(", "'No '", "+", "error_text", "+", "' question matching parameters was found'", ")" ]
55.941176
24.235294
def _remove_prefix(name): """Strip the possible prefix 'Table: ' from one or more table names.""" if isinstance(name, str): return _do_remove_prefix(name) return [_do_remove_prefix(nm) for nm in name]
[ "def", "_remove_prefix", "(", "name", ")", ":", "if", "isinstance", "(", "name", ",", "str", ")", ":", "return", "_do_remove_prefix", "(", "name", ")", "return", "[", "_do_remove_prefix", "(", "nm", ")", "for", "nm", "in", "name", "]" ]
43.2
7.4
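A self-contained sketch; _do_remove_prefix is not shown in this record, so a plausible implementation is assumed:

    def _do_remove_prefix(name):
        # assumed helper: strip one leading 'Table: ' tag if present
        prefix = 'Table: '
        return name[len(prefix):] if name.startswith(prefix) else name

    def _remove_prefix(name):
        if isinstance(name, str):
            return _do_remove_prefix(name)
        return [_do_remove_prefix(nm) for nm in name]

    print(_remove_prefix('Table: users'))      # users
    print(_remove_prefix(['Table: a', 'b']))   # ['a', 'b']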
def request(self, endpoint, data=None, json=None, filename=None, save_to=None): """ Perform a REST API request to the backend H2O server. :param endpoint: (str) The endpoint's URL, for example "GET /4/schemas/KeyV4" :param data: data payload for POST (and sometimes GET) requests. This should be a dictionary of simple key/value pairs (values can also be arrays), which will be sent over in x-www-form-encoded format. :param json: also data payload, but it will be sent as a JSON body. Cannot be used together with `data`. :param filename: file to upload to the server. Cannot be used with `data` or `json`. :param save_to: if provided, will write the response to that file (additionally, the response will be streamed, so large files can be downloaded seamlessly). This parameter can be either a file name, or a folder name. If the folder doesn't exist, it will be created automatically. :returns: an H2OResponse object representing the server's response (unless ``save_to`` parameter is provided, in which case the output file's name will be returned). :raises H2OConnectionError: if the H2O server cannot be reached (or connection is not initialized) :raises H2OServerError: if there was a server error (http 500), or server returned malformed JSON :raises H2OResponseError: if the server returned an H2OErrorV3 response (e.g. if the parameters were invalid) """ if self._stage == 0: raise H2OConnectionError("Connection not initialized; run .connect() first.") if self._stage == -1: raise H2OConnectionError("Connection was closed, and can no longer be used.") # Prepare URL assert_is_type(endpoint, str) match = assert_matches(str(endpoint), r"^(GET|POST|PUT|DELETE|PATCH|HEAD) (/.*)$") method = match.group(1) urltail = match.group(2) url = self._base_url + urltail # Prepare data if filename is not None: assert_is_type(filename, str) assert_is_type(json, None, "Argument `json` should be None when `filename` is used.") assert_is_type(data, None, "Argument `data` should be None when `filename` is used.") assert_satisfies(method, method == "POST", "File uploads can only be done via POST method, got %s" % method) elif data is not None: assert_is_type(data, dict) assert_is_type(json, None, "Argument `json` should be None when `data` is used.") elif json is not None: assert_is_type(json, dict) data = self._prepare_data_payload(data) files = self._prepare_file_payload(filename) params = None if method == "GET" and data: params = data data = None stream = False if save_to is not None: assert_is_type(save_to, str) stream = True if self._cookies is not None and isinstance(self._cookies, list): self._cookies = ";".join(self._cookies) # Make the request start_time = time.time() try: self._log_start_transaction(endpoint, data, json, files, params) headers = {"User-Agent": "H2O Python client/" + sys.version.replace("\n", ""), "X-Cluster": self._cluster_id, "Cookie": self._cookies} resp = requests.request(method=method, url=url, data=data, json=json, files=files, params=params, headers=headers, timeout=self._timeout, stream=stream, auth=self._auth, verify=self._verify_ssl_cert, proxies=self._proxies) self._log_end_transaction(start_time, resp) return self._process_response(resp, save_to) except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e: if self._local_server and not self._local_server.is_running(): self._log_end_exception("Local server has died.") raise H2OConnectionError("Local server has died unexpectedly. RIP.") else: self._log_end_exception(e) raise H2OConnectionError("Unexpected HTTP error: %s" % e) except requests.exceptions.Timeout as e: self._log_end_exception(e) elapsed_time = time.time() - start_time raise H2OConnectionError("Timeout after %.3fs" % elapsed_time) except H2OResponseError as e: err = e.args[0] err.endpoint = endpoint err.payload = (data, json, files, params) raise
[ "def", "request", "(", "self", ",", "endpoint", ",", "data", "=", "None", ",", "json", "=", "None", ",", "filename", "=", "None", ",", "save_to", "=", "None", ")", ":", "if", "self", ".", "_stage", "==", "0", ":", "raise", "H2OConnectionError", "(", "\"Connection not initialized; run .connect() first.\"", ")", "if", "self", ".", "_stage", "==", "-", "1", ":", "raise", "H2OConnectionError", "(", "\"Connection was closed, and can no longer be used.\"", ")", "# Prepare URL", "assert_is_type", "(", "endpoint", ",", "str", ")", "match", "=", "assert_matches", "(", "str", "(", "endpoint", ")", ",", "r\"^(GET|POST|PUT|DELETE|PATCH|HEAD) (/.*)$\"", ")", "method", "=", "match", ".", "group", "(", "1", ")", "urltail", "=", "match", ".", "group", "(", "2", ")", "url", "=", "self", ".", "_base_url", "+", "urltail", "# Prepare data", "if", "filename", "is", "not", "None", ":", "assert_is_type", "(", "filename", ",", "str", ")", "assert_is_type", "(", "json", ",", "None", ",", "\"Argument `json` should be None when `filename` is used.\"", ")", "assert_is_type", "(", "data", ",", "None", ",", "\"Argument `data` should be None when `filename` is used.\"", ")", "assert_satisfies", "(", "method", ",", "method", "==", "\"POST\"", ",", "\"File uploads can only be done via POST method, got %s\"", "%", "method", ")", "elif", "data", "is", "not", "None", ":", "assert_is_type", "(", "data", ",", "dict", ")", "assert_is_type", "(", "json", ",", "None", ",", "\"Argument `json` should be None when `data` is used.\"", ")", "elif", "json", "is", "not", "None", ":", "assert_is_type", "(", "json", ",", "dict", ")", "data", "=", "self", ".", "_prepare_data_payload", "(", "data", ")", "files", "=", "self", ".", "_prepare_file_payload", "(", "filename", ")", "params", "=", "None", "if", "method", "==", "\"GET\"", "and", "data", ":", "params", "=", "data", "data", "=", "None", "stream", "=", "False", "if", "save_to", "is", "not", "None", ":", "assert_is_type", "(", "save_to", ",", "str", ")", "stream", "=", "True", "if", "self", ".", "_cookies", "is", "not", "None", "and", "isinstance", "(", "self", ".", "_cookies", ",", "list", ")", ":", "self", ".", "_cookies", "=", "\";\"", ".", "join", "(", "self", ".", "_cookies", ")", "# Make the request", "start_time", "=", "time", ".", "time", "(", ")", "try", ":", "self", ".", "_log_start_transaction", "(", "endpoint", ",", "data", ",", "json", ",", "files", ",", "params", ")", "headers", "=", "{", "\"User-Agent\"", ":", "\"H2O Python client/\"", "+", "sys", ".", "version", ".", "replace", "(", "\"\\n\"", ",", "\"\"", ")", ",", "\"X-Cluster\"", ":", "self", ".", "_cluster_id", ",", "\"Cookie\"", ":", "self", ".", "_cookies", "}", "resp", "=", "requests", ".", "request", "(", "method", "=", "method", ",", "url", "=", "url", ",", "data", "=", "data", ",", "json", "=", "json", ",", "files", "=", "files", ",", "params", "=", "params", ",", "headers", "=", "headers", ",", "timeout", "=", "self", ".", "_timeout", ",", "stream", "=", "stream", ",", "auth", "=", "self", ".", "_auth", ",", "verify", "=", "self", ".", "_verify_ssl_cert", ",", "proxies", "=", "self", ".", "_proxies", ")", "self", ".", "_log_end_transaction", "(", "start_time", ",", "resp", ")", "return", "self", ".", "_process_response", "(", "resp", ",", "save_to", ")", "except", "(", "requests", ".", "exceptions", ".", "ConnectionError", ",", "requests", ".", "exceptions", ".", "HTTPError", ")", "as", "e", ":", "if", "self", ".", "_local_server", "and", "not", "self", ".", "_local_server", ".", "is_running", "(", ")", 
":", "self", ".", "_log_end_exception", "(", "\"Local server has died.\"", ")", "raise", "H2OConnectionError", "(", "\"Local server has died unexpectedly. RIP.\"", ")", "else", ":", "self", ".", "_log_end_exception", "(", "e", ")", "raise", "H2OConnectionError", "(", "\"Unexpected HTTP error: %s\"", "%", "e", ")", "except", "requests", ".", "exceptions", ".", "Timeout", "as", "e", ":", "self", ".", "_log_end_exception", "(", "e", ")", "elapsed_time", "=", "time", ".", "time", "(", ")", "-", "start_time", "raise", "H2OConnectionError", "(", "\"Timeout after %.3fs\"", "%", "elapsed_time", ")", "except", "H2OResponseError", "as", "e", ":", "err", "=", "e", ".", "args", "[", "0", "]", "err", ".", "endpoint", "=", "endpoint", "err", ".", "payload", "=", "(", "data", ",", "json", ",", "files", ",", "params", ")", "raise" ]
53.356322
30.275862
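Endpoint parsing in request() hinges on a single regular expression (assert_matches is assumed to wrap re.match); the pattern checked standalone:

    import re

    match = re.match(r'^(GET|POST|PUT|DELETE|PATCH|HEAD) (/.*)$', 'GET /4/schemas/KeyV4')
    print(match.group(1))  # GET
    print(match.group(2))  # /4/schemas/KeyV4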
def scrape(self, url): ''' Execute Web-Scraping. The target dom objects are in self.__dom_object_list. Args: url: Web site URL. Returns: The result, as a string. @TODO(chimera0): check URLs format. ''' if isinstance(url, str) is False: raise TypeError("The type of url must be str.") if self.readable_web_pdf is not None and self.readable_web_pdf.is_pdf_url(url) is True: web_data = self.readable_web_pdf.url_to_text(url) else: web_data = "" req = urllib.request.Request(url=url) with urllib.request.urlopen(req) as f: web = f.read().decode('utf-8') dom = pq(web) [dom(remove_object).remove() for remove_object in self.__remove_object_list] for dom_object in self.__dom_object_list: web_data += dom(dom_object).text() sleep(1) return web_data
[ "def", "scrape", "(", "self", ",", "url", ")", ":", "if", "isinstance", "(", "url", ",", "str", ")", "is", "False", ":", "raise", "TypeError", "(", "\"The type of url must be str.\"", ")", "if", "self", ".", "readable_web_pdf", "is", "not", "None", "and", "self", ".", "readable_web_pdf", ".", "is_pdf_url", "(", "url", ")", "is", "True", ":", "web_data", "=", "self", ".", "readable_web_pdf", ".", "url_to_text", "(", "url", ")", "else", ":", "web_data", "=", "\"\"", "req", "=", "urllib", ".", "request", ".", "Request", "(", "url", "=", "url", ")", "with", "urllib", ".", "request", ".", "urlopen", "(", "req", ")", "as", "f", ":", "web", "=", "f", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "dom", "=", "pq", "(", "web", ")", "[", "dom", "(", "remove_object", ")", ".", "remove", "(", ")", "for", "remove_object", "in", "self", ".", "__remove_object_list", "]", "for", "dom_object", "in", "self", ".", "__dom_object_list", ":", "web_data", "+=", "dom", "(", "dom_object", ")", ".", "text", "(", ")", "sleep", "(", "1", ")", "return", "web_data" ]
31.935484
22.83871
def add_section(self, section): """Create a new section in the configuration. Extends RawConfigParser.add_section by validating if the section name is a string.""" section, _, _ = self._validate_value_types(section=section) super(ConfigParser, self).add_section(section)
[ "def", "add_section", "(", "self", ",", "section", ")", ":", "section", ",", "_", ",", "_", "=", "self", ".", "_validate_value_types", "(", "section", "=", "section", ")", "super", "(", "ConfigParser", ",", "self", ")", ".", "add_section", "(", "section", ")" ]
51
13.666667
def _F(self, X): """ analytic solution of the projection integral :param x: R/Rs :type x: float >0 """ if isinstance(X, int) or isinstance(X, float): if X < 1 and X > 0: a = 1/(X**2-1)*(1-2/np.sqrt(1-X**2)*np.arctanh(np.sqrt((1-X)/(1+X)))) elif X == 1: a = 1./3 elif X > 1: a = 1/(X**2-1)*(1-2/np.sqrt(X**2-1)*np.arctan(np.sqrt((X-1)/(1+X)))) else: # X == 0: c = 0.0000001 a = 1/(-1)*(1-2/np.sqrt(1)*np.arctanh(np.sqrt((1-c)/(1+c)))) else: a = np.empty_like(X) x = X[(X < 1) & (X > 0)] a[(X < 1) & (X > 0)] = 1/(x**2-1)*(1-2/np.sqrt(1-x**2)*np.arctanh(np.sqrt((1-x)/(1+x)))) a[X == 1] = 1./3. x = X[X > 1] a[X > 1] = 1/(x**2-1)*(1-2/np.sqrt(x**2-1)*np.arctan(np.sqrt((x-1)/(1+x)))) # a[X>y] = 0 c = 0.0000001 a[X == 0] = 1/(-1)*(1-2/np.sqrt(1)*np.arctanh(np.sqrt((1-c)/(1+c)))) return a
[ "def", "_F", "(", "self", ",", "X", ")", ":", "if", "isinstance", "(", "X", ",", "int", ")", "or", "isinstance", "(", "X", ",", "float", ")", ":", "if", "X", "<", "1", "and", "X", ">", "0", ":", "a", "=", "1", "/", "(", "X", "**", "2", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "1", "-", "X", "**", "2", ")", "*", "np", ".", "arctanh", "(", "np", ".", "sqrt", "(", "(", "1", "-", "X", ")", "/", "(", "1", "+", "X", ")", ")", ")", ")", "elif", "X", "==", "1", ":", "a", "=", "1.", "/", "3", "elif", "X", ">", "1", ":", "a", "=", "1", "/", "(", "X", "**", "2", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "X", "**", "2", "-", "1", ")", "*", "np", ".", "arctan", "(", "np", ".", "sqrt", "(", "(", "X", "-", "1", ")", "/", "(", "1", "+", "X", ")", ")", ")", ")", "else", ":", "# X == 0:", "c", "=", "0.0000001", "a", "=", "1", "/", "(", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "1", ")", "*", "np", ".", "arctanh", "(", "np", ".", "sqrt", "(", "(", "1", "-", "c", ")", "/", "(", "1", "+", "c", ")", ")", ")", ")", "else", ":", "a", "=", "np", ".", "empty_like", "(", "X", ")", "x", "=", "X", "[", "(", "X", "<", "1", ")", "&", "(", "X", ">", "0", ")", "]", "a", "[", "(", "X", "<", "1", ")", "&", "(", "X", ">", "0", ")", "]", "=", "1", "/", "(", "x", "**", "2", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "1", "-", "x", "**", "2", ")", "*", "np", ".", "arctanh", "(", "np", ".", "sqrt", "(", "(", "1", "-", "x", ")", "/", "(", "1", "+", "x", ")", ")", ")", ")", "a", "[", "X", "==", "1", "]", "=", "1.", "/", "3.", "x", "=", "X", "[", "X", ">", "1", "]", "a", "[", "X", ">", "1", "]", "=", "1", "/", "(", "x", "**", "2", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "x", "**", "2", "-", "1", ")", "*", "np", ".", "arctan", "(", "np", ".", "sqrt", "(", "(", "x", "-", "1", ")", "/", "(", "1", "+", "x", ")", ")", ")", ")", "# a[X>y] = 0", "c", "=", "0.0000001", "a", "[", "X", "==", "0", "]", "=", "1", "/", "(", "-", "1", ")", "*", "(", "1", "-", "2", "/", "np", ".", "sqrt", "(", "1", ")", "*", "np", ".", "arctanh", "(", "np", ".", "sqrt", "(", "(", "1", "-", "c", ")", "/", "(", "1", "+", "c", ")", ")", ")", ")", "return", "a" ]
33.15625
23.65625
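A quick numerical sanity check of the branches above; `profile` stands in for whatever class hosts _F, which this record does not show (an assumption):

import numpy as np

print(profile._F(0.5))    # scalar path, X < 1 branch (arctanh form)
print(profile._F(1.0))    # exactly 1./3 at X == 1
print(profile._F(np.array([0.5, 1.0, 2.0])))   # vectorized path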
def ipv6_acl_ipv6_access_list_standard_seq_action(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    ipv6_acl = ET.SubElement(config, "ipv6-acl", xmlns="urn:brocade.com:mgmt:brocade-ipv6-access-list")
    ipv6 = ET.SubElement(ipv6_acl, "ipv6")
    access_list = ET.SubElement(ipv6, "access-list")
    standard = ET.SubElement(access_list, "standard")
    name_key = ET.SubElement(standard, "name")
    name_key.text = kwargs.pop('name')
    seq = ET.SubElement(standard, "seq")
    seq_id_key = ET.SubElement(seq, "seq-id")
    seq_id_key.text = kwargs.pop('seq_id')
    action = ET.SubElement(seq, "action")
    action.text = kwargs.pop('action')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
[ "def", "ipv6_acl_ipv6_access_list_standard_seq_action", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "ipv6_acl", "=", "ET", ".", "SubElement", "(", "config", ",", "\"ipv6-acl\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocade-ipv6-access-list\"", ")", "ipv6", "=", "ET", ".", "SubElement", "(", "ipv6_acl", ",", "\"ipv6\"", ")", "access_list", "=", "ET", ".", "SubElement", "(", "ipv6", ",", "\"access-list\"", ")", "standard", "=", "ET", ".", "SubElement", "(", "access_list", ",", "\"standard\"", ")", "name_key", "=", "ET", ".", "SubElement", "(", "standard", ",", "\"name\"", ")", "name_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'name'", ")", "seq", "=", "ET", ".", "SubElement", "(", "standard", ",", "\"seq\"", ")", "seq_id_key", "=", "ET", ".", "SubElement", "(", "seq", ",", "\"seq-id\"", ")", "seq_id_key", ".", "text", "=", "kwargs", ".", "pop", "(", "'seq_id'", ")", "action", "=", "ET", ".", "SubElement", "(", "seq", ",", "\"action\"", ")", "action", ".", "text", "=", "kwargs", ".", "pop", "(", "'action'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
45.333333
13.277778
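An illustrative call, assuming `dev` is an instance of the auto-generated class; passing an identity callback returns the assembled element for inspection instead of pushing it to a device:

import xml.etree.ElementTree as ET

config = dev.ipv6_acl_ipv6_access_list_standard_seq_action(
    name='v6-acl-1', seq_id='10', action='permit',
    callback=lambda cfg: cfg)
print(ET.tostring(config))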
def get_all_client_properties(self, params=None):
    """
    Get all properties of a client.
    This will iterate over all pages until it gets all elements,
    so if the rate limit is exceeded it will raise an exception
    and you will get nothing.

    :param params: search params
    :return: list
    """
    return self._iterate_through_pages(
        get_function=self.get_client_properties_per_page,
        resource=CLIENT_PROPERTIES,
        **{'params': params}
    )
[ "def", "get_all_client_properties", "(", "self", ",", "params", "=", "None", ")", ":", "return", "self", ".", "_iterate_through_pages", "(", "get_function", "=", "self", ".", "get_client_properties_per_page", ",", "resource", "=", "CLIENT_PROPERTIES", ",", "*", "*", "{", "'params'", ":", "params", "}", ")" ]
35.928571
15.642857
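The _iterate_through_pages helper is not shown in this record; a minimal sketch of the pattern the docstring implies might look like this (the empty-page stop condition is an assumption, not the real API contract):

def iterate_through_pages(get_function, resource, params=None):
    """Collect `resource` items from every page of a paginated endpoint."""
    page, items = 1, []
    while True:
        batch = get_function(page=page, params=params)[resource]
        items.extend(batch)
        if not batch:   # an empty page means no more results
            return items
        page += 1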
def parse_networking(text=None):
    """
    Access address and port parameters via the builtins or __builtin__
    module. Relish the nonsense.
    """
    try:
        address = _builtins.address
        port = _builtins.port
    except:
        address = None
        port = None
    triggers = []
    if address and port:
        triggers.extend([
            trigger_keyphrases(
                text=text,
                keyphrases=["reverse SSH", "reverse ssh"],
                function=engage_command,
                kwargs={"command": "ssh -R " + str(port) + ":localhost:22 " + address},
                confirm=True,
                confirmation_prompt="Do you want to reverse SSH "
                                    "connect? (y/n)",
                confirmation_feedback_confirm="confirm reverse SSH connect: "
                                              "ssh localhost -p " + str(port),
                confirmation_feedback_deny="deny reverse SSH connect"
            )
        ])
    if any(triggers):
        responses = [response for response in triggers if response]
        if len(responses) > 1:
            return responses
        else:
            return responses[0]
    else:
        return False
[ "def", "parse_networking", "(", "text", "=", "None", ")", ":", "try", ":", "address", "=", "_builtins", ".", "address", "port", "=", "_builtins", ".", "port", "except", ":", "address", "=", "None", "port", "=", "None", "triggers", "=", "[", "]", "if", "address", "and", "port", ":", "triggers", ".", "extend", "(", "[", "trigger_keyphrases", "(", "text", "=", "text", ",", "keyphrases", "=", "[", "\"reverse SSH\"", ",", "\"reverse ssh\"", "]", ",", "function", "=", "engage_command", ",", "kwargs", "=", "{", "\"command\"", ":", "\"ssh -R \"", "+", "str", "(", "port", ")", "+", "\":localhost:22 \"", "+", "address", "}", ",", "confirm", "=", "True", ",", "confirmation_prompt", "=", "\"Do you want to reverse SSH \"", "\"connect? (y/n)\"", ",", "confirmation_feedback_confirm", "=", "\"confirm reverse SSH connect: \"", "\"ssh localhost -p \"", "+", "str", "(", "port", ")", ",", "confirmation_feedback_deny", "=", "\"deny reverse SSH connect\"", ")", "]", ")", "if", "any", "(", "triggers", ")", ":", "responses", "=", "[", "response", "for", "response", "in", "triggers", "if", "response", "]", "if", "len", "(", "responses", ")", ">", "1", ":", "return", "responses", "else", ":", "return", "responses", "[", "0", "]", "else", ":", "return", "False" ]
37.7
21.8
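A worked example of the reverse-SSH command string assembled above, with placeholder values; ssh -R asks the remote host to forward its port back to the local SSH daemon on port 22:

address, port = 'relay.example.org', 2222   # illustrative placeholders
command = "ssh -R " + str(port) + ":localhost:22 " + address
print(command)   # ssh -R 2222:localhost:22 relay.example.org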
def POST(self, **kwargs):
    '''
    Send one or more Salt commands in the request body

    .. http:post:: /

        :reqheader X-Auth-Token: |req_token|
        :reqheader Accept: |req_accept|
        :reqheader Content-Type: |req_ct|

        :resheader Content-Type: |res_ct|

        :status 200: |200|
        :status 400: |400|
        :status 401: |401|
        :status 406: |406|

        :term:`lowstate` data describing Salt commands must be sent in
        the request body.

    **Example request:**

    .. code-block:: bash

        curl -sSik https://localhost:8000 \\
            -b ~/cookies.txt \\
            -H "Accept: application/x-yaml" \\
            -H "Content-type: application/json" \\
            -d '[{"client": "local", "tgt": "*", "fun": "test.ping"}]'

    .. code-block:: text

        POST / HTTP/1.1
        Host: localhost:8000
        Accept: application/x-yaml
        X-Auth-Token: d40d1e1e
        Content-Type: application/json

        [{"client": "local", "tgt": "*", "fun": "test.ping"}]

    **Example response:**

    .. code-block:: text

        HTTP/1.1 200 OK
        Content-Length: 200
        Allow: GET, HEAD, POST
        Content-Type: application/x-yaml

        return:
        - ms-0: true
          ms-1: true
          ms-2: true
          ms-3: true
          ms-4: true
    '''
    return {
        'return': list(self.exec_lowstate(
            token=cherrypy.session.get('token')))
    }
[ "def", "POST", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "{", "'return'", ":", "list", "(", "self", ".", "exec_lowstate", "(", "token", "=", "cherrypy", ".", "session", ".", "get", "(", "'token'", ")", ")", ")", "}" ]
26.2
20.233333
def prediction_error(model, X, y=None, ax=None, alpha=0.75, **kwargs):
    """
    Quick method:

    Plot the actual targets from the dataset against the predicted values
    generated by our model(s). This helper function is a quick wrapper to
    utilize the PredictionError ScoreVisualizer for one-off analysis.

    Parameters
    ----------
    model : the Scikit-Learn estimator (should be a regressor)

    X : ndarray or DataFrame of shape n x m
        A matrix of n instances with m features.

    y : ndarray or Series of length n
        An array or series of target or class values.

    ax : matplotlib Axes
        The axes to plot the figure on.

    shared_limits : bool, default: True
        If shared_limits is True, the range of the X and Y axis limits will
        be identical, creating a square graphic with a true 45 degree line.
        In this form, it is easier to diagnose under- or over-prediction,
        though the figure will become more sparse. To localize points, set
        shared_limits to False, but note that this will distort the figure
        and should be accounted for during analysis.

    bestfit : bool, default: True
        Draw a linear best fit line to estimate the correlation between the
        predicted and measured value of the target variable. The color of
        the bestfit line is determined by the ``line_color`` argument.

    identity : bool, default: True
        Draw the 45 degree identity line, y=x, in order to better show the
        relationship or pattern of the residuals. E.g. to estimate if the
        model is over- or under-estimating the given values. The color of
        the identity line is a muted version of the ``line_color`` argument.

    point_color : color
        Defines the color of the error points; can be any matplotlib color.

    line_color : color
        Defines the color of the best fit line; can be any matplotlib color.

    alpha : float, default: 0.75
        Specify a transparency where 1 is completely opaque and 0 is
        completely transparent. This property makes densely clustered
        points more visible.

    kwargs : dict
        Keyword arguments that are passed to the base class and may
        influence the visualization as defined in other Visualizers.

    Returns
    -------
    ax : matplotlib Axes
        Returns the axes that the prediction error plot was drawn on.
    """
    # Instantiate the visualizer
    visualizer = PredictionError(model, ax, alpha=alpha, **kwargs)

    # Create the train and test splits
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

    # Fit and transform the visualizer (calls draw)
    visualizer.fit(X_train, y_train, **kwargs)
    visualizer.score(X_test, y_test)
    visualizer.finalize()

    # Return the axes object on the visualizer
    return visualizer.ax
[ "def", "prediction_error", "(", "model", ",", "X", ",", "y", "=", "None", ",", "ax", "=", "None", ",", "alpha", "=", "0.75", ",", "*", "*", "kwargs", ")", ":", "# Instantiate the visualizer", "visualizer", "=", "PredictionError", "(", "model", ",", "ax", ",", "alpha", "=", "alpha", ",", "*", "*", "kwargs", ")", "# Create the train and test splits", "X_train", ",", "X_test", ",", "y_train", ",", "y_test", "=", "train_test_split", "(", "X", ",", "y", ",", "test_size", "=", "0.2", ")", "# Fit and transform the visualizer (calls draw)", "visualizer", ".", "fit", "(", "X_train", ",", "y_train", ",", "*", "*", "kwargs", ")", "visualizer", ".", "score", "(", "X_test", ",", "y_test", ")", "visualizer", ".", "finalize", "(", ")", "# Return the axes object on the visualizer", "return", "visualizer", ".", "ax" ]
37.702703
24.945946
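A short usage sketch on scikit-learn's diabetes regression data, assuming the PredictionError visualizer this wrapper instantiates is importable in the same module:

from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge

X, y = load_diabetes(return_X_y=True)
ax = prediction_error(Ridge(), X, y)   # fits, scores, and draws the plot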
def deprecated(since, message='', name='', alternative='', pending=False,
               addendum='', removal=''):
    """
    Decorator to mark a function or a class as deprecated.

    Parameters
    ----------
    since : str
        The release at which this API became deprecated. This is required.

    message : str, optional
        Override the default deprecation message. The format specifier
        `%(name)s` may be used for the name of the object, and
        `%(alternative)s` may be used in the deprecation message to insert
        the name of an alternative to the deprecated object.

    name : str, optional
        The name of the deprecated object; if not provided the name is
        automatically determined from the passed in object, though this is
        useful in the case of renamed functions, where the new function is
        just assigned to the name of the deprecated function. For example::

            def new_function():
                ...
            oldFunction = new_function

    alternative : str, optional
        An alternative API that the user may use in place of the deprecated
        API. The deprecation warning will tell the user about this
        alternative if provided.

    pending : bool, optional
        If True, uses a PendingDeprecationWarning instead of a
        DeprecationWarning. Cannot be used together with *removal*.

    removal : str, optional
        The expected removal version. With the default (an empty string), a
        removal version is automatically computed from *since*. Set to
        other Falsy values to not schedule a removal date. Cannot be used
        together with *pending*.

    addendum : str, optional
        Additional text appended directly to the final message.

    Examples
    --------
    Basic example::

        @deprecated('1.4.0')
        def the_function_to_deprecate():
            pass
    """
    def deprecate(obj, message=message, name=name, alternative=alternative,
                  pending=pending, addendum=addendum):
        if not name:
            name = obj.__name__

        if isinstance(obj, type):
            obj_type = "class"
            old_doc = obj.__doc__
            func = obj.__init__

            def finalize(wrapper, new_doc):
                obj.__doc__ = new_doc
                obj.__init__ = wrapper
                return obj
        else:
            obj_type = "function"
            if isinstance(obj, classmethod):
                func = obj.__func__
                old_doc = func.__doc__

                def finalize(wrapper, new_doc):
                    wrapper = functools.wraps(func)(wrapper)
                    wrapper.__doc__ = new_doc
                    return classmethod(wrapper)
            else:
                func = obj
                old_doc = func.__doc__

                def finalize(wrapper, new_doc):
                    wrapper = functools.wraps(func)(wrapper)
                    wrapper.__doc__ = new_doc
                    return wrapper

        message = _generate_deprecation_message(
            since, message, name, alternative, pending,
            obj_type, addendum, removal=removal)
        category = (PendingDeprecationWarning if pending else _projectWarning)

        def wrapper(*args, **kwargs):
            warnings.warn(message, category, stacklevel=2)
            return func(*args, **kwargs)

        old_doc = textwrap.dedent(old_doc or '').strip('\n')
        message = message.strip()
        new_doc = (('\n.. deprecated:: %(since)s'
                    '\n %(message)s\n\n' %
                    {'since': since, 'message': message}) + old_doc)
        if not old_doc:
            # This is to prevent a spurious 'unexpected unindent' warning from
            # docutils when the original docstring was blank.
            new_doc += r'\ '

        return finalize(wrapper, new_doc)

    return deprecate
[ "def", "deprecated", "(", "since", ",", "message", "=", "''", ",", "name", "=", "''", ",", "alternative", "=", "''", ",", "pending", "=", "False", ",", "addendum", "=", "''", ",", "removal", "=", "''", ")", ":", "def", "deprecate", "(", "obj", ",", "message", "=", "message", ",", "name", "=", "name", ",", "alternative", "=", "alternative", ",", "pending", "=", "pending", ",", "addendum", "=", "addendum", ")", ":", "if", "not", "name", ":", "name", "=", "obj", ".", "__name__", "if", "isinstance", "(", "obj", ",", "type", ")", ":", "obj_type", "=", "\"class\"", "old_doc", "=", "obj", ".", "__doc__", "func", "=", "obj", ".", "__init__", "def", "finalize", "(", "wrapper", ",", "new_doc", ")", ":", "obj", ".", "__doc__", "=", "new_doc", "obj", ".", "__init__", "=", "wrapper", "return", "obj", "else", ":", "obj_type", "=", "\"function\"", "if", "isinstance", "(", "obj", ",", "classmethod", ")", ":", "func", "=", "obj", ".", "__func__", "old_doc", "=", "func", ".", "__doc__", "def", "finalize", "(", "wrapper", ",", "new_doc", ")", ":", "wrapper", "=", "functools", ".", "wraps", "(", "func", ")", "(", "wrapper", ")", "wrapper", ".", "__doc__", "=", "new_doc", "return", "classmethod", "(", "wrapper", ")", "else", ":", "func", "=", "obj", "old_doc", "=", "func", ".", "__doc__", "def", "finalize", "(", "wrapper", ",", "new_doc", ")", ":", "wrapper", "=", "functools", ".", "wraps", "(", "func", ")", "(", "wrapper", ")", "wrapper", ".", "__doc__", "=", "new_doc", "return", "wrapper", "message", "=", "_generate_deprecation_message", "(", "since", ",", "message", ",", "name", ",", "alternative", ",", "pending", ",", "obj_type", ",", "addendum", ",", "removal", "=", "removal", ")", "category", "=", "(", "PendingDeprecationWarning", "if", "pending", "else", "_projectWarning", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "warnings", ".", "warn", "(", "message", ",", "category", ",", "stacklevel", "=", "2", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "old_doc", "=", "textwrap", ".", "dedent", "(", "old_doc", "or", "''", ")", ".", "strip", "(", "'\\n'", ")", "message", "=", "message", ".", "strip", "(", ")", "new_doc", "=", "(", "(", "'\\n.. deprecated:: %(since)s'", "'\\n %(message)s\\n\\n'", "%", "{", "'since'", ":", "since", ",", "'message'", ":", "message", "}", ")", "+", "old_doc", ")", "if", "not", "old_doc", ":", "# This is to prevent a spurious 'unexected unindent' warning from", "# docutils when the original docstring was blank.", "new_doc", "+=", "r'\\ '", "return", "finalize", "(", "wrapper", ",", "new_doc", ")", "return", "deprecate" ]
37.563107
18.553398
def get_area(self):
    """Calculate area of bounding box."""
    return (self.p2.x-self.p1.x)*(self.p2.y-self.p1.y)
[ "def", "get_area", "(", "self", ")", ":", "return", "(", "self", ".", "p2", ".", "x", "-", "self", ".", "p1", ".", "x", ")", "*", "(", "self", ".", "p2", ".", "y", "-", "self", ".", "p1", ".", "y", ")" ]
40.666667
13
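A worked example with a stand-in Point type, assuming p1 is the lower-left and p2 the upper-right corner of the box:

from collections import namedtuple

Point = namedtuple('Point', 'x y')
p1, p2 = Point(2, 3), Point(7, 11)
print((p2.x - p1.x) * (p2.y - p1.y))   # (7-2) * (11-3) = 40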
def shot2shot(insurvey, inshot, calculate_lrud=True):
    """Convert a PocketTopo `Shot` to a Compass `Shot`"""
    # FIXME: requires angles in degrees only, no grads
    splays = insurvey.splays[inshot['FROM']]

    if calculate_lrud and not inshot.is_splay and splays:
        # Try our best to convert PocketTopo splay shots into LRUDs
        print '\n\n' 'sta %s has %d splays' % (inshot['FROM'], len(splays))
        left_azm, right_azm = (inshot['AZM'] - 90) % 360, (inshot['AZM'] + 90) % 360
        left_shot, right_shot = None, None

        left_candidates = find_candidate_splays(splays, left_azm, 0)
        if left_candidates:
            left_shot = max(left_candidates, key=lambda shot: hd(shot['INC'], shot['LENGTH']))
            left = hd(left_shot['INC'], left_shot['LENGTH'])
        else:
            left = 0

        right_candidates = find_candidate_splays(splays, right_azm, 0)
        if right_candidates:
            right_shot = max(right_candidates, key=lambda shot: hd(shot['INC'], shot['LENGTH']))
            right = hd(right_shot['INC'], right_shot['LENGTH'])
        else:
            right = 0

        print '\t' 'left=%.1f azm=%.1f right=%.1f' % (left_azm, inshot['AZM'], right_azm)
        print '\t' '%d candidate LEFT shots' % len(left_candidates)
        for splay in left_candidates:
            print '\t\t\t' + str(splay)
        print '\t\t' '%.1f - Chose: %s' % (left, str(left_shot))
        print '\t' '%d candidate RIGHT shots' % len(right_candidates)
        for splay in right_candidates:
            print '\t\t\t' + str(splay)
        print '\t\t' '%.1f - Chose: %s' % (right, str(right_shot))

        up_candidates = find_candidate_vert_splays(splays, 90)
        if up_candidates:
            up_shot = max(up_candidates, key=lambda splay: vd(splay['INC'], splay['LENGTH']))
            up = vd(up_shot['INC'], up_shot['LENGTH'])
        else:
            up = 0

        down_candidates = find_candidate_vert_splays(splays, -90)
        if down_candidates:
            down_shot = max(down_candidates, key=lambda splay: vd(splay['INC'], splay['LENGTH']))
            # TODO: should vd() give negative and we find min()?
            down = vd(down_shot['INC'], down_shot['LENGTH'])
        else:
            down = 0

        print '\t', inshot, 'LRUD=', ', '.join(('%0.1f' % v) for v in (left, right, up, down))
        assert(all(v >= 0 for v in (left, right, up, down)))
    else:
        up, down, left, right = None, None, None, None

    return compass.Shot([
        ('FROM', inshot['FROM']),
        # Compass requires a named TO station, so we must invent one for splays
        ('TO', inshot['TO'] or '%s.s%03d' % (inshot['FROM'], random.randint(0, 1000))),
        ('LENGTH', m2ft(inshot.length)),
        # BEARING/AZM named inconsistently in Davies to reflect each program's arbitrary name. We
        # can't use `inshot.azm` here because we need the "raw" compass value without declination
        ('BEARING', inshot['AZM']),
        ('INC', inshot.inc),
        ('LEFT', m2ft(left) if left is not None else -9.90),
        ('UP', m2ft(up) if up is not None else -9.90),
        ('DOWN', m2ft(down) if down is not None else -9.90),
        ('RIGHT', m2ft(right) if right is not None else -9.90),  # Compass requires this order!
        # Compass 'L' flag excludes splays from cave length calculation
        ('FLAGS', (compass.Exclude.LENGTH, compass.Exclude.PLOT) if inshot.is_splay else ()),
        # COMMENTS/COMMENT named inconsistently in Davies to reflect each program's arbitrary name
        ('COMMENTS', inshot['COMMENT'])
    ])
[ "def", "shot2shot", "(", "insurvey", ",", "inshot", ",", "calculate_lrud", "=", "True", ")", ":", "# FIXME: requires angles in degrees only, no grads", "splays", "=", "insurvey", ".", "splays", "[", "inshot", "[", "'FROM'", "]", "]", "if", "calculate_lrud", "and", "not", "inshot", ".", "is_splay", "and", "splays", ":", "# Try our best to convert PocketTopo splay shots into LRUDs", "print", "'\\n\\n'", "'sta %s has %d splays'", "%", "(", "inshot", "[", "'FROM'", "]", ",", "len", "(", "splays", ")", ")", "left_azm", ",", "right_azm", "=", "(", "inshot", "[", "'AZM'", "]", "-", "90", ")", "%", "360", ",", "(", "inshot", "[", "'AZM'", "]", "+", "90", ")", "%", "360", "left_shot", ",", "right_shot", "=", "None", ",", "None", "left_candidates", "=", "find_candidate_splays", "(", "splays", ",", "left_azm", ",", "0", ")", "if", "left_candidates", ":", "left_shot", "=", "max", "(", "left_candidates", ",", "key", "=", "lambda", "shot", ":", "hd", "(", "shot", "[", "'INC'", "]", ",", "shot", "[", "'LENGTH'", "]", ")", ")", "left", "=", "hd", "(", "left_shot", "[", "'INC'", "]", ",", "left_shot", "[", "'LENGTH'", "]", ")", "else", ":", "left", "=", "0", "right_candidates", "=", "find_candidate_splays", "(", "splays", ",", "right_azm", ",", "0", ")", "if", "right_candidates", ":", "right_shot", "=", "max", "(", "right_candidates", ",", "key", "=", "lambda", "shot", ":", "hd", "(", "shot", "[", "'INC'", "]", ",", "shot", "[", "'LENGTH'", "]", ")", ")", "right", "=", "hd", "(", "right_shot", "[", "'INC'", "]", ",", "right_shot", "[", "'LENGTH'", "]", ")", "else", ":", "right", "=", "0", "print", "'\\t'", "'left=%.1f azm=%.1f right=%.1f'", "%", "(", "left_azm", ",", "inshot", "[", "'AZM'", "]", ",", "right_azm", ")", "print", "'\\t'", "'%d candidate LEFT shots'", "%", "len", "(", "left_candidates", ")", "for", "splay", "in", "left_candidates", ":", "print", "'\\t\\t\\t'", "+", "str", "(", "splay", ")", "print", "'\\t\\t'", "'%.1f - Chose: %s'", "%", "(", "left", ",", "str", "(", "left_shot", ")", ")", "print", "'\\t'", "'%d candidate RIGHT shots'", "%", "len", "(", "right_candidates", ")", "for", "splay", "in", "right_candidates", ":", "print", "'\\t\\t\\t'", "+", "str", "(", "splay", ")", "print", "'\\t\\t'", "'%.1f - Chose: %s'", "%", "(", "right", ",", "str", "(", "right_shot", ")", ")", "up_candidates", "=", "find_candidate_vert_splays", "(", "splays", ",", "90", ")", "if", "up_candidates", ":", "up_shot", "=", "max", "(", "up_candidates", ",", "key", "=", "lambda", "splay", ":", "vd", "(", "splay", "[", "'INC'", "]", ",", "splay", "[", "'LENGTH'", "]", ")", ")", "up", "=", "vd", "(", "up_shot", "[", "'INC'", "]", ",", "up_shot", "[", "'LENGTH'", "]", ")", "else", ":", "up", "=", "0", "down_candidates", "=", "find_candidate_vert_splays", "(", "splays", ",", "-", "90", ")", "if", "down_candidates", ":", "down_shot", "=", "max", "(", "down_candidates", ",", "key", "=", "lambda", "splay", ":", "vd", "(", "splay", "[", "'INC'", "]", ",", "splay", "[", "'LENGTH'", "]", ")", ")", "# TODO: should vd() give negative and we find min()?", "down", "=", "vd", "(", "down_shot", "[", "'INC'", "]", ",", "down_shot", "[", "'LENGTH'", "]", ")", "else", ":", "down", "=", "0", "print", "'\\t'", ",", "inshot", ",", "'LRUD='", ",", "', '", ".", "join", "(", "(", "'%0.1f'", "%", "v", ")", "for", "v", "in", "(", "left", ",", "right", ",", "up", ",", "down", ")", ")", "assert", "(", "all", "(", "v", ">=", "0", "for", "v", "in", "(", "left", ",", "right", ",", "up", ",", "down", ")", ")", ")", "else", ":", "up", ",", 
"down", ",", "left", ",", "right", "=", "None", ",", "None", ",", "None", ",", "None", "return", "compass", ".", "Shot", "(", "[", "(", "'FROM'", ",", "inshot", "[", "'FROM'", "]", ")", ",", "# Compass requires a named TO station, so we must invent one for splays", "(", "'TO'", ",", "inshot", "[", "'TO'", "]", "or", "'%s.s%03d'", "%", "(", "inshot", "[", "'FROM'", "]", ",", "random", ".", "randint", "(", "0", ",", "1000", ")", ")", ")", ",", "(", "'LENGTH'", ",", "m2ft", "(", "inshot", ".", "length", ")", ")", ",", "# BEARING/AZM named inconsistently in Davies to reflect each program's arbitrary name. We", "# can't use `inshot.azm` here because we need the \"raw\" compass value without declination", "(", "'BEARING'", ",", "inshot", "[", "'AZM'", "]", ")", ",", "(", "'INC'", ",", "inshot", ".", "inc", ")", ",", "(", "'LEFT'", ",", "m2ft", "(", "left", ")", "if", "left", "is", "not", "None", "else", "-", "9.90", ")", ",", "(", "'UP'", ",", "m2ft", "(", "up", ")", "if", "left", "is", "not", "None", "else", "-", "9.90", ")", ",", "(", "'DOWN'", ",", "m2ft", "(", "down", ")", "if", "left", "is", "not", "None", "else", "-", "9.90", ")", ",", "(", "'RIGHT'", ",", "m2ft", "(", "right", ")", "if", "left", "is", "not", "None", "else", "-", "9.90", ")", ",", "# Compass requires this order!", "# Compass 'L' flag excludes splays from cave length calculation", "(", "'FLAGS'", ",", "(", "compass", ".", "Exclude", ".", "LENGTH", ",", "compass", ".", "Exclude", ".", "PLOT", ")", "if", "inshot", ".", "is_splay", "else", "(", ")", ")", ",", "# COMMENTS/COMMENT named inconsistently in Davies to reflect each program's arbitrary name", "(", "'COMMENTS'", ",", "inshot", "[", "'COMMENT'", "]", ")", "]", ")" ]
50.428571
27.157143
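The hd()/vd() helpers used above are not shown in this record; a plausible reading, offered as an assumption rather than the project's actual code, is the horizontal and vertical components of a shot with the given inclination (degrees) and length:

import math

def hd(inc, length):
    # horizontal distance covered by a shot of `length` at inclination `inc`
    return length * math.cos(math.radians(inc))

def vd(inc, length):
    # vertical magnitude; kept positive, consistent with the max()/TODO note above
    return abs(length * math.sin(math.radians(inc)))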
def _node(handler, single=None, multi=None):
    """Return an _AbstractSyntaxTreeNode with some elements defaulted."""
    return _AbstractSyntaxTreeNode(handler=handler,
                                   single=(single if single else []),
                                   multi=(multi if multi else []))
[ "def", "_node", "(", "handler", ",", "single", "=", "None", ",", "multi", "=", "None", ")", ":", "return", "_AbstractSyntaxTreeNode", "(", "handler", "=", "handler", ",", "single", "=", "(", "single", "if", "single", "else", "[", "]", ")", ",", "multi", "=", "(", "multi", "if", "multi", "else", "[", "]", ")", ")" ]
60.6
14
def on_complete(cls, req):
    """
    Callback called when the request to REST is done. Handles the errors
    and if there is none, :class:`.OutputPicker` is shown.
    """
    # handle http errors
    if not (req.status == 200 or req.status == 0):
        ViewController.log_view.add(req.text)
        alert(req.text)  # TODO: better handling
        return

    try:
        resp = json.loads(req.text)
    except ValueError:
        resp = None

    if not resp:
        alert("Chyba při konverzi!")  # Czech: "Error during conversion!" TODO: better
        ViewController.log_view.add(
            "Error while generating MARC: %s" % req.text
        )
        return

    OutputPicker.show(resp)
[ "def", "on_complete", "(", "cls", ",", "req", ")", ":", "# handle http errors", "if", "not", "(", "req", ".", "status", "==", "200", "or", "req", ".", "status", "==", "0", ")", ":", "ViewController", ".", "log_view", ".", "add", "(", "req", ".", "text", ")", "alert", "(", "req", ".", "text", ")", "# TODO: better handling", "return", "try", ":", "resp", "=", "json", ".", "loads", "(", "req", ".", "text", ")", "except", "ValueError", ":", "resp", "=", "None", "if", "not", "resp", ":", "alert", "(", "\"Chyba při konverzi!\")", " ", " TODO: better", "ViewController", ".", "log_view", ".", "add", "(", "\"Error while generating MARC: %s\"", "%", "resp", ".", "text", ")", "return", "OutputPicker", ".", "show", "(", "resp", ")" ]
30.25
18.166667
def _write_value(value, path):
    """
    Writes specified value into path. Note that the value is wrapped in
    single quotes in the command, to prevent injecting bash commands.

    :param value: The value to write (usually a number or string)
    :param path: A valid system path
    """
    base_command = "echo '{0}' > {1}"
    # There is no common method for redirecting stderr to a null sink, so the
    # command string is platform-dependent
    if platform == 'win32':
        command = "{0} > NUL".format(base_command)
    else:
        command = "exec 2> /dev/null; {0}".format(base_command)
    os.system(command.format(value, path))
[ "def", "_write_value", "(", "value", ",", "path", ")", ":", "base_command", "=", "\"echo '{0}' > {1}\"", "# There is no common method for redirecting stderr to a null sink, so the", "# command string is platform-dependent", "if", "platform", "==", "'win32'", ":", "command", "=", "\"{0} > NUL\"", ".", "format", "(", "base_command", ")", "else", ":", "command", "=", "\"exec 2> /dev/null; {0}\"", ".", "format", "(", "base_command", ")", "os", ".", "system", "(", "command", ".", "format", "(", "value", ",", "path", ")", ")" ]
42.133333
14.666667
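Single-quoting only helps until the value itself contains a single quote; a sketch of a shell-free alternative (a design option, not this module's actual code) sidesteps quoting entirely by writing from Python:

def _write_value_direct(value, path):
    # hypothetical helper: same effect as `echo value > path`, no shell involved
    try:
        with open(path, 'w') as f:
            f.write(str(value))
    except OSError:
        pass   # mirror the original's silent discard of write errors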
def annotations_from_file(filename):
    """Get a list of event annotations from an EDF (European Data Format)
    or EDF+ file, using edflib.

    Args:
        filename: EDF+ file

    Returns:
        list: annotation events, each in the form
        [start_time, duration, text]
    """
    import edflib
    e = edflib.EdfReader(filename, annotations_mode='all')
    return e.read_annotations()
[ "def", "annotations_from_file", "(", "filename", ")", ":", "import", "edflib", "e", "=", "edflib", ".", "EdfReader", "(", "filename", ",", "annotations_mode", "=", "'all'", ")", "return", "e", ".", "read_annotations", "(", ")" ]
29.153846
19.461538
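An illustrative call; 'recording.edf' is a placeholder filename, and each event unpacks per the docstring's [start_time, duration, text] form:

for start, duration, text in annotations_from_file('recording.edf'):
    print(start, duration, text)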
def include_revision(revision_num, skip_factor=1.1):
    """Decide whether to include a revision.

    If the number of revisions is large, we exclude some revisions to
    avoid a quadratic blowup in runtime, since the article is likely
    also large.

    We make the ratio between consecutive included revision numbers
    approximately equal to "skip_factor".

    Args:
        revision_num: an integer
        skip_factor: a floating point number >= 1.0

    Returns:
        a boolean
    """
    if skip_factor <= 1.0:
        return True
    return (int(math.log1p(revision_num) / math.log(skip_factor)) != int(
        math.log(revision_num + 2.0) / math.log(skip_factor)))
[ "def", "include_revision", "(", "revision_num", ",", "skip_factor", "=", "1.1", ")", ":", "if", "skip_factor", "<=", "1.0", ":", "return", "True", "return", "(", "int", "(", "math", ".", "log1p", "(", "revision_num", ")", "/", "math", ".", "log", "(", "skip_factor", ")", ")", "!=", "int", "(", "math", ".", "log", "(", "revision_num", "+", "2.0", ")", "/", "math", ".", "log", "(", "skip_factor", ")", ")", ")" ]
30.95
23.4
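A quick worked example of the thinning: with skip_factor=1.1 the kept revision numbers end up roughly geometrically spaced, so small revision numbers are all kept while large ones are sampled sparsely:

import math   # include_revision relies on math.log1p / math.log

kept = [n for n in range(10000) if include_revision(n, skip_factor=1.1)]
print(len(kept), kept[-5:])   # far fewer than 10000, with widening gaps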