Columns: Unnamed: 0 (int64, 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k)
383,900
def mod_repo(repo, **kwargs): Modify one or more values for a repo. Key/Value pairs may also be removed from a repo's configuration by setting a key to a blank value. Bear in mind that a name cannot be deleted, and a URL can only be deleted if a ``mirrorlist`` is specified (or vice versa). CLI Examples: .. code-block:: bash salt '*' pkg.mod_repo alias alias=new_alias salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/ [body: creates a missing repo with ``zypper ar`` (raising if neither ``baseurl`` nor ``mirrorlist`` is given, if the baseurl/mirrorlist URL is malformed, or if the repo already exists under another alias), then maps ``enabled``/``refresh``/``cache``/``gpgcheck`` to ``--enable``/``--disable``, ``--refresh``/``--no-refresh``, ``--keep-packages``/``--no-keep-packages``, ``--gpgcheck``/``--no-gpgcheck``, plus ``priority``, ``humanname`` and ``--gpg-auto-import-keys``, applied via ``zypper mr``] repo['comment'] = comment return repo
Modify one or more values for a repo. If the repo does not exist, it will be created, so long as the following values are specified: repo or alias alias by which Zypper refers to the repo url, mirrorlist or baseurl the URL for Zypper to reference enabled Enable or disable (True or False) repository, but do not remove if disabled. refresh Enable or disable (True or False) auto-refresh of the repository. cache Enable or disable (True or False) RPM files caching. gpgcheck Enable or disable (True or False) GPG check for this repository. gpgautoimport : False If set to True, automatically trust and import public GPG key for the repository. root operate on a different root directory. Key/Value pairs may also be removed from a repo's configuration by setting a key to a blank value. Bear in mind that a name cannot be deleted, and a URL can only be deleted if a ``mirrorlist`` is specified (or vice versa). CLI Examples: .. code-block:: bash salt '*' pkg.mod_repo alias alias=new_alias salt '*' pkg.mod_repo alias url= mirrorlist=http://host.com/
383,901
def cleanup(arctic_lib, symbol, version_ids, versions_coll, shas_to_delete=None, pointers_cfgs=None): pointers_cfgs = set(pointers_cfgs) if pointers_cfgs else set() collection = arctic_lib.get_top_level_collection() version_ids = list(version_ids) all_symbol_pointers_cfgs = _get_symbol_pointer_cfgs(symbol, versions_coll) all_symbol_pointers_cfgs.update(pointers_cfgs) if all_symbol_pointers_cfgs == {FwPointersCfg.DISABLED} or not all_symbol_pointers_cfgs: _cleanup_parent_pointers(collection, symbol, version_ids) return if FwPointersCfg.DISABLED not in all_symbol_pointers_cfgs: _cleanup_fw_pointers(collection, symbol, version_ids, versions_coll, shas_to_delete=shas_to_delete, do_clean=True) return _cleanup_mixed(symbol, collection, version_ids, versions_coll)
Helper method for cleaning up chunks from a version store
383,902
def setDataFrame(self, dataFrame, copyDataFrame=False, filePath=None): if not isinstance(dataFrame, pandas.core.frame.DataFrame): raise TypeError("not of type pandas.core.frame.DataFrame") self.layoutAboutToBeChanged.emit() if copyDataFrame: self._dataFrame = dataFrame.copy() else: self._dataFrame = dataFrame self._columnDtypeModel = ColumnDtypeModel(dataFrame) self._columnDtypeModel.dtypeChanged.connect(self.propagateDtypeChanges) self._columnDtypeModel.changeFailed.connect( lambda columnName, index, dtype: self.changingDtypeFailed.emit(columnName, index, dtype) ) if filePath is not None: self._filePath = filePath self.layoutChanged.emit() self.dataChanged.emit() self.dataFrameChanged.emit()
Setter function to _dataFrame. Holds all data. Note: It's not implemented with python properties to keep Qt conventions. Raises: TypeError: if dataFrame is not of type pandas.core.frame.DataFrame. Args: dataFrame (pandas.core.frame.DataFrame): assign dataFrame to _dataFrame. Holds all the data displayed. copyDataFrame (bool, optional): create a copy of dataFrame or use it as is. Defaults to False. If you use it as is, you can change it from outside; otherwise you have to reset the dataFrame after external changes.
383,903
def getAttributeNode(self, attr: str) -> Optional[Attr]: return self.attributes.getNamedItem(attr)
Get attribute of this node as Attr format. If this node does not have ``attr``, return None.
383,904
def processes(self, plantuml_text): url = self.get_url(plantuml_text) try: response, content = self.http.request(url, **self.request_opts) except self.HttpLib2Error as e: raise PlantUMLConnectionError(e) if response.status != 200: raise PlantUMLHTTPError(response, content) return content
Processes the plantuml text into the raw PNG image data. :param str plantuml_text: The plantuml markup to render :returns: the raw image data
383,905
def add(name, function_name, cron): lambder.add_event(name=name, function_name=function_name, cron=cron)
Create an event
383,906
def verify(self, verifier, consumer_key=None, consumer_secret=None, access_token=None, access_token_secret=None): self.consumer_key = consumer_key or self.consumer_key self.consumer_secret = consumer_secret or self.consumer_secret self.access_token = access_token or self.access_token self.access_token_secret = access_token_secret or self.access_token_secret oauth = OAuth1( self.consumer_key, client_secret=self.consumer_secret, resource_owner_key=self.access_token, resource_owner_secret=self.access_token_secret, verifier=verifier) r = requests.post(self.url(), auth=oauth) if r.status_code == 200: creds = parse_qs(r.content) else: return (False, r.content) self.finalize_oauth(creds.get()[0], creds.get()[0]) return (True, None)
After converting the token into verifier, call this to finalize the authorization.
383,907
def disvecinf(self, x, y, aq=None): if aq is None: aq = self.model.aq.find_aquifer_data(x, y) rv = np.zeros((2, self.nparam, aq.naq)) if aq == self.aq: qxqy = np.zeros((2, aq.naq)) qxqy[:, :] = self.bessel.disbeslsho(float(x), float(y), self.z1, self.z2, aq.lab, 0, aq.ilap, aq.naq) rv[0] = self.aq.coef[self.layers] * qxqy[0] rv[1] = self.aq.coef[self.layers] * qxqy[1] return rv
Can be called with only one x,y value
383,908
def desaturate(c, k=0): from matplotlib.colors import ColorConverter c = ColorConverter().to_rgb(c) intensity = 0.299 * c[0] + 0.587 * c[1] + 0.114 * c[2] return [intensity * k + i * (1 - k) for i in c]
Utility function to desaturate a color c by an amount k.
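A quick, self-contained demo of the row above (the function is repeated verbatim; requires matplotlib). With ``k=0`` the color is unchanged; with ``k=1`` it collapses to the gray of equal Rec. 601 luma (0.299 R + 0.587 G + 0.114 B).

.. code-block:: python

    from matplotlib.colors import ColorConverter

    def desaturate(c, k=0):
        # Accept any matplotlib color spec ('red', '#ff0000', (1, 0, 0)).
        c = ColorConverter().to_rgb(c)
        # Rec. 601 luma: the perceived brightness of the color.
        intensity = 0.299 * c[0] + 0.587 * c[1] + 0.114 * c[2]
        # Blend each channel toward that gray by a factor of k.
        return [intensity * k + i * (1 - k) for i in c]

    print(desaturate('red', 0.0))  # [1.0, 0.0, 0.0] -- unchanged
    print(desaturate('red', 1.0))  # [0.299, 0.299, 0.299] -- fully gray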
383,909
def add_marker_to_qtls(qtlfile, mapfile, outputfile=): qtl_list = read_input_file(qtlfile, ) map_list = read_input_file(mapfile, ) if not qtl_list or not map_list: return qtl_list[0].append() qtls = [] qtls.append(qtl_list[0]) for qtl in qtl_list[1:]: qtl.append(add_marker_to_qtl(qtl, map_list)) qtls.append(qtl) LOG.info( % (len(qtls), qtlfile)) write_matrix(outputfile, qtls)
This function adds to a list of QTLs the closest marker to each QTL peak. :arg qtlfile: a CSV list of all the QTLs found. The file should be structured as follows:: Trait, Linkage group, position, other columns The other columns will not matter as long as the first three columns are as such. :arg mapfile: a CSV representation of the map used for the QTL mapping analysis. The file should be structured as follows:: Marker, Linkage group, position :kwarg outputfile: the name of the output file in which the list of QTLs with their closest marker will be written.
383,910
def initial(self, request, *args, **kwargs): super(NodeImageList, self).initial(request, *args, **kwargs) self.node = get_queryset_or_404( Node.objects.published().accessible_to(request.user), {: self.kwargs[]} ) self.check_object_permissions(request, self.node)
Custom initial method: * ensure node exists and store it in an instance attribute * change queryset to return only images of current node
383,911
def _setSampleSizeBytes(self): self.sampleSizeBytes = self.getPacketSize() if self.sampleSizeBytes > 0: self.maxBytesPerFifoRead = (32 // self.sampleSizeBytes)
Updates the current record of the packet size per sample and the relationship between this and the FIFO reads.
383,912
def get_authorizations_for_resource_and_function(self, resource_id, function_id): collection = JSONClientValidated(, collection=, runtime=self._runtime) result = collection.find( dict({: str(resource_id), : str(function_id)}, **self._view_filter())).sort(, ASCENDING) return objects.AuthorizationList(result, runtime=self._runtime)
Gets a list of ``Authorizations`` associated with a given resource. Authorizations related to the given resource, including those related through an ``Agent,`` are returned. In plenary mode, the returned list contains all known authorizations or an error results. Otherwise, the returned list may contain only those authorizations that are accessible through this session. arg: resource_id (osid.id.Id): a resource ``Id`` arg: function_id (osid.id.Id): a function ``Id`` return: (osid.authorization.AuthorizationList) - the returned ``Authorization list`` raise: NullArgument - ``resource_id`` or ``function_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
383,913
def from_seqfeature(s, **kwargs): source = s.qualifiers.get(, )[0] score = s.qualifiers.get(, )[0] seqid = s.qualifiers.get(, )[0] frame = s.qualifiers.get(, )[0] strand = _feature_strand[s.strand] start = s.location.start.position + 1 stop = s.location.end.position featuretype = s.type id = s.id attributes = dict(s.qualifiers) attributes.pop(, ) attributes.pop(, ) attributes.pop(, ) attributes.pop(, ) return Feature(seqid, source, featuretype, start, stop, score, strand, frame, attributes, id=id, **kwargs)
Converts a Bio.SeqFeature object to a gffutils.Feature object. The GFF fields `source`, `score`, `seqid`, and `frame` are assumed to be stored as qualifiers. Any other qualifiers will be assumed to be GFF attributes.
383,914
def remove_straddlers(events, time, s_freq, toler=0.1): dur = (events[:, -1] - 1 - events[:, 0]) / s_freq continuous = time[events[:, -1] - 1] - time[events[:, 0]] - dur < toler return events[continuous, :]
Reject an event if it straddles a stitch, by comparing its duration to its timespan. Parameters ---------- events : ndarray (dtype='int') N x M matrix with start, ..., end samples time : ndarray (dtype='float') vector with time points s_freq : float sampling frequency toler : float, def=0.1 maximum tolerated difference between event duration and timespan Returns ------- ndarray (dtype='int') N x M matrix with start, ..., end samples
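A self-contained sketch of the rejection rule (the function repeated from the row above; values illustrative): the time vector below has a 10 s stitch between samples 99 and 100, so an event spanning it has an elapsed timespan far larger than its sample-count duration and is dropped.

.. code-block:: python

    import numpy as np

    def remove_straddlers(events, time, s_freq, toler=0.1):
        # Duration implied by the number of samples...
        dur = (events[:, -1] - 1 - events[:, 0]) / s_freq
        # ...versus actual elapsed time; a straddled stitch inflates the latter.
        continuous = time[events[:, -1] - 1] - time[events[:, 0]] - dur < toler
        return events[continuous, :]

    s_freq = 10.0
    # Two recording segments stitched together: 0-10 s, then 20-30 s.
    time = np.concatenate([np.arange(0, 10, 0.1), np.arange(20, 30, 0.1)])
    events = np.array([[10, 20],     # lies inside the first segment -> kept
                       [95, 105]])   # straddles the stitch -> rejected
    print(remove_straddlers(events, time, s_freq))  # [[10 20]]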
383,915
def ekopr(fname): fname = stypes.stringToCharP(fname) handle = ctypes.c_int() libspice.ekopr_c(fname, ctypes.byref(handle)) return handle.value
Open an existing E-kernel file for reading. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ekopr_c.html :param fname: Name of EK file. :type fname: str :return: Handle attached to EK file. :rtype: int
383,916
def build_machine(system_info, core_resource=Cores, sdram_resource=SDRAM, sram_resource=SRAM): try: max_cores = max(c.num_cores for c in itervalues(system_info)) except ValueError: max_cores = 0 try: max_sdram = max(c.largest_free_sdram_block for c in itervalues(system_info)) except ValueError: max_sdram = 0 try: max_sram = max(c.largest_free_sram_block for c in itervalues(system_info)) except ValueError: max_sram = 0 return Machine(width=system_info.width, height=system_info.height, chip_resources={ core_resource: max_cores, sdram_resource: max_sdram, sram_resource: max_sram, }, chip_resource_exceptions={ chip: { core_resource: info.num_cores, sdram_resource: info.largest_free_sdram_block, sram_resource: info.largest_free_sram_block, } for chip, info in iteritems(system_info) if (info.num_cores != max_cores or info.largest_free_sdram_block != max_sdram or info.largest_free_sram_block != max_sram) }, dead_chips=set(system_info.dead_chips()), dead_links=set(system_info.dead_links()))
Build a :py:class:`~rig.place_and_route.Machine` object from a :py:class:`~rig.machine_control.machine_controller.SystemInfo` object. .. note:: Links are tested by sending a 'PEEK' command down the link which checks to see if the remote device responds correctly. If the link is dead, no response will be received and the link will be assumed dead. Since peripherals do not generally respond to 'PEEK' commands, working links attached to peripherals will also be marked as dead. .. note:: The returned object does not report how much memory is free, nor how many cores are idle but rather the total number of working cores and the size of the heap. See :py:func:`.build_resource_constraints` for a function which can generate a set of :py:class:`~rig.place_and_route.constraints` which prevent the use of already in-use cores and memory. .. note:: This method replaces the deprecated :py:meth:`rig.machine_control.MachineController.get_machine` method. Its functionality may be recreated using :py:meth:`rig.machine_control.MachineController.get_system_info` along with this function like so:: >> sys_info = mc.get_system_info() >> machine = build_machine(sys_info) Parameters ---------- system_info : :py:class:`rig.machine_control.machine_controller.SystemInfo` The resource availability information for a SpiNNaker machine, typically produced by :py:meth:`rig.machine_control.MachineController.get_system_info`. core_resource : resource (default: :py:class:`rig.place_and_route.Cores`) The resource type to use to represent the number of working cores on a chip, including the monitor, those already in use and all idle cores. sdram_resource : resource (default: :py:class:`rig.place_and_route.SDRAM`) The resource type to use to represent SDRAM on a chip. This resource will be set to the number of bytes in the largest free block in the SDRAM heap. This gives a conservative estimate of the amount of free SDRAM on the chip which will be an underestimate in the presence of memory fragmentation. sram_resource : resource (default: :py:class:`rig.place_and_route.SRAM`) The resource type to use to represent SRAM (a.k.a. system RAM) on a chip. This resource will be set to the number of bytes in the largest free block in the SRAM heap. This gives a conservative estimate of the amount of free SRAM on the chip which will be an underestimate in the presence of memory fragmentation. Returns ------- :py:class:`rig.place_and_route.Machine` A :py:class:`~rig.place_and_route.Machine` object representing the resources available within a SpiNNaker machine in the form used by the place-and-route infrastructure.
383,917
def calibrate(self, data, key): logger.warning() if key.calibration == : pass elif key.calibration == : pass else: pass return data
Data calibration.
383,918
def get_app_name(self): app_name = self.get_attribute_value(, ) if app_name is None: activities = self.get_main_activities() main_activity_name = None if len(activities) > 0: main_activity_name = activities.pop() app_name = self.get_attribute_value(, , name=main_activity_name) if app_name is None: log.warning("It looks like that no app name is set for the main activity!") return "" if app_name.startswith("@"): res_parser = self.get_android_resources() if not res_parser: return app_name res_id, package = res_parser.parse_id(app_name) if package and package != self.get_package(): if package == : log.warning("Resource ID with android package name encountered! " "Will not resolve, framework-res.apk would be required.") return app_name else: log.warning("Resource ID with Package name encountered! Will not resolve".format(package)) return app_name try: app_name = res_parser.get_resolved_res_configs( res_id, ARSCResTableConfig.default_config())[0][1] except Exception as e: log.warning("Exception selecting app name: %s" % e) return app_name
Return the appname of the APK This name is read from the AndroidManifest.xml using the application android:label. If no label exists, the android:label of the main activity is used. If there is also no main activity label, an empty string is returned. :rtype: :class:`str`
383,919
def gather_votes(self, candidates): votes = [] for a in self.get_agents(addr=False): vote = a.vote(candidates) votes.append(vote) return votes
Gather votes for the given candidates from the agents in the environment. Returned votes are anonymous, i.e. they cannot be tracked to any individual agent afterwards. :returns: A list of votes. Each vote is a list of ``(artifact, preference)`` -tuples sorted in a preference order of a single agent.
383,920
def _do_link_patterns(self, text): link_from_hash = {} for regex, repl in self.link_patterns: replacements = [] for match in regex.finditer(text): if hasattr(repl, "__call__"): href = repl(match) else: href = match.expand(repl) replacements.append((match.span(), href)) for (start, end), href in reversed(replacements): escaped_href = ( href.replace(, ) .replace(, self._escape_table[]) .replace(, self._escape_table[])) link = % (escaped_href, text[start:end]) hash = _hash_text(link) link_from_hash[hash] = link text = text[:start] + hash + text[end:] for hash, link in list(link_from_hash.items()): text = text.replace(hash, link) return text
Caveat emptor: there isn't much guarding against link patterns being formed inside other standard Markdown links, e.g. inside a [link def][like this]. Dev Notes: *Could* consider prefixing regexes with a negative lookbehind assertion to attempt to guard against this.
383,921
def _prompt_choice(var_name, options): choice_map = OrderedDict( (.format(i), value) for i, value in enumerate(options, 1) if value[0] != ) choices = choice_map.keys() default = choice_lines = [.format(c[0], c[1][0], c[1][1]) for c in choice_map.items()] prompt = .join(( .format(var_name), .join(choice_lines), .format(.join(choices)) )) user_choice = click.prompt( prompt, type=click.Choice(choices), default=default ) return choice_map[user_choice]
Prompt the user to choose from a list of options, indexing each one by adding an enumerator, based on https://github.com/audreyr/cookiecutter/blob/master/cookiecutter/prompt.py#L51 :param var_name: The question to ask the user :type var_name: ``str`` :param options: A list of options :type options: ``list`` of ``tuple`` :rtype: ``tuple`` :returns: The option selected by the user
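The row's code cell lost its string literals, but the indexing idea it implements is simple; a minimal sketch (option names are made up, and the click prompt itself is omitted):

.. code-block:: python

    from collections import OrderedDict

    options = [('postgres', 'PostgreSQL database'),
               ('sqlite', 'File-based database')]

    # Index the options from 1, cookiecutter-style.
    choice_map = OrderedDict(
        ('{}'.format(i), value) for i, value in enumerate(options, 1))

    for key, (name, description) in choice_map.items():
        print('{}) {} - {}'.format(key, name, description))
    # 1) postgres - PostgreSQL database
    # 2) sqlite - File-based database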
383,922
def find_by_id(cls, id): obj = cls.find_one(cls._id_spec(id)) if not obj: raise NotFoundException(cls.collection, id) return obj
Finds a single document by its ID. Throws a NotFoundException if the document does not exist (the assumption being if you've got an id you should be pretty certain the thing exists)
383,923
def get_commands(self, command_name, **kwargs): chip_id = kwargs.pop("ChipID", self.chip_id_bitarray) commands = [] if command_name == "zeros": bv = bitarray(endian=) if "length" in kwargs: bv += bitarray(kwargs["length"], endian=) elif kwargs: raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys())) bv.setall(0) commands.append(bv) elif command_name == "ones": bv = bitarray(endian=) if "length" in kwargs: bv += bitarray(kwargs["length"], endian=) elif kwargs: raise ValueError("Unknown parameter(s): %s" % ", ".join(kwargs.iterkeys())) bv.setall(1) commands.append(bv) elif command_name == "WrRegister": register_addresses = self.get_global_register_attributes("addresses", **kwargs) register_bitsets = self.get_global_register_bitsets(register_addresses) commands.extend([self.build_command(command_name, Address=register_address, GlobalData=register_bitset, ChipID=chip_id, **kwargs) for register_address, register_bitset in zip(register_addresses, register_bitsets)]) elif command_name == "RdRegister": register_addresses = self.get_global_register_attributes(, **kwargs) commands.extend([self.build_command(command_name, Address=register_address, ChipID=chip_id) for register_address in register_addresses]) elif command_name == "WrFrontEnd": registers = ["S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr"] if self.fei4a: registers.append("ReadSkipped") elif self.fei4b: registers.append("SR_Read") self.create_restore_point() dcs = kwargs.pop("dcs", range(40)) if not dcs: dcs = range(40) joint_write = kwargs.pop("joint_write", False) same_mask_for_all_dc = kwargs.pop("same_mask_for_all_dc", False) register_objects = self.get_pixel_register_objects(do_sort=[], **kwargs) if not self.broadcast: self.set_global_register_value("Colpr_Mode", 0) self.set_global_register_value("Colpr_Addr", 40) commands.extend(self.get_commands("WrRegister", name=["Colpr_Mode", "Colpr_Addr"], ChipID=8)) self.set_global_register_value("S0", 0) self.set_global_register_value("S1", 0) self.set_global_register_value("SR_Clr", 0) self.set_global_register_value("CalEn", 0) self.set_global_register_value("DIGHITIN_SEL", 0) self.set_global_register_value("GateHitOr", 0) self.set_global_register_value("ReadErrorReq", 0) self.set_global_register_value("StopClkPulse", 0) self.set_global_register_value("SR_Clock", 0) self.set_global_register_value("Efuse_Sense", 0) self.set_global_register_value("HITLD_IN", 0) self.set_global_register_value("Colpr_Mode", 3 if same_mask_for_all_dc else 0) self.set_global_register_value("Colpr_Addr", 0) if self.fei4a: self.set_global_register_value("ReadSkipped", 0) elif self.fei4b: self.set_global_register_value("SR_Read", 0) commands.extend(self.get_commands("WrRegister", name=registers)) if joint_write: pxstrobes = 0 first_read = True do_latch = False for register_object in register_objects: if register_object[] != 1: raise ValueError( % register_object[]) pxstrobe = register_object[] if not isinstance(pxstrobe, basestring): do_latch = True pxstrobes += 2 ** register_object[] if first_read: pixel_reg_value = register_object[] first_read = False else: if np.array_equal(pixel_reg_value, register_object[]): pixel_reg_value = register_object[] else: raise ValueError( % register_object[]) if do_latch: self.set_global_register_value("Latch_En", 1) else: self.set_global_register_value("Latch_En", 0) self.set_global_register_value("Pixel_Strobes", pxstrobes) 
commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"])) for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs): self.set_global_register_value("Colpr_Addr", dc_no) commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"])) register_bitset = self.get_pixel_register_bitset(register_objects[0], 0, dc_no) commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)]) if do_latch: commands.extend(self.get_commands("GlobalPulse", Width=0)) else: for register_object in register_objects: pxstrobe = register_object[] if isinstance(pxstrobe, basestring): do_latch = False self.set_global_register_value("Pixel_Strobes", 0) self.set_global_register_value("Latch_En", 0) commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes", "Latch_En"])) else: do_latch = True self.set_global_register_value("Latch_En", 1) commands.extend(self.get_commands("WrRegister", name=["Latch_En"])) bitlength = register_object[] for bit_no, pxstrobe_bit_no in (enumerate(range(bitlength)) if (register_object[] is False) else enumerate(reversed(range(bitlength)))): if do_latch: self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + bit_no)) commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"])) for dc_no in (dcs[:1] if same_mask_for_all_dc else dcs): self.set_global_register_value("Colpr_Addr", dc_no) commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"])) register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no, dc_no) commands.extend([self.build_command(command_name, PixelData=register_bitset, ChipID=8, **kwargs)]) if do_latch: commands.extend(self.get_commands("GlobalPulse", Width=0)) self.restore(pixel_register=False) commands.extend(self.get_commands("WrRegister", name=registers)) elif command_name == "RdFrontEnd": registers = ["Conf_AddrEnable", "S0", "S1", "SR_Clr", "CalEn", "DIGHITIN_SEL", "GateHitOr", "ReadErrorReq", "StopClkPulse", "SR_Clock", "Efuse_Sense", "HITLD_IN", "Colpr_Mode", "Colpr_Addr", "Pixel_Strobes", "Latch_En"] if self.fei4a: registers.append("ReadSkipped") elif self.fei4b: registers.append("SR_Read") self.create_restore_point() dcs = kwargs.pop("dcs", range(40)) if not dcs: dcs = range(40) register_objects = self.get_pixel_register_objects(**kwargs) self.set_global_register_value(, 1) self.set_global_register_value("S0", 0) self.set_global_register_value("S1", 0) self.set_global_register_value("SR_Clr", 0) if self.fei4b: self.set_global_register_value("SR_Read", 0) self.set_global_register_value("CalEn", 0) self.set_global_register_value("DIGHITIN_SEL", 0) self.set_global_register_value("GateHitOr", 0) if self.fei4a: self.set_global_register_value("ReadSkipped", 0) self.set_global_register_value("ReadErrorReq", 0) self.set_global_register_value("StopClkPulse", 0) self.set_global_register_value("SR_Clock", 0) self.set_global_register_value("Efuse_Sense", 0) self.set_global_register_value("HITLD_IN", 0) self.set_global_register_value("Colpr_Mode", 0) self.set_global_register_value("Colpr_Addr", 0) self.set_global_register_value("Latch_En", 0) self.set_global_register_value("Pixel_Strobes", 0) commands.extend(self.get_commands("WrRegister", name=registers)) for index, register_object in enumerate(register_objects): if register_object[] == : register_objects[0], register_objects[index] = register_objects[index], register_objects[0] break for register_object in register_objects: pxstrobe = register_object[] bitlength = register_object[] for pxstrobe_bit_no 
in range(bitlength): logging.debug(, register_object[], pxstrobe_bit_no) do_latch = True try: self.set_global_register_value("Pixel_Strobes", 2 ** (pxstrobe + pxstrobe_bit_no)) except TypeError: self.set_global_register_value("Pixel_Strobes", 0) do_latch = False commands.extend(self.get_commands("WrRegister", name=["Pixel_Strobes"])) for dc_no in dcs: self.set_global_register_value("Colpr_Addr", dc_no) commands.extend(self.get_commands("WrRegister", name=["Colpr_Addr"])) if do_latch is True: self.set_global_register_value("S0", 1) self.set_global_register_value("S1", 1) self.set_global_register_value("SR_Clock", 1) commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"])) commands.extend(self.get_commands("GlobalPulse", Width=0)) self.set_global_register_value("S0", 0) self.set_global_register_value("S1", 0) self.set_global_register_value("SR_Clock", 0) commands.extend(self.get_commands("WrRegister", name=["S0", "S1", "SR_Clock"])) register_bitset = self.get_pixel_register_bitset(register_object, pxstrobe_bit_no if (register_object[] is False) else register_object[] - pxstrobe_bit_no - 1, dc_no) if self.fei4b: self.set_global_register_value("SR_Read", 1) commands.extend(self.get_commands("WrRegister", name=["SR_Read"])) commands.extend([self.build_command("WrFrontEnd", PixelData=register_bitset, ChipID=chip_id)]) if self.fei4b: self.set_global_register_value("SR_Read", 0) commands.extend(self.get_commands("WrRegister", name=["SR_Read"])) self.restore(pixel_register=False) commands.extend(self.get_commands("WrRegister", name=registers)) else: commands.append(self.build_command(command_name, ChipID=chip_id, **kwargs)) return commands
Get an FE command from a command name and keyword arguments. Wrapper for build_commands(); implements FEI4-specific behavior.
383,924
def sortByColumn(self, column, order=QtCore.Qt.AscendingOrder): super(XTreeWidget, self).sortByColumn(column, order) self._sortOrder = order
Overloads the default sortByColumn to record the order for later reference. :param column | <int> order | <QtCore.Qt.SortOrder>
383,925
def get_service_health(service_id: str) -> str: if DC.get_replicas(service_id) != DC.get_actual_replica(service_id): health_status = "Unhealthy" else: health_status = "Healthy" return health_status
Get the health of a service using service_id. Args: service_id Returns: str, health status
383,926
def set_repo_permission(self, repo, permission): assert isinstance(repo, github.Repository.Repository), repo put_parameters = { "permission": permission, } headers, data = self._requester.requestJsonAndCheck( "PUT", self.url + "/repos/" + repo._identity, input=put_parameters )
:calls: `PUT /teams/:id/repos/:org/:repo <http://developer.github.com/v3/orgs/teams>`_ :param repo: :class:`github.Repository.Repository` :param permission: string :rtype: None
383,927
def colorbar(ax, im, fig=None, loc="right", size="5%", pad="3%"): if fig is None: fig = ax.get_figure() if loc == "left" or loc == "right": width = fig.get_figwidth() new = width * (1 + _pc2f(size) + _pc2f(pad)) _logger.debug(.format(new)) elif loc == "top" or loc == "bottom": height = fig.get_figheight() new = height * (1 + _pc2f(size) + _pc2f(pad)) _logger.debug(.format(new)) divider = _ag1.make_axes_locatable(ax) cax = divider.append_axes(loc, size=size, pad=pad) return cax, _plt.colorbar(im, cax=cax)
Adds a polite colorbar that steals space so :func:`matplotlib.pyplot.tight_layout` works nicely. .. versionadded:: 1.3 Parameters ---------- ax : :class:`matplotlib.axis.Axis` The axis to plot to. im : :class:`matplotlib.image.AxesImage` The plotted image to use for the colorbar. fig : :class:`matplotlib.figure.Figure`, optional The figure to plot to. loc : str, optional The location to place the axes. size : str, optional The size to allocate for the colorbar. pad : str, optional The amount to pad the colorbar.
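The helper wraps the standard ``axes_grid1`` divider pattern; here is a self-contained sketch of that core idea (the plotted data is random, purely for illustration):

.. code-block:: python

    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.axes_grid1 import make_axes_locatable

    fig, ax = plt.subplots()
    im = ax.imshow(np.random.rand(10, 10))

    # Carve the colorbar axes out of `ax` itself, so tight_layout()
    # accounts for it instead of overlapping it.
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("right", size="5%", pad="3%")
    fig.colorbar(im, cax=cax)

    fig.tight_layout()
    plt.show()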
383,928
def init_layout(self): super(AndroidRadioGroup, self).init_layout() d = self.declaration w = self.widget if d.checked: self.set_checked(d.checked) else: for c in d.children: if c.checked: d.checked = c w.setOnCheckedChangeListener(w.getId()) w.onCheckedChanged.connect(self.on_checked_changed)
Set the checked state after all children have been populated.
383,929
def detailxy(self, canvas, button, data_x, data_y): if button == 0: chviewer = self.fv.getfocus_viewer() if chviewer != self.fitsimage: return True data_x = data_x + self.pick_x1 data_y = data_y + self.pick_y1 return self.fv.showxy(chviewer, data_x, data_y)
Motion event in the pick fits window. Show the pointing information under the cursor.
383,930
def make_back_author_contributions(self, body): cont_expr = "./front/article-meta/author-notes/fn[@fn-type=]" contribution = self.article.root.xpath(cont_expr) if contribution: author_contrib = deepcopy(contribution[0]) remove_all_attributes(author_contrib) author_contrib.tag = author_contrib.attrib[] = title = etree.Element() title.text = author_contrib.insert(0, title) body.append(author_contrib)
Though this goes in the back of the document with the rest of the back matter, it is not an element found under <back>. I don't expect to see more than one of these. Compare this method to make_article_info_competing_interests()
383,931
def get_usedby_aql(self, params): if self._usedby is None: return None _result = {} params = self.merge_valued(params) for k, v in self._usedby[].items(): if isinstance(v, str): k = k.format(**params) v = v.format(**params) _result[k] = v return _result
Returns an AQL query (without the repository) built from the configuration file. :param params: :return:
383,932
def _multiplyThroughputs(self): index = 0 for component in self.components: if component.throughput != None: break index += 1 return BaseObservationMode._multiplyThroughputs(self, index)
Overrides base class in order to deal with opaque components.
383,933
def _set_LED(self, status): self.hw.remote_at( dest_addr=self.remote_addr, command=, parameter= if status else )
_set_LED: boolean -> None Sets the status of the remote LED
383,934
def remove_unweighted_sources(graph: BELGraph, key: Optional[str] = None) -> None: nodes = list(get_unweighted_sources(graph, key=key)) graph.remove_nodes_from(nodes)
Prune unannotated nodes on the periphery of the sub-graph. :param graph: A BEL graph :param key: The key in the node data dictionary representing the experimental data. Defaults to :data:`pybel_tools.constants.WEIGHT`.
383,935
def open(self, url): cache = self.cache() id = self.mangle(url, ) d = cache.get(id) if d is None: d = self.fn(url, self.options) cache.put(id, d) else: d.options = self.options for imp in d.imports: imp.imported.options = self.options return d
Open a WSDL at the specified I{url}. First, an attempt is made to retrieve the WSDL from the I{object cache}. After being unpickled from the cache, the I{options} attribute is restored. If not found in the cache, the WSDL is downloaded and instantiated using the I{fn} constructor and added to the cache for the next open(). @param url: A WSDL url. @type url: str. @return: The WSDL object. @rtype: I{Definitions}
383,936
def weld_standard_deviation(array, weld_type): weld_obj_var = weld_variance(array, weld_type) obj_id, weld_obj = create_weld_object(weld_obj_var) weld_obj_var_id = get_weld_obj_id(weld_obj, weld_obj_var) weld_template = _weld_std_code weld_obj.weld_code = weld_template.format(var=weld_obj_var_id) return weld_obj
Returns the *sample* standard deviation of the array. Parameters ---------- array : numpy.ndarray or WeldObject Input array. weld_type : WeldType Type of each element in the input array. Returns ------- WeldObject Representation of this computation.
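The Weld template itself is not shown in the row, but the computation it encodes is just the square root of the sample variance; in plain NumPy terms (illustrative, not the Weld IR):

.. code-block:: python

    import numpy as np

    a = np.array([1.0, 2.0, 3.0, 4.0])
    var = a.var(ddof=1)   # sample variance: divide by n - 1
    std = np.sqrt(var)    # sample standard deviation
    assert np.isclose(std, a.std(ddof=1))
    print(std)            # 1.2909944...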
383,937
def same(*values): if not values: return True first, rest = values[0], values[1:] return all(value == first for value in rest)
Check if all values in a sequence are equal. Returns True on empty sequences. Examples -------- >>> same(1, 1, 1, 1) True >>> same(1, 2, 1) False >>> same() True
383,938
def update_path(self, path): oldpath = self.path self.path = [] for p in path: if p[0] != '$': break router = self.router_container.router_from_id(p) self.path.append(router) if len(self.path) > len(oldpath): for x in self.listeners: x.circuit_extend(self, router) oldpath = self.path
There are EXTENDED messages which don't include any routers at all, and any of the EXTENDED messages may have some arbitrary flags in them. So far, they're all upper-case and none start with $ luckily. The routers in the path should all be LongName-style router names (this depends on them starting with $). For further complication, it's possible to extend a circuit to a router which isn't in the consensus. nickm via #tor thought this might happen in the case of hidden services choosing a rendezvous point not in the current consensus.
383,939
def _attributeLinesToDict(attributeLines): attributes = dict() for line in attributeLines: attributeId, attributeValue = line.split(':', 1) attributes[attributeId.strip()] = attributeValue.strip() return attributes
Converts a list of obo 'Term' lines to a dictionary. :param attributeLines: a list of obo 'Term' lines. Each line contains a key and a value part which are separated by a ':'. :return: a dictionary containing the attributes of an obo 'Term' entry. NOTE: Some attributes can occur multiple times in one single term, for example 'is_a' or 'relationship'. However, currently only the last occurrence is stored.
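A runnable example of the conversion, using a real GO term (the ':' separator comes straight from the docstring; the last-occurrence caveat applies to repeated keys like ``is_a``):

.. code-block:: python

    def _attributeLinesToDict(attributeLines):
        attributes = dict()
        for line in attributeLines:
            # Split on the first ':' only, so values containing colons survive.
            attributeId, attributeValue = line.split(':', 1)
            attributes[attributeId.strip()] = attributeValue.strip()
        return attributes

    term = ['id: GO:0000001',
            'name: mitochondrion inheritance',
            'is_a: GO:0048308 ! organelle inheritance']
    print(_attributeLinesToDict(term))
    # {'id': 'GO:0000001', 'name': 'mitochondrion inheritance',
    #  'is_a': 'GO:0048308 ! organelle inheritance'}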
383,940
def is_cozy_registered(): req = curl_couchdb() users = req.json()[] if len(users) > 0: return True else: return False
Check if a Cozy is registered
383,941
def update(self, membershipId, isModerator=None, **request_parameters): check_type(membershipId, basestring, may_be_none=False) check_type(isModerator, bool) put_data = dict_from_items_with_values( request_parameters, isModerator=isModerator, ) json_data = self._session.put(API_ENDPOINT + '/' + membershipId, json=put_data) return self._object_factory(OBJECT_TYPE, json_data)
Update a team membership, by ID. Args: membershipId(basestring): The team membership ID. isModerator(bool): Set to True to make the person a team moderator. **request_parameters: Additional request parameters (provides support for parameters that may be added in the future). Returns: TeamMembership: A TeamMembership object with the updated Webex Teams team-membership details. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error.
383,942
def get_asset_form_for_update(self, asset_id=None): if asset_id is None: raise NullArgument() try: url_path = construct_url(, bank_id=self._catalog_idstr, asset_id=asset_id) asset = objects.Asset(self._get_request(url_path)) except Exception: raise asset_form = objects.AssetForm(asset._my_map) self._forms[asset_form.get_id().get_identifier()] = not UPDATED return asset_form
Gets the asset form for updating an existing asset. A new asset form should be requested for each update transaction. :param asset_id: the ``Id`` of the ``Asset`` :type asset_id: ``osid.id.Id`` :return: the asset form :rtype: ``osid.repository.AssetForm`` :raise: ``NotFound`` -- ``asset_id`` is not found :raise: ``NullArgument`` -- ``asset_id`` is null :raise: ``OperationFailed`` -- unable to complete request :raise: ``PermissionDenied`` -- authorization failure *compliance: mandatory -- This method must be implemented.*
383,943
def create_initial_tree(channel): config.LOGGER.info(" Setting up initial channel structure... ") tree = ChannelManager(channel) config.LOGGER.info(" Validating channel structure...") channel.print_tree() tree.validate() config.LOGGER.info(" Tree is valid\n") return tree
create_initial_tree: Create initial tree structure Args: channel (Channel): channel to construct Returns: tree manager to run rest of steps
383,944
def _open_interface(self, client, uuid, iface, key): conn_id = self._validate_connection(, uuid, key) if conn_id is None: return conn_data = self._connections[uuid] conn_data[] = monotonic() slug = self._build_device_slug(uuid) try: resp = yield self._manager.open_interface(conn_id, iface) except Exception as exc: self._logger.exception("Error in manager open interface") resp = {: False, : "Internal error: %s" % str(exc)} message = {: , : , : client} message[] = resp[] if not message[]: message[] = resp[] self._publish_response(slug, message)
Open an interface on a connected device. Args: client (string): The client id who is requesting this operation uuid (int): The id of the device we're opening the interface on iface (string): The name of the interface that we're opening key (string): The key to authenticate the caller
383,945
async def handle_exception(self, exc: Exception, action: str, request_id): if isinstance(exc, APIException): await self.reply( action=action, errors=self._format_errors(exc.detail), status=exc.status_code, request_id=request_id ) elif exc == Http404 or isinstance(exc, Http404): await self.reply( action=action, errors=self._format_errors(), status=404, request_id=request_id ) else: raise exc
Handle any exception that occurs by sending an appropriate message
383,946
def _get_2d_plot(self, label_stable=True, label_unstable=True, ordering=None, energy_colormap=None, vmin_mev=-60.0, vmax_mev=60.0, show_colorbar=True, process_attributes=False, plt=None): if plt is None: plt = pretty_plot(8, 6) from matplotlib.font_manager import FontProperties if ordering is None: (lines, labels, unstable) = self.pd_plot_data else: (_lines, _labels, _unstable) = self.pd_plot_data (lines, labels, unstable) = order_phase_diagram( _lines, _labels, _unstable, ordering) if energy_colormap is None: if process_attributes: for x, y in lines: plt.plot(x, y, "k-", linewidth=3, markeredgecolor="k") for x, y in labels.keys(): if labels[(x, y)].attribute is None or \ labels[(x, y)].attribute == "existing": plt.plot(x, y, "ko", **self.plotkwargs) else: plt.plot(x, y, "k*", **self.plotkwargs) else: for x, y in lines: plt.plot(x, y, "ko-", **self.plotkwargs) else: from matplotlib.colors import Normalize, LinearSegmentedColormap from matplotlib.cm import ScalarMappable for x, y in lines: plt.plot(x, y, "k-", markeredgecolor="k") vmin = vmin_mev / 1000.0 vmax = vmax_mev / 1000.0 if energy_colormap == : mid = - vmin / (vmax - vmin) cmap = LinearSegmentedColormap.from_list( , [(0.0, ), (mid, ), (mid, ), (1.0, )]) else: cmap = energy_colormap norm = Normalize(vmin=vmin, vmax=vmax) _map = ScalarMappable(norm=norm, cmap=cmap) _energies = [self._pd.get_equilibrium_reaction_energy(entry) for coord, entry in labels.items()] energies = [en if en < 0.0 else -0.00000001 for en in _energies] vals_stable = _map.to_rgba(energies) ii = 0 if process_attributes: for x, y in labels.keys(): if labels[(x, y)].attribute is None or \ labels[(x, y)].attribute == "existing": plt.plot(x, y, "o", markerfacecolor=vals_stable[ii], markersize=12) else: plt.plot(x, y, "*", markerfacecolor=vals_stable[ii], markersize=18) ii += 1 else: for x, y in labels.keys(): plt.plot(x, y, "o", markerfacecolor=vals_stable[ii], markersize=15) ii += 1 font = FontProperties() font.set_weight("bold") font.set_size(24) if len(self._pd.elements) == 3: plt.axis("equal") plt.xlim((-0.1, 1.2)) plt.ylim((-0.1, 1.0)) plt.axis("off") center = (0.5, math.sqrt(3) / 6) else: all_coords = labels.keys() miny = min([c[1] for c in all_coords]) ybuffer = max(abs(miny) * 0.1, 0.1) plt.xlim((-0.1, 1.1)) plt.ylim((miny - ybuffer, ybuffer)) center = (0.5, miny / 2) plt.xlabel("Fraction", fontsize=28, fontweight=) plt.ylabel("Formation energy (eV/fu)", fontsize=28, fontweight=) for coords in sorted(labels.keys(), key=lambda x: -x[1]): entry = labels[coords] label = entry.name vec = (np.array(coords) - center) vec = vec / np.linalg.norm(vec) * 10 if np.linalg.norm(vec) != 0 \ else vec valign = "bottom" if vec[1] > 0 else "top" if vec[0] < -0.01: halign = "right" elif vec[0] > 0.01: halign = "left" else: halign = "center" if label_stable: if process_attributes and entry.attribute == : plt.annotate(latexify(label), coords, xytext=vec, textcoords="offset points", horizontalalignment=halign, verticalalignment=valign, fontproperties=font, color=) else: plt.annotate(latexify(label), coords, xytext=vec, textcoords="offset points", horizontalalignment=halign, verticalalignment=valign, fontproperties=font) if self.show_unstable: font = FontProperties() font.set_size(16) energies_unstable = [self._pd.get_e_above_hull(entry) for entry, coord in unstable.items()] if energy_colormap is not None: energies.extend(energies_unstable) vals_unstable = _map.to_rgba(energies_unstable) ii = 0 for entry, coords in unstable.items(): ehull = self._pd.get_e_above_hull(entry) if ehull 
< self.show_unstable: vec = (np.array(coords) - center) vec = vec / np.linalg.norm(vec) * 10 \ if np.linalg.norm(vec) != 0 else vec label = entry.name if energy_colormap is None: plt.plot(coords[0], coords[1], "ks", linewidth=3, markeredgecolor="k", markerfacecolor="r", markersize=8) else: plt.plot(coords[0], coords[1], "s", linewidth=3, markeredgecolor="k", markerfacecolor=vals_unstable[ii], markersize=8) if label_unstable: plt.annotate(latexify(label), coords, xytext=vec, textcoords="offset points", horizontalalignment=halign, color="b", verticalalignment=valign, fontproperties=font) ii += 1 if energy_colormap is not None and show_colorbar: _map.set_array(energies) cbar = plt.colorbar(_map) cbar.set_label( , rotation=-90, ha=, va=) ticks = cbar.ax.get_yticklabels() f = plt.gcf() f.set_size_inches((8, 6)) plt.subplots_adjust(left=0.09, right=0.98, top=0.98, bottom=0.07) return plt
Shows the plot using pylab. Usually I won't do imports in methods, but since plotting is a fairly expensive library to load and not all machines have matplotlib installed, I have done it this way.
383,947
def interface_endpoints(self): api_version = self._get_api_version() if api_version == '2018-08-01': from .v2018_08_01.operations import InterfaceEndpointsOperations as OperationClass else: raise NotImplementedError("APIVersion {} is not available".format(api_version)) return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
Instance depends on the API version: * 2018-08-01: :class:`InterfaceEndpointsOperations<azure.mgmt.network.v2018_08_01.operations.InterfaceEndpointsOperations>`
383,948
def auth(self, encoded): message, signature = self.split(encoded) computed = self.sign(message) if not hmac.compare_digest(signature, computed): raise AuthenticatorInvalidSignature
Validate integrity of encoded bytes
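A self-contained sketch of the sign/verify pair this method belongs to (the key, hash choice, and exception are assumptions; the point is the constant-time comparison):

.. code-block:: python

    import hashlib
    import hmac

    KEY = b'secret-key'  # illustrative

    def sign(message: bytes) -> bytes:
        return hmac.new(KEY, message, hashlib.sha256).digest()

    def auth(message: bytes, signature: bytes) -> None:
        computed = sign(message)
        # compare_digest runs in constant time, defeating timing attacks.
        if not hmac.compare_digest(signature, computed):
            raise ValueError('invalid signature')

    sig = sign(b'hello')
    auth(b'hello', sig)          # passes silently
    try:
        auth(b'hello', b'\x00' * 32)
    except ValueError:
        print('tampering detected')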
383,949
def stopService(self): self._service.factory.stopTrying() yield self._service.factory.stopFactory() yield service.MultiService.stopService(self)
Gracefully stop the service. Returns: defer.Deferred: a Deferred which is triggered when the service has finished shutting down.
383,950
def create(self, request, *args, **kwargs): return super(AlertViewSet, self).create(request, *args, **kwargs)
Run **POST** against */api/alerts/* to create or update alert. If alert with posted scope and alert_type already exists - it will be updated. Only users with staff privileges can create alerts. Request example: .. code-block:: javascript POST /api/alerts/ Accept: application/json Content-Type: application/json Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com { "scope": "http://testserver/api/projects/b9e8a102b5ff4469b9ac03253fae4b95/", "message": "message#1", "alert_type": "first_alert", "severity": "Debug" }
383,951
def check_date_str_format(s, default_time="00:00:00"): try: str_fmt = s if ":" not in s: str_fmt = '{} {}'.format(s, default_time) dt_obj = datetime.strptime(str_fmt, "%Y-%m-%d %H:%M:%S") return RET_OK, dt_obj except ValueError: error_str = ERROR_STR_PREFIX + "wrong time or time format" return RET_ERROR, error_str
Check the format of date string
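A runnable version of the row with its module-level constants stubbed in (the RET_OK/RET_ERROR values and error prefix are illustrative):

.. code-block:: python

    from datetime import datetime

    RET_OK, RET_ERROR = 0, -1        # illustrative constants
    ERROR_STR_PREFIX = 'error: '     # illustrative

    def check_date_str_format(s, default_time="00:00:00"):
        try:
            str_fmt = s
            if ":" not in s:
                # Date-only input: append the default time component.
                str_fmt = '{} {}'.format(s, default_time)
            dt_obj = datetime.strptime(str_fmt, "%Y-%m-%d %H:%M:%S")
            return RET_OK, dt_obj
        except ValueError:
            return RET_ERROR, ERROR_STR_PREFIX + "wrong time or time format"

    print(check_date_str_format("2019-05-01"))           # (0, datetime(2019, 5, 1, 0, 0))
    print(check_date_str_format("2019-05-01 08:30:00"))  # (0, datetime(2019, 5, 1, 8, 30))
    print(check_date_str_format("05/01/2019")[0])        # -1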
383,952
def keywords_for(*args): if isinstance(args[0], Model): obj = args[0] if getattr(obj, "content_model", None): obj = obj.get_content_model() keywords_name = obj.get_keywordsfield_name() keywords_queryset = getattr(obj, keywords_name).all() prefetched = getattr(obj, "_prefetched_objects_cache", {}) if keywords_name not in prefetched: keywords_queryset = keywords_queryset.select_related("keyword") return [assigned.keyword for assigned in keywords_queryset] try: app_label, model = args[0].split(".", 1) except ValueError: return [] content_type = ContentType.objects.get(app_label=app_label, model=model) assigned = AssignedKeyword.objects.filter(content_type=content_type) keywords = Keyword.objects.filter(assignments__in=assigned) keywords = keywords.annotate(item_count=Count("assignments")) if not keywords: return [] counts = [keyword.item_count for keyword in keywords] min_count, max_count = min(counts), max(counts) factor = (settings.TAG_CLOUD_SIZES - 1.) if min_count != max_count: factor /= (max_count - min_count) for kywd in keywords: kywd.weight = int(round((kywd.item_count - min_count) * factor)) + 1 return keywords
Return a list of ``Keyword`` objects for the given model instance or a model class. In the case of a model class, retrieve all keywords for all instances of the model and apply a ``weight`` attribute that can be used to create a tag cloud.
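The tag-cloud weighting in the row maps item counts linearly onto 1..TAG_CLOUD_SIZES; a worked sketch of just that arithmetic (the counts are made up):

.. code-block:: python

    TAG_CLOUD_SIZES = 4  # e.g. a Mezzanine-style setting

    counts = [1, 3, 5, 9]
    min_count, max_count = min(counts), max(counts)
    factor = (TAG_CLOUD_SIZES - 1.0) / (max_count - min_count)

    weights = [int(round((c - min_count) * factor)) + 1 for c in counts]
    print(weights)  # [1, 2, 3, 4]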
383,953
def fix_symbol_store_path(symbol_store_path = None, remote = True, force = False): try: if symbol_store_path is None: local_path = "C:\\SYMBOLS" if not path.isdir(local_path): local_path = "C:\\Windows\\Symbols" if not path.isdir(local_path): local_path = path.abspath(".") if remote: symbol_store_path = ( "cache*;SRV*" + local_path + "*" "http://msdl.microsoft.com/download/symbols" ) else: symbol_store_path = "cache*;SRV*" + local_path previous = os.environ.get("_NT_SYMBOL_PATH", None) if not previous or force: os.environ["_NT_SYMBOL_PATH"] = symbol_store_path return previous except Exception: e = sys.exc_info()[1] warnings.warn("Cannot fix symbol path, reason: %s" % str(e), RuntimeWarning)
Fix the symbol store path. Equivalent to the C{.symfix} command in Microsoft WinDbg. If the symbol store path environment variable hasn't been set, this method will provide a default one. @type symbol_store_path: str or None @param symbol_store_path: (Optional) Symbol store path to set. @type remote: bool @param remote: (Optional) Defines the symbol store path to set when the C{symbol_store_path} is C{None}. If C{True} the default symbol store path is set to the Microsoft symbol server. Debug symbols will be downloaded through HTTP. This gives the best results but is also quite slow. If C{False} the default symbol store path is set to the local cache only. This prevents debug symbols from being downloaded and is faster, but unless you've installed the debug symbols on this machine or downloaded them in a previous debugging session, some symbols may be missing. If the C{symbol_store_path} argument is not C{None}, this argument is ignored entirely. @type force: bool @param force: (Optional) If C{True} the new symbol store path is set always. If C{False} the new symbol store path is only set if missing. This allows you to call this method preventively to ensure the symbol server is always set up correctly when running your script, but without messing up whatever configuration the user has. Example:: from winappdbg import Debug, System def simple_debugger( argv ): # Instance a Debug object debug = Debug( MyEventHandler() ) try: # Make sure the remote symbol store is set System.fix_symbol_store_path(remote = True, force = False) # Start a new process for debugging debug.execv( argv ) # Wait for the debugee to finish debug.loop() # Stop the debugger finally: debug.stop() @rtype: str or None @return: The previously set symbol store path if any, otherwise returns C{None}.
383,954
def feeds(self): url = self._build_url() json = self._json(self._get(url), 200) del json[] del json[] urls = [ , , , , , , ] for url in urls: json[url] = URITemplate(json[url]) links = json.get(, {}) for d in links.values(): d[] = URITemplate(d[]) return json
List GitHub's timeline resources in Atom format. :returns: dictionary parsed to include URITemplates
383,955
def add_to_matching_blacklist(db, entity): with db.connect() as session: try: add_to_matching_blacklist_db(session, entity) except ValueError as e: raise InvalidValueError(e)
Add an entity to the matching blacklist. This function adds an 'entity' or term to the matching blacklist. The term to add cannot have a None or empty value; in this case an InvalidValueError will be raised. If the given 'entity' exists in the registry, the function will raise an AlreadyExistsError exception. :param db: database manager :param entity: term, word or value to blacklist :raises InvalidValueError: raised when entity is None or an empty string :raises AlreadyExistsError: raised when the entity already exists in the registry.
383,956
def connect(self, host=, port=3306, user=, password=, database=None): if database is None: raise exceptions.RequiresDatabase() self._db_args = { : host, : port, : user, : password, : database } with self._db_conn() as conn: conn.query() return self
Connect to the database specified
383,957
def clear_title(self): metadata = Metadata(**settings.METADATA[]) if metadata.is_read_only() or metadata.is_required(): raise NoAccess() self._my_map[][] =
Removes the title. :raise: ``NoAccess`` -- ``Metadata.isRequired()`` is ``true`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
383,958
async def debug(self, client_id, conn_string, command, args): conn_id = self._client_info(client_id, )[conn_string] return await self.adapter.debug(conn_id, command, args)
Send a debug command to a device on behalf of a client. See :meth:`AbstractDeviceAdapter.send_script`. Args: client_id (str): The client we are working for. conn_string (str): A connection string that will be passed to the underlying device adapter. command (str): The name of the debug command to run. args (dict): Any command arguments. Returns: object: The response to the debug command. Raises: DeviceServerError: There is an issue with your client_id such as not being connected to the device. DeviceAdapterError: The adapter had a protocol issue sending the debug command.
383,959
def print_vertical(vertical_rows, labels, color, args): if color: sys.stdout.write(f) for row in vertical_rows: print(*row) sys.stdout.write() print("-" * len(row) + "Values" + "-" * len(row)) for value in zip_longest(*value_list, fillvalue=): print(" ".join(value)) if args[] == False: print("-" * len(row) + "Labels" + "-" * len(row)) for label in zip_longest(*labels, fillvalue=): print(" ".join(label))
Print the whole vertical graph.
383,960
def transfer(sendContext, receiveContext, chunkSize): try: chunkSize = receiveContext.chunkSize except AttributeError: pass if sendContext is not None and receiveContext is not None: with receiveContext as writer: with sendContext as reader: checkBefore = None if hasattr(writer, 'skipChunk'): checkBefore = hasattr(reader, 'checkSum') while True: if checkBefore is True: (size, checkSum) = reader.checkSum(chunkSize) if writer.skipChunk(size, checkSum): reader.seek(size, io.SEEK_CUR) continue data = reader.read(chunkSize) if len(data) == 0: break if checkBefore is False: checkSum = hashlib.md5(data).hexdigest() if writer.skipChunk(len(data), checkSum, data): continue writer.write(data)
Transfer (large) data from sender to receiver.
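The interesting part of the loop is the resume handshake: a receiver that already holds a chunk reports its (size, md5) so the sender can skip it. A self-contained illustration of that idea using plain BytesIO streams (not the library's actual context objects):

.. code-block:: python

    import hashlib
    import io

    chunkSize = 4096
    payload = b"A" * 4096 + b"B" * 4096
    # Chunks the receiver already has, keyed by (size, md5 hex digest).
    already_have = {(4096, hashlib.md5(b"A" * 4096).hexdigest())}

    reader, out = io.BytesIO(payload), io.BytesIO()
    while True:
        data = reader.read(chunkSize)
        if not data:
            break
        digest = hashlib.md5(data).hexdigest()
        if (len(data), digest) in already_have:
            continue  # receiver already holds this chunk: skip re-sending it
        out.write(data)

    assert out.getvalue() == b"B" * 4096  # only the missing chunk was sent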
383,961
def of(fixture_classes: Iterable[type], context: Union[None, 'TestContext'] = None) -> Iterable['Fixture']: classes = list(copy.copy(fixture_classes)) fixtures = [] while len(classes): current = classes.pop() subclasses = current.__subclasses__() if len(subclasses): classes.extend(subclasses) elif current not in fixture_classes: fixtures.append(current(context)) return fixtures
Obtain all Fixture objects of the provided classes. **Parameters** :``fixture_classes``: classes inheriting from ``torment.fixtures.Fixture`` :``context``: a ``torment.TestContext`` to initialize Fixtures with **Return Value(s)** Instantiated ``torment.fixtures.Fixture`` objects for each individual fixture class that inherits from one of the provided classes.
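A runnable miniature showing why only leaf subclasses get instantiated (the Fixture class here is a stand-in for torment.fixtures.Fixture):

.. code-block:: python

    class Fixture:  # stand-in for torment.fixtures.Fixture
        def __init__(self, context):
            self.context = context

    class A(Fixture): pass
    class B(A): pass  # leaf
    class C(A): pass  # leaf

    def of(fixture_classes, context=None):
        classes = list(fixture_classes)
        fixtures = []
        while len(classes):
            current = classes.pop()
            subclasses = current.__subclasses__()
            if len(subclasses):
                classes.extend(subclasses)  # descend; never instantiate parents
            elif current not in fixture_classes:
                fixtures.append(current(context))
        return fixtures

    print([type(f).__name__ for f in of([A])])  # ['C', 'B'] -- leaves only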
383,962
def routing(routes, request): path = request.path.strip() args = {} for name, route in routes.items(): if route[] == : raise TornNotFoundError
Helper definition for route matching.
383,963
def _enable_l1_keepalives(self, command): env = os.environ.copy() if "IOURC" not in os.environ: env["IOURC"] = self.iourc_path try: output = yield from gns3server.utils.asyncio.subprocess_check_output(self._path, "-h", cwd=self.working_dir, env=env, stderr=True) if re.search("-l\s+Enable Layer 1 keepalive messages", output): command.extend(["-l"]) else: raise IOUError("layer 1 keepalive messages are not supported by {}".format(os.path.basename(self._path))) except (OSError, subprocess.SubprocessError) as e: log.warning("could not determine if layer 1 keepalive messages are supported by {}: {}".format(os.path.basename(self._path), e))
Enables L1 keepalive messages if supported. :param command: command line
383,964
def __parse_identities(self, json): try: for uidentity in json[].values(): uuid = self.__encode(uidentity[]) uid = UniqueIdentity(uuid=uuid) if uidentity[]: profile = uidentity[] if type(profile[]) != bool: msg = "invalid json format. must have a bool value" raise InvalidFormatError(cause=msg) is_bot = profile[] gender = profile.get(, None) if gender is not None: gender = self.__encode(gender) gender_acc = profile.get(, None) if gender_acc is not None: if type(gender_acc) != int: msg = "invalid json format. must have an integer value" raise InvalidFormatError(cause=msg) elif not 0 <= gender_acc <= 100: msg = "invalid json format. is not in range (0,100)" raise InvalidFormatError(cause=msg) name = self.__encode(profile[]) email = self.__encode(profile[]) prf = Profile(uuid=uuid, name=name, email=email, gender=gender, gender_acc=gender_acc, is_bot=is_bot) if profile[]: alpha3 = self.__encode(profile[][]) code = self.__encode(profile[][]) name = self.__encode(profile[][]) c = Country(alpha3=alpha3, code=code, name=name) prf.country_code = code prf.country = c uid.profile = prf for identity in uidentity[]: identity_id = self.__encode(identity[]) name = self.__encode(identity[]) email = self.__encode(identity[]) username = self.__encode(identity[]) source = self.__encode(identity[]) sh_id = Identity(id=identity_id, name=name, email=email, username=username, source=source, uuid=uuid) uid.identities.append(sh_id) for enrollment in uidentity[]: organization = self.__encode(enrollment[]) org = self._organizations.get(organization, None) if not org: org = Organization(name=organization) self._organizations[organization] = org try: start = str_to_datetime(enrollment[]) end = str_to_datetime(enrollment[]) except InvalidDateError as e: raise InvalidFormatError(cause=str(e)) rol = Enrollment(start=start, end=end, organization=org) uid.enrollments.append(rol) self._identities.append(uid) except KeyError as e: msg = "invalid json format. Attribute %s not found" % e.args raise InvalidFormatError(cause=msg)
Parse identities using Sorting Hat format. The Sorting Hat identities format is a JSON stream on which its keys are the UUID of the unique identities. Each unique identity object has a list of identities and enrollments. When the unique identity does not have a UUID, it will be considered as an anonymous unique identity. This means that the UUID of these identities will be regenerated during the loading process. Next, there is an example of a valid stream: { "uidentities": { "[email protected]": { "enrollments": [], "identities": [], "uuid": null }, "03e12d00e37fd45593c49a5a5a1652deca4cf302": { "enrollments": [ { "end": "2100-01-01T00:00:00", "start": "1900-01-01T00:00:00", "organization": "Example", "uuid": "03e12d00e37fd45593c49a5a5a1652deca4cf302" } ], "identities": [ { "email": "[email protected]", "id": "03e12d00e37fd45593c49a5a5a1652deca4cf302", "name": "John Smith", "source": "scm", "username": "jsmith", "uuid": "03e12d00e37fd45593c49a5a5a1652deca4cf302" }, { "email": "[email protected]", "id": "75d95d6c8492fd36d24a18bd45d62161e05fbc97", "name": "John Smith", "source": "scm", "username": null, "uuid": "03e12d00e37fd45593c49a5a5a1652deca4cf302" } ], "profile": { "country": { "alpha3": "USA", "code": "US", "name": "United States of America" }, "email": "[email protected]", "gender": "male", "gender_acc": 100, "name": null, "is_bot": true, "uuid": "03e12d00e37fd45593c49a5a5a1652deca4cf302" }, "uuid": "03e12d00e37fd45593c49a5a5a1652deca4cf302" } } } :param stream: stream to parse :raises InvalidFormatError: raised when the format of the stream is not valid.
383,965
def retrieve(self, key):
    # The cache-file naming scheme was elided in the source; '<key>.npy' with
    # an optional '<key>.json' column sidecar is a reconstruction.
    column_file = os.path.join(self._hash_dir, '%s.json' % key)
    cache_file = os.path.join(self._hash_dir, '%s.npy' % key)
    if os.path.exists(cache_file):
        data = np.load(cache_file)
        if os.path.exists(column_file):
            with open(column_file, 'r') as json_file:
                columns = json.load(json_file)
            data = pd.DataFrame(data, columns=columns)
    else:
        return None
    return data
Retrieves a cached array if possible.
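A hedged sketch of the store() counterpart implied by retrieve() above; the '<key>.npy' / '<key>.json' naming mirrors the reconstruction and is an assumption, not a confirmed layout:

import json
import os

import numpy as np
import pandas as pd

def store(hash_dir, key, data):
    # Persist the values as .npy; DataFrames also get a .json column sidecar
    # so retrieve() can rebuild them with the right column names.
    if isinstance(data, pd.DataFrame):
        with open(os.path.join(hash_dir, '%s.json' % key), 'w') as f:
            json.dump(list(data.columns), f)
        np.save(os.path.join(hash_dir, '%s.npy' % key), data.values)
    else:
        np.save(os.path.join(hash_dir, '%s.npy' % key), np.asarray(data))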
383,966
def load_image(name, n, m=None, gpu=None, square=None):
    if m is None:
        m = n
    if gpu is None:
        gpu = 0
    if square is None:
        square = 0
    # The command template was elided in the source; this reconstruction
    # assumes a Julia-side load_image(name, n, m, gpu, square) entry point.
    command = 'load_image("{}", {}, {}, {}, {})'.format(name, n, m, gpu, square)
    return j.eval(command)
Function to load images of a certain size.
383,967
def tablestructure(tablename, dataman=True, column=True, subtable=False, sort=False): t = table(tablename, ack=False) six.print_(t.showstructure(dataman, column, subtable, sort))
Print the structure of a table. It is the same as :func:`table.showstructure`, but without the need to open the table first.
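A hedged usage sketch, assuming python-casacore is installed and a table (here the placeholder 'my.ms') exists on disk:

from casacore.tables import tablestructure

# Print data managers and columns, skip subtables, keep natural column order.
tablestructure('my.ms', dataman=True, column=True, subtable=False, sort=False)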
383,968
def get_all_preordered_namespace_hashes( self ): cur = self.db.cursor() namespace_hashes = namedb_get_all_preordered_namespace_hashes( cur, self.lastblock ) return namespace_hashes
Get all outstanding namespace preorder hashes that have not expired. Used for testing
383,969
def feed_arthur():
    logger.info("Collecting items from redis queue")

    # Default Redis URL reconstructed; the literal was elided in the source.
    db_url = 'redis://localhost/8'

    conn = redis.StrictRedis.from_url(db_url)
    logger.debug("Redis connection established with %s.", db_url)

    # Get and remove queued items in an atomic transaction.
    pipe = conn.pipeline()
    pipe.lrange(Q_STORAGE_ITEMS, 0, -1)
    pipe.ltrim(Q_STORAGE_ITEMS, 1, 0)
    items = pipe.execute()[0]

    for item in items:
        arthur_item = pickle.loads(item)
        if arthur_item['tag'] not in arthur_items:
            arthur_items[arthur_item['tag']] = []
        arthur_items[arthur_item['tag']].append(arthur_item)

    for tag in arthur_items:
        logger.debug("Items for %s: %i", tag, len(arthur_items[tag]))
Feed Ocean with backend data collected from arthur redis queue
383,970
def path(self): if isinstance(self.dir, Directory): return self.dir._path elif isinstance(self.dir, ROOT.TDirectory): return self.dir.GetPath() elif isinstance(self.dir, _FolderView): return self.dir.path() else: return str(self.dir)
Get the path of the wrapped folder
383,971
def list(self, entity=None): uri = "/%s" % self.uri_base if entity: uri = "%s?entityId=%s" % (uri, utils.get_id(entity)) resp, resp_body = self._list(uri, return_raw=True) return resp_body
Returns a dictionary of data, optionally filtered for a given entity.
383,972
def start_server_background(port):
    # The script cell templates were elided in the source; the bodies below
    # only preserve the structure (separate Python 2 and 3 launch paths) and
    # the 'newtab.server'/'server' module names are assumptions.
    if sys.version_info[0] == 2:
        lines = ("from newtab.server import serve\n"
                 "serve(port={port})")
        cell = lines.format(port=port)
    else:
        path = repr(os.path.dirname(os.path.realpath(__file__)))
        lines = ("import sys\n"
                 "sys.path.insert(0, {path})\n"
                 "from server import serve\n"
                 "serve(port={port})")
        cell = lines.format(path=path, port=port)
    # The %%script magic stores the process handle under 'proc' in user_ns.
    line = "python --proc proc --bg --err error --out output"
    ip = get_ipython()
    ip.run_cell_magic("script", line, cell)
    return ip.user_ns['proc']
Start the newtab server as a background process.
383,973
def write(self, output_io):
    for name, tax in self.taxonomy.items():
        # GreenGenes format: sequence name, tab, '; '-joined taxonomy ranks.
        # The separator literal was elided in the source; '; ' follows the
        # GG convention.
        output_io.write("%s\t%s\n" % (name, '; '.join(tax)))
Write a taxonomy to an open stream out in GG format. Code calling this function must open and close the io object.
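A standalone sketch of the output this produces, assuming the '; ' rank separator reconstructed above; io.StringIO stands in for the open stream the caller must manage:

import io

taxonomy = {'seq1': ['k__Bacteria', 'p__Firmicutes'],
            'seq2': ['k__Archaea']}

buf = io.StringIO()
for name, tax in taxonomy.items():
    buf.write("%s\t%s\n" % (name, '; '.join(tax)))
print(buf.getvalue(), end='')
# seq1	k__Bacteria; p__Firmicutes
# seq2	k__Archaea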
383,974
def set_triggered_by_event(self, value): if value is None or not isinstance(value, bool): raise TypeError("TriggeredByEvent must be set to a bool") else: self.__triggered_by_event = value
Setter for 'triggered_by_event' field. :param value - a new value of 'triggered_by_event' field. Must be a boolean type. Does not accept None value.
383,975
def bind(self, attribute, cls, buffer, fmt, *, offset=0, stride=0, divisor=0, normalize=False) -> None: self.mglo.bind(attribute, cls, buffer.mglo, fmt, offset, stride, divisor, normalize)
Bind individual attributes to buffers.

Args:
    attribute (int): The attribute location.
    cls (str): The attribute class. Valid values are ``f``, ``i`` or ``d``.
    buffer (Buffer): The buffer.
    fmt (str): The buffer format.

Keyword Args:
    offset (int): The offset.
    stride (int): The stride.
    divisor (int): The divisor.
    normalize (bool): The normalize parameter, if applicable.
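For orientation, a hedged ModernGL sketch of the attribute/buffer wiring that bind() performs per attribute; this uses the higher-level vertex_array path, needs a working GL context, and the shader source is a minimal placeholder:

import moderngl
import numpy as np

ctx = moderngl.create_standalone_context()
prog = ctx.program(
    vertex_shader="""
        #version 330
        in vec2 in_pos;
        void main() { gl_Position = vec4(in_pos, 0.0, 1.0); }
    """,
    fragment_shader="""
        #version 330
        out vec4 color;
        void main() { color = vec4(1.0); }
    """,
)
buf = ctx.buffer(np.array([0, 0, 1, 0, 0, 1], dtype='f4').tobytes())
# '2f' is the buffer format: two floats per vertex feeding the in_pos attribute.
vao = ctx.vertex_array(prog, [(buf, '2f', 'in_pos')])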
383,976
def plotloc(data, circleinds=[], crossinds=[], edgeinds=[], url_path=None, fileroot=None,
            tools="hover,tap,pan,box_select,wheel_zoom,reset", plot_width=450, plot_height=400):
    # Field names and label literals were elided in the source and are
    # reconstructed here.
    fields = ['l1', 'm1', 'sizes', 'colors', 'snrs', 'key']

    if not circleinds:
        circleinds = range(len(data['snrs']))

    # set ranges
    datalen = len(data['dm'])  # unused below, kept from the original
    inds = circleinds + crossinds + edgeinds
    l1 = [data['l1'][i] for i in inds]
    l1_min = min(l1)
    l1_max = max(l1)
    m1 = [data['m1'][i] for i in inds]
    m1_min = min(m1)
    m1_max = max(m1)

    source = ColumnDataSource(data=dict({(key, tuple([value[i] for i in circleinds if i not in edgeinds]))
                                         for (key, value) in data.iteritems() if key in fields}))
    loc = Figure(plot_width=plot_width, plot_height=plot_height, toolbar_location="left",
                 x_axis_label='l1 (rad)', y_axis_label='m1 (rad)',
                 x_range=(l1_min, l1_max), y_range=(m1_min, m1_max), tools=tools,
                 output_backend='webgl')
    loc.circle('l1', 'm1', size='sizes', line_color=None, fill_color='colors',
               fill_alpha=0.2, source=source)

    if crossinds:
        sourceneg = ColumnDataSource(data=dict({(key, tuple([value[i] for i in crossinds]))
                                                for (key, value) in data.iteritems() if key in fields}))
        loc.cross('l1', 'm1', size='sizes', line_color='colors', line_alpha=0.3, source=sourceneg)

    if edgeinds:
        sourceedge = ColumnDataSource(data=dict({(key, tuple([value[i] for i in edgeinds]))
                                                 for (key, value) in data.iteritems() if key in fields}))
        loc.circle('l1', 'm1', size='sizes', line_color='colors', fill_color='colors',
                   source=sourceedge, line_alpha=0.5, fill_alpha=0.2)

    hover = loc.select(dict(type=HoverTool))
    hover.tooltips = OrderedDict([('SNR', '@snrs'), ('key', '@key')])

    if url_path and fileroot:
        url = '{}/cands_{}[email protected]'.format(url_path, fileroot)
        taptool = loc.select(type=TapTool)
        taptool.callback = OpenURL(url=url)

    return loc
Make a light-weight loc figure
383,977
def _add_rg(unmapped_file, config, names): picard = broad.runner_from_path("picard", config) rg_fixed = picard.run_fn("picard_fix_rgs", unmapped_file, names) return rg_fixed
Add the missing RG header.
383,978
def format_help(self):
    if not self._cell_args:
        return super(CommandParser, self).format_help()
    else:
        # The cell-args formatting block was elided in the source; this
        # reconstruction appends a short description of each cell arg to the
        # standard argparse help text.
        orig_help = super(CommandParser, self).format_help()
        orig_help += '\nCell args:\n'
        for cell_arg, v in self._cell_args.items():
            orig_help += '  %s: %s\n' % (cell_arg, v)
        return orig_help
Override help doc to add cell args.
383,979
def HandleSimpleResponses(
        self, timeout_ms=None, info_cb=DEFAULT_MESSAGE_CALLBACK):
    return self._AcceptResponses(b'OKAY', info_cb, timeout_ms=timeout_ms)
Accepts normal responses from the device. Args: timeout_ms: Timeout in milliseconds to wait for each response. info_cb: Optional callback for text sent from the bootloader. Returns: OKAY packet's message.
383,980
def mkdir(self, mdir, parents=False):
    assert mdir.startswith('/'), "%s: invalid manta path" % mdir
    parts = mdir.split('/')
    assert len(parts) > 3, "%s: cannot create top-level dirs" % mdir
    if not parents:
        self.put_directory(mdir)
    else:
        # Binary-search for the deepest existing dir, then create the rest.
        end = len(parts) + 1
        start = 3
        while start < end - 1:
            idx = int((end - start) // 2 + start)
            d = '/'.join(parts[:idx])
            try:
                self.put_directory(d)
            except errors.MantaAPIError:
                _, ex, _ = sys.exc_info()
                # Error-code literal reconstructed from the Manta API.
                if ex.code == 'DirectoryDoesNotExist':
                    end = idx
                else:
                    raise
            else:
                start = idx
        for i in range(end, len(parts) + 1):
            d = '/'.join(parts[:i])
            self.put_directory(d)
Make a directory. Note that this will not error out if the directory already exists (that is how the PutDirectory Manta API behaves). @param mdir {str} A manta path, e.g. '/trent/stor/mydir'. @param parents {bool} Optional. Default false. Like 'mkdir -p', this will create parent dirs as necessary.
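The binary search over path prefixes is the interesting part: because PutDirectory never fails on an already-existing dir, the code probes midpoints until it pins down the deepest existing prefix, then creates the rest left to right. A self-contained simulation, with a set standing in for the remote tree:

def put_directory(d, existing):
    # Succeeds if the parent exists (and is a no-op when d already exists),
    # mirroring Manta's PutDirectory semantics.
    if d.rsplit('/', 1)[0] not in existing:
        raise KeyError('DirectoryDoesNotExist')
    existing.add(d)

def mkdirs(path, existing):
    parts = path.split('/')
    start, end = 3, len(parts) + 1
    while start < end - 1:
        idx = (end - start) // 2 + start
        try:
            put_directory('/'.join(parts[:idx]), existing)
        except KeyError:
            end = idx
        else:
            start = idx
    for i in range(end, len(parts) + 1):
        existing.add('/'.join(parts[:i]))

tree = {'', '/trent', '/trent/stor'}
mkdirs('/trent/stor/a/b/c', tree)
print(sorted(d for d in tree if d.startswith('/trent/stor/')))
# ['/trent/stor/a', '/trent/stor/a/b', '/trent/stor/a/b/c']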
383,981
def visible(self, request):
    if self.check_visible(self.model, request) is False:
        return self.get_queryset().none()
    return self.apply_visible(self.get_queryset(), request)
Checks both check_visible and apply_visible against the owned model and its instance set
383,982
def DictProduct(dictionary): keys, values = Unzip(iteritems(dictionary)) for product_values in itertools.product(*values): yield dict(zip(keys, product_values))
Computes a cartesian product of dict with iterable values. This utility function, accepts a dictionary with iterable values, computes cartesian products of these values and yields dictionaries of expanded values. Examples: >>> list(DictProduct({"a": [1, 2], "b": [3, 4]})) [{"a": 1, "b": 3}, {"a": 1, "b": 4}, {"a": 2, "b": 3}, {"a": 2, "b": 4}] Args: dictionary: A dictionary with iterable values. Yields: Dictionaries with values being a result of cartesian product of values of the input dictionary.
383,983
def from_table(table, fields=None):
    if fields is None:
        fields = '*'
    elif isinstance(fields, list):
        fields = ','.join(fields)
    return Query('SELECT %s FROM %s' % (fields, table._repr_sql_()))
Return a Query for the given Table object Args: table: the Table object to construct a Query out of fields: the fields to return. If None, all fields will be returned. This can be a string which will be injected into the Query after SELECT, or a list of field names. Returns: A Query object that will return the specified fields from the records in the Table.
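A standalone sketch of the SQL this composes; a plain string stands in for Table._repr_sql_():

def sql_from_table(table_sql, fields=None):
    if fields is None:
        fields = '*'
    elif isinstance(fields, list):
        fields = ','.join(fields)
    return 'SELECT %s FROM %s' % (fields, table_sql)

print(sql_from_table('[mydataset.mytable]', ['name', 'value']))
# SELECT name,value FROM [mydataset.mytable]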
383,984
def dictionary_merge(a, b): for key, value in b.items(): if key in a and isinstance(a[key], dict) and isinstance(value, dict): dictionary_merge(a[key], b[key]) continue a[key] = b[key] return a
Merges dictionary b into a.

Like dict.update, but recursive.
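A quick check of the recursive semantics — nested dicts are merged key by key rather than replaced wholesale:

a = {'x': {'y': 1, 'z': 2}, 'k': 0}
b = {'x': {'y': 9}, 'n': 3}
dictionary_merge(a, b)
print(a)
# {'x': {'y': 9, 'z': 2}, 'k': 0, 'n': 3}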
383,985
def formfield_for_dbfield(self, db_field, **kwargs): formfield = super(TweetableAdminMixin, self).formfield_for_dbfield(db_field, **kwargs) if Api and db_field.name == "status" and get_auth_settings(): def wrapper(render): def wrapped(*args, **kwargs): rendered = render(*args, **kwargs) label = _("Send to Twitter") return mark_safe(rendered + FORMFIELD_HTML % label) return wrapped formfield.widget.render = wrapper(formfield.widget.render) return formfield
Adds the "Send to Twitter" checkbox after the "status" field, provided by any ``Displayable`` models. The approach here is quite a hack, however the sane approach of using a custom form with a boolean field defined, and then adding it to the formsets attribute of the admin class, fell apart quite horrifically.
383,986
def collect_conflicts_between( context: ValidationContext, conflicts: List[Conflict], cached_fields_and_fragment_names: Dict, compared_fragment_pairs: "PairSet", parent_fields_are_mutually_exclusive: bool, field_map1: NodeAndDefCollection, field_map2: NodeAndDefCollection, ) -> None: for response_name, fields1 in field_map1.items(): fields2 = field_map2.get(response_name) if fields2: for field1 in fields1: for field2 in fields2: conflict = find_conflict( context, cached_fields_and_fragment_names, compared_fragment_pairs, parent_fields_are_mutually_exclusive, response_name, field1, field2, ) if conflict: conflicts.append(conflict)
Collect all Conflicts between two collections of fields. This is similar to, but different from the `collectConflictsWithin` function above. This check assumes that `collectConflictsWithin` has already been called on each provided collection of fields. This is true because this validator traverses each individual selection set.
383,987
def reconnect(self):
    # The original log messages were elided in the source; generic messages
    # are substituted below.
    log.debug('Closing the SSL socket before reconnecting')
    try:
        self.ssl_skt.close()
    except socket.error:
        log.error('The socket seems to be closed already')
    log.debug('Re-authenticating with the server')
    self.authenticate()
Try to reconnect and re-authenticate with the server.
383,988
def get_children_graph(self, item_ids=None, language=None, forbidden_item_ids=None):
    if forbidden_item_ids is None:
        forbidden_item_ids = set()

    def _children(item_ids):
        if item_ids is None:
            items = Item.objects.filter(active=True).prefetch_related('children')
        else:
            item_ids = [ii for iis in item_ids.values() for ii in iis]
            items = Item.objects.filter(id__in=item_ids, active=True).prefetch_related('children')
        return {
            item.id: sorted([
                _item.id for _item in item.children.all()
                if _item.active and _item.id not in forbidden_item_ids
            ])
            for item in items if item.id not in forbidden_item_ids
        }

    if item_ids is None:
        return self._reachable_graph(None, _children, language=language)
    else:
        graph = self.get_children_graph(None, language, forbidden_item_ids=forbidden_item_ids)
        return self._subset_graph(graph, set(item_ids) - set(forbidden_item_ids))
Get a subgraph of items reachable from the given set of items through the 'child' relation. Args: item_ids (list): items which are taken as roots for the reachability language (str): if specified, filter out items which are not available in the given language Returns: dict: item id -> list of items (child items), root items are referenced by None key
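A self-contained sketch of the subset step; `_subset_graph` itself is not shown above, so this is an assumption about its behavior — restrict a full child graph to the part reachable from the given roots:

from collections import deque

def subset_graph(graph, roots):
    keep, queue = set(roots), deque(roots)
    while queue:
        node = queue.popleft()
        for child in graph.get(node, []):
            if child not in keep:
                keep.add(child)
                queue.append(child)
    return {node: [c for c in children if c in keep]
            for node, children in graph.items() if node in keep}

full = {1: [2, 3], 2: [4], 3: [], 4: [], 5: [6], 6: []}
print(subset_graph(full, {1}))
# {1: [2, 3], 2: [4], 3: [], 4: []}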
383,989
def _write_nex(self, mdict, nlocus):
    # Left-align taxon names in the matrix block.
    max_name_len = max([len(i) for i in mdict])
    namestring = "{:<" + str(max_name_len + 1) + "} {}\n"
    matrix = ""
    for i in mdict.items():
        matrix += namestring.format(i[0], i[1])

    minidir = os.path.realpath(os.path.join(self.workdir, self.name))
    if not os.path.exists(minidir):
        os.makedirs(minidir)
    handle = os.path.join(minidir, "{}.nex".format(nlocus))

    with open(handle, 'w') as outnex:
        outnex.write(NEXBLOCK.format(**{
            "ntax": len(mdict),
            "nchar": len(list(mdict.values())[0]),
            "matrix": matrix,
            "ngen": self.params.mb_mcmc_ngen,
            "sfreq": self.params.mb_mcmc_sample_freq,
            "burnin": self.params.mb_mcmc_burnin,
        }))
Function that takes a dictionary mapping names to sequences and a locus number, and writes it as a NEXUS file with a MrBayes analysis block, given a set of MCMC arguments.
383,990
def print_common_terms(common_terms):
    # The original message strings were elided in the source; generic
    # messages are substituted below.
    if not common_terms:
        print('No common terms found')
    else:
        for set_pair in common_terms:
            set1, set2, terms = set_pair
            print('{} - {}:'.format(set1, set2))
            for term in terms:
                print('    {}'.format(term))
Print common terms for each pair of word sets. :param common_terms: Output of get_common_terms().
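A usage sketch showing the expected input shape — a list of (set1, set2, terms) tuples, as implied by the unpacking above; output follows the reconstructed message format:

common_terms = [('python_posts', 'ruby_posts', ['gem', 'script', 'package'])]
print_common_terms(common_terms)
# python_posts - ruby_posts:
#     gem
#     script
#     package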
383,991
def clean(self): username = self.cleaned_data.get("username") password = self.cleaned_data.get("password") self._user = authenticate(username=username, password=password) if self._user is None: raise forms.ValidationError( ugettext("Invalid username/email and password")) elif not self._user.is_active: raise forms.ValidationError(ugettext("Your account is inactive")) return self.cleaned_data
Authenticate the given username/email and password. If the fields are valid, store the authenticated user for returning via save().
383,992
def profile(self, profile):
    self._staging_data = None
    # The profile key names were elided in the source; 'install_json',
    # 'programLanguage', 'args', and 'profile_name' are reconstructions.
    lang = profile.get('install_json', {}).get('programLanguage', 'python')
    profile_args = ArgBuilder(lang, self.profile_args(profile.get('args')))
    self._profile = profile
    self._profile['profile_args'] = profile_args
    self.load_tcex()
    self.reports.profile(profile.get('profile_name'))
    self._create_tc_dirs()
Set the current profile. Args: profile (dict): The profile data.
383,993
def draw_triangle(a, b, c, color, draw): draw.polygon([a, b, c], fill=color)
Draws a triangle with the given vertices in the given color.
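A hedged usage sketch with Pillow; draw_triangle only needs an object exposing polygon(), which ImageDraw provides:

from PIL import Image, ImageDraw

img = Image.new('RGB', (100, 100), 'white')
draw = ImageDraw.Draw(img)
draw_triangle((10, 80), (50, 10), (90, 80), 'red', draw)
img.save('triangle.png')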
383,994
def clear_n_of_m(self):
    if (self.get_n_of_m_metadata().is_read_only() or
            self.get_n_of_m_metadata().is_required()):
        raise NoAccess()
    # Reset to the metadata's default value; the map-key literals were elided
    # in the source and 'nOfM'/'default_object_values' are reconstructions.
    self.my_osid_object_form._my_map['nOfM'] = \
        int(self._n_of_m_metadata['default_object_values'][0])
stub
383,995
def genome_alignment_iterator(fn, reference_species, index_friendly=False, verbose=False): kw_args = {"reference_species": reference_species} for e in maf.maf_iterator(fn, index_friendly=index_friendly, yield_class=GenomeAlignmentBlock, yield_kw_args=kw_args, verbose=verbose): yield e
build an iterator for an MAF file of genome alignment blocks. :param fn: filename or stream-like object to iterate over. :param reference_species: which species in the alignment should be treated as the reference? :param index_friendly: if True, buffering is disabled to support using the iterator to build an index. :return an iterator that yields GenomeAlignment objects
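A hedged usage sketch; 'alignment.maf' and the assembly name are placeholders:

for block in genome_alignment_iterator('alignment.maf', reference_species='hg19',
                                       index_friendly=False, verbose=False):
    # Each yielded object is a GenomeAlignmentBlock keyed on the reference species.
    print(block)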
383,996
def get_structure_seqs(pdb_file, file_type):
    my_structure = StructureIO(pdb_file)
    model = my_structure.first_model

    structure_seqs = {}
    for chain in model:
        chain_seq = ''
        tracker = 0
        for res in chain.get_residues():
            if Polypeptide.is_aa(res, standard=True):
                full_id = res.get_full_id()
                end_tracker = full_id[3][1]
                i_code = full_id[3][2]  # insertion code, e.g. the "A" in "15A"
                aa = Polypeptide.three_to_one(res.get_resname())
                chain_seq += aa
                tracker = end_tracker
            else:
                continue
        structure_seqs[chain.get_id()] = chain_seq
    return structure_seqs
Get a dictionary of a PDB file's sequences. Special cases include: - Insertion codes. In the case of residue numbers like "15A", "15B", both residues are written out. Example: 9LPR - HETATMs. Currently written as an "X", or unknown amino acid. Args: pdb_file: Path to PDB file Returns: dict: Dictionary of: {chain_id: sequence}
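A hedged usage sketch; '1opr.pdb' is a placeholder path and 'pdb' matches the file_type parameter in the signature:

seqs = get_structure_seqs('1opr.pdb', file_type='pdb')
for chain_id, seq in seqs.items():
    print(chain_id, len(seq), seq[:40])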
383,997
def add_motifs(self, args):
    self.lock.acquire()
    # Callback for motif discovery jobs; args is (job, (motifs, stdout, stderr)).
    if args is None or len(args) != 2 or len(args[1]) != 3:
        try:
            job = args[0]
            logger.warn("job %s failed", job)
            self.finished.append(job)
        except Exception:
            logger.warn("job failed")
        return
    job, (motifs, stdout, stderr) = args
    logger.info("%s finished, found %s motifs", job, len(motifs))

    for motif in motifs:
        if self.do_counter:
            self.counter += 1
            motif.id = "gimme_{}_".format(self.counter) + motif.id
        f = open(self.outfile, "a")
        f.write("%s\n" % motif.to_pfm())
        f.close()
        self.motifs.append(motif)

    if self.do_stats and len(motifs) > 0:
        logger.debug("Starting stats job of %s motifs", len(motifs))
        for bg_name, bg_fa in self.background.items():
            job = self.job_server.apply_async(
                mp_calc_stats, (motifs, self.fg_fa, bg_fa, bg_name), callback=self.add_stats
            )
            self.stat_jobs.append(job)

    logger.debug("stdout %s: %s", job, stdout)
    logger.debug("stderr %s: %s", job, stderr)
    self.finished.append(job)
    self.lock.release()
Add motifs to the result object.
383,998
def __get_jp(self, extractor_processor, sub_output=None): if sub_output is None and extractor_processor.output_field is None: raise ValueError( "ExtractorProcessors input paths cannot be unioned across fields. Please specify either a sub_output or use a single scalar output_field") if extractor_processor.get_output_jsonpath_with_name(sub_output) is not None: return extractor_processor.get_output_jsonpath_with_name(sub_output) else: return extractor_processor.get_output_jsonpath(sub_output)
Tries to get name from ExtractorProcessor to filter on first. Otherwise falls back to filtering based on its metadata
383,999
def _find_penultimate_layer(model, layer_idx, penultimate_layer_idx):
    if penultimate_layer_idx is None:
        for idx, layer in utils.reverse_enumerate(model.layers[:layer_idx - 1]):
            if isinstance(layer, Wrapper):
                layer = layer.layer
            if isinstance(layer, (_Conv, _Pooling1D, _Pooling2D, _Pooling3D)):
                penultimate_layer_idx = idx
                break

    if penultimate_layer_idx is None:
        raise ValueError('Unable to determine penultimate `Conv` or `Pooling` '
                         'layer for layer_idx: {}'.format(layer_idx))

    # Resolve negative indexing before comparing layer positions.
    if layer_idx < 0:
        layer_idx = len(model.layers) + layer_idx
    if penultimate_layer_idx > layer_idx:
        raise ValueError('`penultimate_layer_idx` needs to be before `layer_idx`')

    return model.layers[penultimate_layer_idx]
Searches for the nearest penultimate `Conv` or `Pooling` layer. Args: model: The `keras.models.Model` instance. layer_idx: The layer index within `model.layers`. penultimate_layer_idx: The pre-layer to `layer_idx`. If set to None, the nearest penultimate `Conv` or `Pooling` layer is used. Returns: The penultimate layer.
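A hedged usage sketch on a toy model; it assumes the original module's imports (utils, Wrapper, _Conv, and the pooling classes) are in scope as in the source above:

from keras.models import Sequential
from keras.layers import Conv2D, Flatten, Dense

model = Sequential([
    Conv2D(8, 3, input_shape=(32, 32, 3)),
    Flatten(),
    Dense(10, activation='softmax'),
])
# With penultimate_layer_idx=None the search walks backwards from layer_idx
# and lands on the Conv2D layer.
layer = _find_penultimate_layer(model, layer_idx=-1, penultimate_layer_idx=None)
print(layer.name)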