[Dataset columns: unnamed int64 index (0 to 389k); "code" strings of 26 to 79.6k characters; "docstring" strings of 1 to 46.9k characters.]
20,500
def getFiledAgainst(self, filedagainst_name, projectarea_id=None,
                    projectarea_name=None, archived=False):
    self.log.debug("Try to get <FiledAgainst %s>", filedagainst_name)
    if not isinstance(filedagainst_name, six.string_types) or not filedagainst_name:
        excp_msg = "Please specify a valid FiledAgainst name"
        self.log.error(excp_msg)
        raise exception.BadValue(excp_msg)
    fas = self._getFiledAgainsts(projectarea_id=projectarea_id,
                                 projectarea_name=projectarea_name,
                                 archived=archived,
                                 filedagainst_name=filedagainst_name)
    if fas is not None:
        filedagainst = fas[0]
        self.log.info("Find <FiledAgainst %s>", filedagainst)
        return filedagainst
    error_msg = "No FiledAgainst named %s" % filedagainst_name
    self.log.error(error_msg)
    raise exception.NotFound(error_msg)
Get :class:`rtcclient.models.FiledAgainst` object by its name

:param filedagainst_name: the filedagainst name
:param projectarea_id: the :class:`rtcclient.project_area.ProjectArea` id
:param projectarea_name: the project area name
:param archived: (default is False) whether the filedagainst is archived
:return: the :class:`rtcclient.models.FiledAgainst` object
:rtype: rtcclient.models.FiledAgainst
20,501
def prepare_encoder(inputs, hparams, attention_type="local_1d"):
    x = prepare_image(inputs, hparams, name="enc_channels")
    x = add_pos_signals(x, hparams, "enc_pos")
    x_shape = common_layers.shape_list(x)
    if attention_type == "local_1d":
        x = tf.reshape(x, [x_shape[0], x_shape[1] * x_shape[2], hparams.hidden_size])
        x.set_shape([None, None, hparams.hidden_size])
    elif attention_type == "local_2d":
        x.set_shape([None, None, None, hparams.hidden_size])
    return x
Prepare encoder for images.
20,502
def x_lower_limit(self, limit=None):
    if limit is None:
        if self._x_lower_limit is None:
            if self.smallest_x() < 0:
                if self.smallest_x() == self.largest_x():
                    return int(self.smallest_x() - 1)
                else:
                    return self.smallest_x()
            else:
                return 0
        else:
            return self._x_lower_limit
    else:
        if not is_numeric(limit):
            raise TypeError(
                "lower x limit must be numeric, not %s" % str(limit)
            )
        if limit >= self.largest_x():
            raise ValueError(
                "lower x limit must be less than upper limit (%s), not %s" % (
                    str(self.largest_x()), str(limit)
                )
            )
        self._x_lower_limit = limit
Returns or sets (if a value is provided) the value at which the x-axis
should start. By default this is zero (unless there are negative values).

:param limit: If given, the chart's x_lower_limit will be set to this.
:raises ValueError: if you try to make the lower limit larger than the
    upper limit.
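A short usage sketch of the combined getter/setter; the chart object and its data are hypothetical:

# Hypothetical chart with positive x data:
chart.x_lower_limit()     # get: returns 0, the stored limit, or smallest_x()
chart.x_lower_limit(5)    # set: the x-axis will now start at 5
chart.x_lower_limit("a")  # raises TypeError: lower x limit must be numeric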
20,503
def element(element, name, default=None):
    element_value = element.find(name)
    return element_value.text if element_value is not None else default
Returns the value of an element, or a default if it's not defined

:param element: The XML Element object
:type element: etree._Element
:param name: The name of the element to evaluate
:type name: str
:param default: The default value to return if the element is not defined
20,504
def _next_regular(target):
    if target <= 6:
        return target
    match = float('inf')  # anything found will be smaller
    p5 = 1
    while p5 < target:
        p35 = p5
        while p35 < target:
            # ceiling integer division
            quotient = -(-target // p35)
            try:
                p2 = 2 ** ((quotient - 1).bit_length())
            except AttributeError:
                # fallback for Python 2.6, which lacks int.bit_length()
                p2 = 2 ** _bit_length_26(quotient - 1)
            N = p2 * p35
            if N == target:
                return N
            elif N < match:
                match = N
            p35 *= 3
            if p35 == target:
                return p35
        if p35 < match:
            match = p35
        p5 *= 5
        if p5 == target:
            return p5
    if p5 < match:
        match = p5
    return match
Find the next regular number greater than or equal to target. Regular numbers are composites of the prime factors 2, 3, and 5. Also known as 5-smooth numbers or Hamming numbers, these are the optimal size for inputs to FFTPACK. Target must be a positive integer.
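Since 5-smooth numbers can be recognized by repeated division, a brute-force cross-check of the fast search above is easy to write; this sketch is illustrative only:

import functools

def is_5_smooth(n):
    # Strip out all factors of 2, 3 and 5; a regular number reduces to 1.
    for p in (2, 3, 5):
        while n % p == 0:
            n //= p
    return n == 1

def next_regular_bruteforce(target):
    n = target
    while not is_5_smooth(n):
        n += 1
    return n

assert next_regular_bruteforce(673) == 675    # 675 = 3**3 * 5**2
assert next_regular_bruteforce(1025) == 1080  # 1080 = 2**3 * 3**3 * 5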
20,505
def _get_result_paths(self, data):
    assignment_fp = str(self.Parameters[].Value).strip()
    if not os.path.isabs(assignment_fp):
        assignment_fp = os.path.relpath(assignment_fp, self.WorkingDir)
    return {: ResultPath(assignment_fp, IsWritten=True)}
Return a dict of ResultPath objects representing all possible output
20,506
def cutadaptit_pairs(data, sample):
    LOGGER.debug("Entering cutadaptit_pairs - {}".format(sample.name))
    sname = sample.name
    finput_r1 = sample.files.concat[0][0]
    finput_r2 = sample.files.concat[0][1]
    if not data.barcodes:
        try:
            data._link_barcodes()
        except Exception as inst:
            LOGGER.warning(" error adding barcodes info: %s", inst)
    if data.barcodes:
        try:
            adapter1 = fullcomp(data.paramsdict["restriction_overhang"][1])[::-1] \
                       + data._hackersonly["p3_adapter"]
            if isinstance(sample.barcode, list):
                bcode = fullcomp(sample.barcode[0])[::-1]
            elif isinstance(data.barcodes[sample.name], list):
                bcode = fullcomp(data.barcodes[sample.name][0][::-1])
            else:
                bcode = fullcomp(data.barcodes[sample.name])[::-1]
            adapter2 = fullcomp(data.paramsdict["restriction_overhang"][0])[::-1] \
                       + bcode \
                       + data._hackersonly["p5_adapter"]
        except KeyError as inst:
            msg = .format(sample.name)
            LOGGER.error(msg)
            raise IPyradWarningExit(msg)
    else:
        print(NO_BARS_GBS_WARNING)
        adapter1 = data._hackersonly["p3_adapter"]
        adapter2 = fullcomp(data._hackersonly["p5_adapter"])
    trim5r1 = trim5r2 = trim3r1 = trim3r2 = []
    if data.paramsdict.get("trim_reads"):
        trimlen = data.paramsdict.get("trim_reads")
    LOGGER.debug(" ".join(cmdf1))
    try:
        proc1 = sps.Popen(cmdf1, stderr=sps.STDOUT, stdout=sps.PIPE, close_fds=True)
        res1 = proc1.communicate()[0]
    except KeyboardInterrupt:
        proc1.kill()
        LOGGER.info("this is where I want it to interrupt")
        raise KeyboardInterrupt()
    if proc1.returncode:
        raise IPyradWarningExit(" error [returncode={}]: {}\n{}"
                                .format(proc1.returncode, " ".join(cmdf1), res1))
    LOGGER.debug("Exiting cutadaptit_pairs - {}".format(sname))
    return res1
Applies trim & filters to pairs, including adapter detection. If we have barcode information then we use it to trim reversecut+bcode+adapter from reverse read, if not then we have to apply a more general cut to make sure we remove the barcode, this uses wildcards and so will have more false positives that trim a little extra from the ends of reads. Should we add a warning about this when filter_adapters=2 and no barcodes?
20,507
def simplex_connect(self, solution_g):
    nl = solution_g.get_node_list()
    current = nl[0]
    pred = solution_g.simplex_search(current, current)
    separated = list(pred.keys())
    for n in nl:
        # 'component' attribute name inferred from the docstring's Post condition
        if solution_g.get_node(n).get_attr('component') != current:
            for m in separated:
                if (n, m) in self.edge_attr:
                    solution_g.add_edge(n, m)
                    return True
                elif (m, n) in self.edge_attr:
                    solution_g.add_edge(m, n)
                    return True
    return False
API: simplex_connect(self, solution_g)

Description:
    At this point we assume that the solution does not have a cycle.
    We check if all the nodes are connected; if not, we add an arc to
    solution_g that does not create a cycle and return True. Otherwise
    we do nothing and return False.

Pre:
    (1) We assume there is no cycle in the solution.

Input:
    solution_g: current spanning tree solution instance.

Post:
    (1) solution_g is updated. An arc that does not create a cycle is added.
    (2) 'component' attribute of nodes are changed.

Return:
    Returns True if an arc is added, returns False otherwise.
20,508
def get_revisions(page, page_num=1):
    revisions = page.revisions.order_by()
    current = page.get_latest_revision()
    if current:
        # QuerySets are immutable: exclude() returns a new queryset
        revisions = revisions.exclude(id=current.id)
    paginator = Paginator(revisions, 5)
    try:
        revisions = paginator.page(page_num)
    except PageNotAnInteger:
        revisions = paginator.page(1)
    except EmptyPage:
        revisions = paginator.page(paginator.num_pages)
    return revisions
Returns paginated queryset of PageRevision instances for specified Page instance. :param page: the page instance. :param page_num: the pagination page number. :rtype: django.db.models.query.QuerySet.
20,509
def tag_add(self, item, tag):
    tags = self.item(item, "tags")
    self.item(item, tags=tags + (tag,))
Add tag to the tags of item.

:param item: item identifier
:type item: str
:param tag: tag name
:type tag: str
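A minimal usage sketch, assuming this method lives on a ttk.Treeview subclass (the subclass name is hypothetical):

tree = TaggedTreeview(root)                      # hypothetical subclass exposing tag_add
item = tree.insert("", "end", text="row 1")      # plain ttk.Treeview API
tree.tag_configure("highlight", background="yellow")
tree.tag_add(item, "highlight")                  # appends to the item's existing tags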
20,510
def save_params(self, fname):
    arg_params, aux_params = self.get_params()
    # 'arg:'/'aux:' key prefixes follow MXNet's parameter-saving convention
    save_dict = {('arg:%s' % k): v.as_in_context(cpu()) for k, v in arg_params.items()}
    save_dict.update({('aux:%s' % k): v.as_in_context(cpu()) for k, v in aux_params.items()})
    ndarray.save(fname, save_dict)
Saves model parameters to file.

Parameters
----------
fname : str
    Path to output param file.

Examples
--------
>>> # An example of saving module parameters.
>>> mod.save_params('myfile')
20,511
def DbGetProperty(self, argin):
    self._log.debug("In DbGetProperty()")
    object_name = argin[0]
    return self.db.get_property(object_name, argin[1:])
Get free object property

:param argin: Str[0] = Object name
              Str[1] = Property name
              Str[n] = Property name
:type: tango.DevVarStringArray
:return: Str[0] = Object name
         Str[1] = Property number
         Str[2] = Property name
         Str[3] = Property value number (array case)
         Str[4] = Property value 1
         Str[n] = Property value n (array case)
         Str[n + 1] = Property name
         Str[n + 2] = Property value number (array case)
         Str[n + 3] = Property value 1
         Str[n + m] = Property value m
:rtype: tango.DevVarStringArray
20,512
def _proxy(self):
    if self._context is None:
        self._context = PhoneNumberContext(
            self._version,
            phone_number=self._solution['phone_number'],
        )
    return self._context
Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: PhoneNumberContext for this PhoneNumberInstance :rtype: twilio.rest.lookups.v1.phone_number.PhoneNumberContext
20,513
def _add_to_typedef(self, typedef_curr, line, lnum):
    # assumed "field: value" pattern; the original regex literal was lost
    mtch = re.match(r'^(\S+):\s*(\S.*)$', line)
    if mtch:
        field_name = mtch.group(1)
        field_value = mtch.group(2).split()[0].rstrip()
        if field_name == "id":
            self._chk_none(typedef_curr.id, lnum)
            typedef_curr.id = field_value
        elif field_name == "name":
            self._chk_none(typedef_curr.name, lnum)
            typedef_curr.name = field_value
        elif field_name == "transitive_over":
            typedef_curr.transitive_over.append(field_value)
        elif field_name == "inverse_of":
            self._chk_none(typedef_curr.inverse_of, lnum)
            typedef_curr.inverse_of = field_value
        else:
            self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
Add new fields to the current typedef.
20,514
def rand_unicode(min_char=MIN_UNICHR, max_char=MAX_UNICHR,
                 min_len=MIN_STRLEN, max_len=MAX_STRLEN, **kwargs):
    from syn.five import unichr
    return unicode(rand_str(min_char, max_char, min_len, max_len, unichr))
For values in the unicode range, regardless of Python version.
20,515
def create(cls, name, protocol_number, protocol_agent=None, comment=None):
    # dict keys reconstructed from the parameter names; 'protocol_agent_ref'
    # is an assumption about the API's reference-field naming
    json = {'name': name,
            'protocol_number': protocol_number,
            'protocol_agent_ref': element_resolver(protocol_agent) or None,
            'comment': comment}
    return ElementCreator(cls, json)
Create the IP Service

:param str name: name of ip-service
:param int protocol_number: ip proto number for this service
:param str,ProtocolAgent protocol_agent: optional protocol agent for this service
:param str comment: optional comment
:raises CreateElementFailed: failure creating element with reason
:return: instance with meta
:rtype: IPService
20,516
def errprt(op, lenout, inlist):
    lenout = ctypes.c_int(lenout)
    op = stypes.stringToCharP(op)
    inlist = ctypes.create_string_buffer(str.encode(inlist), lenout.value)
    inlistptr = ctypes.c_char_p(ctypes.addressof(inlist))
    # call errprt_c, matching this wrapper's name (errdev_c was a copy slip)
    libspice.errprt_c(op, lenout, inlistptr)
    return stypes.toPythonString(inlistptr)
Retrieve or set the list of error message items to be output when an
error is detected.

http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errprt_c.html

:param op: The operation, "GET" or "SET".
:type op: str
:param lenout: Length of list for output.
:type lenout: int
:param inlist: Specification of error messages to be output.
:type inlist: list of str.
:return: A list of error message items.
:rtype: list of str.
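A hedged round-trip example, assuming the spiceypy-style signature shown above:

import spiceypy as spice

# Read the current list of error-message items (lenout bounds the output size).
current = spice.errprt("GET", 200, "")
# Restrict subsequent error output to two items.
spice.errprt("SET", 200, "SHORT, EXPLAIN")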
20,517
def refocus(self, distance, method="helmholtz", h5file=None, h5mode="a"):
    field2 = nrefocus.refocus(field=self.field,
                              d=distance/self["pixel size"],
                              nm=self["medium index"],
                              res=self["wavelength"]/self["pixel size"],
                              method=method)
    if "identifier" in self:
        ident = self["identifier"]
    else:
        ident = ""
    meta_data = self.meta
    meta_data["identifier"] = "{}@{}{:.5e}m".format(ident, method[0], distance)
    qpi2 = QPImage(data=field2,
                   which_data="field",
                   meta_data=meta_data,
                   h5file=h5file,
                   h5mode=h5mode)
    return qpi2
Compute a numerically refocused QPImage

Parameters
----------
distance: float
    Focusing distance [m]
method: str
    Refocusing method, one of ["helmholtz", "fresnel"]
h5file: str, h5py.Group, h5py.File, or None
    A path to an hdf5 data file where the QPImage is cached. If set to
    `None` (default), all data will be handled in memory using the
    "core" driver of the :mod:`h5py`'s :class:`h5py:File` class. If the
    file does not exist, it is created. If the file already exists, it
    is opened with the file mode defined by `hdf5_mode`. If this is an
    instance of h5py.Group or h5py.File, then this will be used to
    internally store all data.
h5mode: str
    Valid file modes are (only applies if `h5file` is a path)
    - "r": Readonly, file must exist
    - "r+": Read/write, file must exist
    - "w": Create file, truncate if exists
    - "w-" or "x": Create file, fail if exists
    - "a": Read/write if exists, create otherwise (default)

Returns
-------
qpi: qpimage.QPImage
    Refocused phase and amplitude data

See Also
--------
:mod:`nrefocus`: library used for numerical focusing
20,518
def setGroupIcon(cls, groupName, icon):
    if cls._groupIcons is None:
        cls._groupIcons = {}
    cls._groupIcons[nativestring(groupName)] = icon
Sets the group icon for the wizard plugin to the given icon.

:param groupName | <str>
       icon      | <str>
20,519
def multivariate_neg_logposterior(self, beta):
    post = self.neg_loglik(beta)
    for k in range(0, self.z_no):
        if self.latent_variables.z_list[k].prior.covariance_prior is True:
            post += -self.latent_variables.z_list[k].prior.logpdf(self.custom_covariance(beta))
            break
        else:
            post += -self.latent_variables.z_list[k].prior.logpdf(beta[k])
    return post
Returns negative log posterior, for a model with a covariance matrix

Parameters
----------
beta : np.array
    Contains untransformed starting values for latent_variables

Returns
----------
Negative log posterior
20,520
def set_owner(self):
    owner = self.soup.find()
    try:
        self.owner_name = owner.find().string
    except AttributeError:
        self.owner_name = None
    try:
        self.owner_email = owner.find().string
    except AttributeError:
        self.owner_email = None
Parses owner name and email then sets value
20,521
def inFocus(self):
    previous_flags = self.window.flags()
    self.window.setFlags(previous_flags | QtCore.Qt.WindowStaysOnTopHint)
Set GUI on-top flag
20,522
def import_event_definition_elements(diagram_graph, element, event_definitions):
    element_id = element.getAttribute(consts.Consts.id)
    event_def_list = []
    for definition_type in event_definitions:
        event_def_xml = element.getElementsByTagNameNS("*", definition_type)
        for index in range(len(event_def_xml)):
            event_def_tmp = {consts.Consts.id: event_def_xml[index].getAttribute(consts.Consts.id),
                             consts.Consts.definition_type: definition_type}
            event_def_list.append(event_def_tmp)
    diagram_graph.node[element_id][consts.Consts.event_definitions] = event_def_list
Helper function, that adds event definition elements (defines special types
of events) to corresponding events.

:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param element: object representing a BPMN XML event element,
:param event_definitions: list of event definitions, that belongs to given event.
20,523
def attach_to_fbo(self):
    gl.glFramebufferTexture2DEXT(gl.GL_FRAMEBUFFER_EXT, self.attachment_point,
                                 self.target0, self.id, 0)
Attach the texture to a bound FBO object, for rendering to texture.
20,524
def prep_fastq_inputs(in_files, data):
    if len(in_files) == 1 and _is_bam_input(in_files):
        out = _bgzip_from_bam(in_files[0], data["dirs"], data)
    elif len(in_files) == 1 and _is_cram_input(in_files):
        out = _bgzip_from_cram(in_files[0], data["dirs"], data)
    elif len(in_files) in [1, 2] and _ready_gzip_fastq(in_files, data):
        out = _symlink_in_files(in_files, data)
    else:
        if len(in_files) > 2:
            fpairs = fastq.combine_pairs(in_files)
            pair_types = set([len(xs) for xs in fpairs])
            assert len(pair_types) == 1
            fpairs.sort(key=lambda x: os.path.basename(x[0]))
            organized = [[xs[0] for xs in fpairs]]
            if len(fpairs[0]) > 1:
                organized.append([xs[1] for xs in fpairs])
            in_files = organized
        parallel = {"type": "local", "num_jobs": len(in_files),
                    "cores_per_job": max(1, data["config"]["algorithm"]["num_cores"] // len(in_files))}
        inputs = [{"in_file": x, "read_num": i,
                   "dirs": data["dirs"], "config": data["config"],
                   "is_cwl": "cwl_keys" in data,
                   "rgnames": data["rgnames"]}
                  for i, x in enumerate(in_files) if x]
        out = run_multicore(_bgzip_from_fastq_parallel, [[d] for d in inputs],
                            data["config"], parallel)
    return out
Prepare bgzipped fastq inputs
20,525
def _bbox(nodes):
    left, bottom = np.min(nodes, axis=1)
    right, top = np.max(nodes, axis=1)
    return left, right, bottom, top
Get the bounding box for set of points.

.. note::

    There is also a Fortran implementation of this function, which
    will be used if it can be built.

Args:
    nodes (numpy.ndarray): A set of points.

Returns:
    Tuple[float, float, float, float]: The left, right,
    bottom and top bounds for the box.
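For example, with points stored column-wise (2 x N), matching the axis=1 reductions above:

import numpy as np

nodes = np.array([[0.0, 1.0, 2.0],
                  [3.0, 1.0, 4.0]])
left, right, bottom, top = _bbox(nodes)
# left=0.0, right=2.0, bottom=1.0, top=4.0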
20,526
def _create_related(self, obj, related, subfield_dict):
    for field, items in related.items():
        subobjects = []
        all_subrelated = []
        Subtype, reverse_id_field, subsubdict = subfield_dict[field]
        for order, item in enumerate(items):
            subrelated = {}
            for subfield in subsubdict:
                subrelated[subfield] = item.pop(subfield)
            if field in self.preserve_order:
                item['order'] = order
            item[reverse_id_field] = obj.id
            try:
                subobjects.append(Subtype(**item))
                all_subrelated.append(subrelated)
            except Exception as e:
                # error message reconstructed; the original literal was lost
                raise DataImportError('{} while importing {} as {}'.format(e, item, Subtype))
        try:
            Subtype.objects.bulk_create(subobjects)
        except Exception as e:
            raise DataImportError('{} while importing {} as {}'.format(e, subobjects, Subtype))
        for subobj, subrel in zip(subobjects, all_subrelated):
            self._create_related(subobj, subrel, subsubdict)
Create DB objects related to a base object.

obj: a base object to create related
related: dict mapping field names to lists of related objects
subfield_dict: where to get the next layer of subfields
20,527
def get_dates_file(path):
    with open(path) as f:
        dates = f.readlines()
    return [(convert_time_string(date_string.split(" ")[0]),
             float(date_string.split(" ")[1]))
            for date_string in dates]
parse dates file of dates and probability of choosing
20,528
def where_equals(self, field_name, value, exact=False):
    if field_name is None:
        raise ValueError("None field_name is invalid")
    field_name = Query.escape_if_needed(field_name)
    self._add_operator_if_needed()
    token = "equals"
    if self.negate:
        self.negate = False
        token = "not_equals"
    self.last_equality = {field_name: value}
    token = _Token(field_name=field_name, value=self.add_query_parameter(value),
                   token=token, exact=exact)
    token.write = self.rql_where_write(token)
    self._where_tokens.append(token)
    return self
To get all the documents that are equal to the value in the given field_name

@param str field_name: The field name in the index you want to query.
@param value: The value will be the field's value you want to query
@param bool exact: If True, get an exact match for the query
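A minimal query sketch, assuming a pyravendb-style session object (names are hypothetical):

# where_equals returns self, so clauses chain:
query = session.query().where_equals("name", "John", exact=True)
results = list(query)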
20,529
def delete_repository(self, namespace, repository):
    return self._http_call(self.REPO, delete,
                           namespace=namespace, repository=repository)
DELETE /v1/repositories/(namespace)/(repository)/
20,530
def image_predict(self, X):
    pixels = self.extract_pixels(X)
    predictions = self.classifier.predict(pixels)
    return predictions.reshape(X.shape[0], X.shape[1], X.shape[2])
Predicts class label for the entire image.

:param X: Array of images to be classified.
:type X: numpy array, shape = [n_images, n_pixels_y, n_pixels_x, n_bands]
:return: raster classification map
:rtype: numpy array, [n_images, n_pixels_y, n_pixels_x]
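A usage sketch under the shape convention in the docstring; the wrapper instance is hypothetical:

import numpy as np

images = np.zeros((8, 64, 64, 4))       # [n_images, n_pixels_y, n_pixels_x, n_bands]
labels = wrapper.image_predict(images)  # -> classification map of shape (8, 64, 64)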
20,531
def get_next_redirect_url(request, redirect_field_name="next"):
    redirect_to = get_request_param(request, redirect_field_name)
    if not get_adapter(request).is_safe_url(redirect_to):
        redirect_to = None
    return redirect_to
Returns the next URL to redirect to, if it was explicitly passed via the request.
20,532
def load_servers_from_env(self, filter=[], dynamic=None):
    if dynamic is None:
        dynamic = self._dynamic
    if NAMESERVERS_ENV_VAR in os.environ:
        servers = [s for s in os.environ[NAMESERVERS_ENV_VAR].split() if s]
        self._parse_name_servers(servers, filter, dynamic)
Load the name servers environment variable and parse each server in the list.

@param filter Restrict the parsed objects to only those in this path.
    For example, setting filter to [['/', 'localhost', 'host.cxt', 'comp1.rtc']]
    will prevent 'comp2.rtc' in the same naming context from being parsed.
@param dynamic Override the tree-wide dynamic setting. If not provided,
    the value given when the tree was created will be used.
20,533
def _to_dict(self):
    _dict = {}
    if hasattr(self, 'entities') and self.entities is not None:
        _dict['entities'] = [x._to_dict() for x in self.entities]
    return _dict
Return a json dictionary representing this model.
20,534
def diamond_search_output_basename(self, out_path):
    return os.path.join(self.outdir, out_path,
                        "%s_diamond_search" % self.basename)
Does not include the .daa part that diamond creates
20,535
def find():
    names = (, , , )
    current_dir = os.getcwd()
    configconfig_file = os.path.join(current_dir, )
    default_config_dir = os.path.join(current_dir, )
    if os.path.isfile(configconfig_file):
        logger.debug(, configconfig_file)
        with open(configconfig_file) as stream:
            config_dir = os.path.join(current_dir, stream.read()).strip()
    elif os.path.isdir(default_config_dir):
        config_dir = default_config_dir
    else:
        config_dir = current_dir
    logger.debug(, config_dir)
    for name in names:
        config_file = os.path.join(config_dir, name)
        logger.debug(, config_file)
        if os.path.isfile(config_file):
            logger.debug(, config_file)
            return config_file
    logger.debug()
    return None
Find the configuration file if any.
20,536
def _get_key_redis_key(bank, key):
    opts = _get_redis_keys_opts()
    # format string and opts keys reconstructed from the function's purpose
    return '{prefix}{separator}{bank}/{key}'.format(
        prefix=opts['key_prefix'],
        separator=opts['separator'],
        bank=bank,
        key=key
    )
Return the Redis key given the bank name and the key name.
20,537
def general_setting(key, default=None, expected_type=None, qsettings=None):
    if qsettings is None:
        qsettings = QSettings()
    try:
        if isinstance(expected_type, type):
            return qsettings.value(key, default, type=expected_type)
        else:
            return qsettings.value(key, default)
    except TypeError as e:
        # debug messages reconstructed; the original literals were lost
        LOGGER.debug('%s' % e)
        LOGGER.debug('key %s, default %s, expected type %s' % (key, default, expected_type))
        return qsettings.value(key, default)
Helper function to get a value from settings.

:param key: Unique key for setting.
:type key: basestring

:param default: The default value in case of the key is not found or there is an error.
:type default: basestring, None, boolean, int, float

:param expected_type: The type of object expected.
:type expected_type: type

:param qsettings: A custom QSettings to use. If it's not defined, it will use the default one.
:type qsettings: qgis.PyQt.QtCore.QSettings

:returns: The value of the key in the setting.
:rtype: object

Note: The API for QSettings to get a value is different for PyQt and Qt C++.
In PyQt we can specify the expected type.
See: http://pyqt.sourceforge.net/Docs/PyQt4/qsettings.html#value
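A hedged usage sketch; the key name is hypothetical:

show_tips = general_setting('show_welcome_tips', default=True, expected_type=bool)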
20,538
def to_html(self, protocol='http', d3_url=None, d3_scale_chromatic_url=None,
            html_base=None):
    # default protocol assumed to be 'http' per the docstring
    d3_url_struct = D3URLs(d3_url, d3_scale_chromatic_url)
    ExternalJSUtilts.ensure_valid_protocol(protocol)
    javascript_to_insert = '\n'.join([
        PackedDataUtils.full_content_of_javascript_files(),
        self.scatterplot_structure._visualization_data.to_javascript(),
        self.scatterplot_structure.get_js_to_call_build_scatterplot()
    ])
    html_template = (PackedDataUtils.full_content_of_default_html_template()
                     if html_base is None
                     else self._format_html_base(html_base))
    html_content = (
        html_template
        .replace(, javascript_to_insert, 1)
        .replace(, d3_url_struct.get_d3_url(), 1)
        .replace(, d3_url_struct.get_d3_scale_chromatic_url())
    )
    extra_libs = ''
    if self.scatterplot_structure._save_svg_button:
        extra_libs = 
    html_content = (html_content
                    .replace(, extra_libs, 1)
                    .replace(, protocol + '://'))
    return html_content
Parameters
----------
protocol : str
    'http' or 'https' for including external urls
d3_url : str
    None by default. The url (or path) of d3, to be inserted into
    <script src="..."/>. By default, this is `DEFAULT_D3_URL` declared
    in `ScatterplotStructure`.
d3_scale_chromatic_url : str
    None by default. URL of d3_scale_chromatic_url, to be inserted
    into <script src="..."/>. By default, this is
    `DEFAULT_D3_SCALE_CHROMATIC` declared in `ScatterplotStructure`.
html_base : str
    None by default. HTML of semiotic square to be inserted above plot.

Returns
-------
str, the html file representation
20,539
def _compute_and_transfer_to_final_run(self, process_name, start_timeperiod,
                                       end_timeperiod, job_record):
    source_collection_name = context.process_context[process_name].source
    start_id = self.ds.highest_primary_key(source_collection_name, start_timeperiod, end_timeperiod)
    end_id = self.ds.lowest_primary_key(source_collection_name, start_timeperiod, end_timeperiod)
    uow, transfer_to_final = self.insert_and_publish_uow(job_record, start_id, end_id)
    self.update_job(job_record, uow, job.STATE_FINAL_RUN)
    if transfer_to_final:
        self._process_state_final_run(job_record)
Method computes a new unit_of_work and transfers the job to STATE_FINAL_RUN.
It also shares the _fuzzy_ DuplicateKeyError logic of the
_compute_and_transfer_to_progress method.
20,540
def extract_subnetworks(
    partition_file,
    network_file,
    output_dir,
    max_cores=DEFAULT_MAX_CORES,
    max_size_matrix=DEFAULT_MAX_SIZE_MATRIX,
    saturation_threshold=DEFAULT_SATURATION_THRESHOLD,
):
    logger.info("Loading partition...")
    data_chunks = np.loadtxt(partition_file, usecols=(1,), dtype=np.int32)
    logger.info("Loading network...")
    network = np.loadtxt(network_file, dtype=np.int32)
    cores = data_chunks
    core_network = np.copy(network)
    core_network[:, 0] = cores[network[:, 0]]
    core_network[:, 1] = cores[network[:, 1]]
    n = np.amax(cores) + 1

    def extract(network_to_keep, filename):
        subnetwork = np.copy(network[network_to_keep])
        subnetwork[:, 0] -= 1
        subnetwork[:, 1] -= 1
        np.savetxt(filename, subnetwork, fmt="%i")
        return subnetwork

    def draw(subnetwork, filename):
        try:
            row = subnetwork[:, 0]
            col = subnetwork[:, 1]
            data = subnetwork[:, 2]
        except TypeError:
            row = subnetwork.row
            col = subnetwork.col
            data = subnetwork.data
        row_indices = stats.rankdata(np.concatenate((row, col)), method="dense")
        col_indices = stats.rankdata(np.concatenate((col, row)), method="dense")
        data = np.concatenate((data, data))
        unique_row = np.unique(row)
        unique_col = np.unique(col)
        size = len(np.unique(np.concatenate((unique_row, unique_col)))) + 1
        try:
            sparse_subnet = sparse.coo_matrix(
                (data, (row_indices, col_indices)), shape=(size, size)
            )
            binning_factor = (size // max_size_matrix) + 1
            binned_subnet = hcs.bin_sparse(
                sparse_subnet, subsampling_factor=binning_factor
            )
            dense_subnet = binned_subnet.todense()
            diagonal = np.diag(np.diag(dense_subnet))
            normed_subnet = hcs.normalize_dense(dense_subnet - diagonal)
            vmax = np.percentile(normed_subnet, saturation_threshold)
            spaceless_pdf_plot_maker(normed_subnet, filename, vmax=vmax)
        except MemoryError:
            logger.warning("Warning, couldn't save matrix due to memory issues")

    def extract_and_draw(network_to_keep, filename_text, filename_image):
        subnetwork = extract(network_to_keep, filename=filename_text)
        draw(subnetwork, filename=filename_image)

    global_network_indices_list = []
    for i in range(1, n):
        if i > max_cores:
            break
        network_to_keep_1 = core_network[:, 0] == i
        network_to_keep_2 = core_network[:, 1] == i
        network_to_keep = network_to_keep_1 * network_to_keep_2
        nonzero_indices, = np.nonzero(network_to_keep)
        global_network_indices_list += nonzero_indices.tolist()
        subnetwork_file = os.path.join(output_dir, "subnetwork_core_{}.dat".format(i))
        image_name = os.path.join(output_dir, "core_{}.eps".format(i))
        extract_and_draw(
            network_to_keep=network_to_keep,
            filename_text=subnetwork_file,
            filename_image=image_name,
        )
Extract bin subnetworks from the main network

Identify bins, extract subnets, draws the adjacency matrices,
saves it all in a specified output directory.

Parameters
----------
partition_file : file, str or pathlib.Path
    The file containing, for each chunk, the communities it was
    assigned to at each iteration.
network_file : file, str or pathlib.Path
    The file containing the network in sparse (edge list) format
output_dir : str or pathlib.Path
    The output directory to write the subnetworks into.
max_cores : int, optional
    The maximum number of bins to extract. Default is 100.
max_size_matrix : int, optional
    When rendering contact maps for each bin, the maximum size for the
    matrix. Default is 2000.
saturation_threshold : float, optional
    When rendering contact maps for each bin, the percentile value over
    which the color map should be saturated. Default is 80.
20,541
def prompt(msg, default=NO_DEFAULT, validate=None):
    while True:
        response = input(msg + " ").strip()
        if not response:
            if default is NO_DEFAULT:
                continue
            return default
        if validate is None or validate(response):
            return response
Prompt user for input
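Example interactions, inferred from the loop above:

name = prompt("Your name?")                          # re-asks until input is non-empty
port = prompt("Port?", default="8080")               # empty input returns the default
answer = prompt("Continue? [y/n]",
                validate=lambda r: r in ("y", "n"))  # re-asks until the validator passes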
20,542
def _get_client_fqdn(self, client_info_contents):
    yamldict = yaml.safe_load(client_info_contents)
    fqdn = yamldict[][]
    client_id = yamldict[].split()[1]
    return client_id, fqdn
Extracts a GRR client's FQDN from its client_info.yaml file.

Args:
    client_info_contents: The contents of the client_info.yaml file.

Returns:
    A (str, str) tuple representing client ID and client FQDN.
20,543
def present(name,
            object_name,
            object_type,
            defprivileges=None,
            grant_option=None,
            prepend='public',
            maintenance_db=None,
            user=None,
            db_password=None,
            db_host=None,
            db_port=None,
            db_user=None):
    # ret skeleton and dict keys follow the standard Salt state pattern;
    # comment strings are reconstructed, as the original literals were lost
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': 'The requested default privilege(s) are already set'}
    defprivileges = ','.join(defprivileges) if defprivileges else None
    kwargs = {'defprivileges': defprivileges,
              'grant_option': grant_option,
              'prepend': prepend,
              'maintenance_db': maintenance_db,
              'runas': user,
              'host': db_host,
              'user': db_user,
              'port': db_port,
              'password': db_password}
    if not __salt__['postgres.has_default_privileges'](
            name, object_name, object_type, **kwargs):
        _defprivs = object_name if object_type == 'group' else defprivileges
        if __opts__['test']:
            ret['result'] = None
            ret['comment'] = ('The default privilege(s): {0} are set to be granted to {1}'
                              ).format(_defprivs, name)
            return ret
        if __salt__['postgres.default_privileges_grant'](
                name, object_name, object_type, **kwargs):
            ret['comment'] = ('The default privilege(s): {0} have been granted to {1}'
                              ).format(_defprivs, name)
            ret['changes'][name] = 'Present'
        else:
            ret['comment'] = ('Failed to grant default privilege(s): {0} to {1}'
                              ).format(_defprivs, name)
            ret['result'] = False
    return ret
Grant the requested privilege(s) on the specified object to a role

name
    Name of the role to which privileges should be granted

object_name
    Name of the object on which the grant is to be performed.
    'ALL' may be used for objects of type 'table' or 'sequence'.

object_type
    The object type, which can be one of the following:

    - table
    - sequence
    - schema
    - group
    - function

    View permissions should specify `object_type: table`.

privileges
    List of privileges to grant, from the list below:

    - INSERT
    - CREATE
    - TRUNCATE
    - CONNECT
    - TRIGGER
    - SELECT
    - USAGE
    - TEMPORARY
    - UPDATE
    - EXECUTE
    - REFERENCES
    - DELETE
    - ALL

    :note: privileges should not be set when granting group membership

grant_option
    If grant_option is set to True, the recipient of the privilege can
    in turn grant it to others

prepend
    Table and Sequence object types live under a schema so this should be
    provided if the object is not under the default `public` schema

maintenance_db
    The name of the database in which the language is to be installed

user
    System user all operations should be performed on behalf of

db_user
    database username if different from config or default

db_password
    user password if any password for a specified user

db_host
    Database host if different from config or default

db_port
    Database port if different from config or default
20,544
def evaluate(self, num_eval_batches=None):
    num_eval_batches = num_eval_batches or self.num_eval_batches
    with tf.Graph().as_default() as graph:
        self.tensors = self.model.build_eval_graph(self.eval_data_paths,
                                                   self.batch_size)
        self.summary = tf.summary.merge_all()
        self.saver = tf.train.Saver()
        self.summary_writer = tf.summary.FileWriter(self.output_path)
        self.sv = tf.train.Supervisor(
            graph=graph,
            logdir=self.output_path,
            summary_op=None,
            global_step=None,
            saver=self.saver)
        last_checkpoint = tf.train.latest_checkpoint(self.checkpoint_path)
        # master='' runs against the in-process TensorFlow session
        with self.sv.managed_session(master='',
                                     start_standard_services=False) as session:
            self.sv.saver.restore(session, last_checkpoint)
            if not self.batch_of_examples:
                self.sv.start_queue_runners(session)
                for i in range(num_eval_batches):
                    self.batch_of_examples.append(session.run(self.tensors.examples))
            for i in range(num_eval_batches):
                session.run(self.tensors.metric_updates,
                            {self.tensors.examples: self.batch_of_examples[i]})
            metric_values = session.run(self.tensors.metric_values)
            global_step = tf.train.global_step(session, self.tensors.global_step)
            summary = session.run(self.summary)
            self.summary_writer.add_summary(summary, global_step)
            self.summary_writer.flush()
            return metric_values
Run one round of evaluation, return loss and accuracy.
20,545
def from_dict(cls, data, intersect=False, orient='items', dtype=None):
    from collections import OrderedDict, defaultdict
    # orientation literals follow the docstring: 'items' (default) or 'minor'
    orient = orient.lower()
    if orient == 'minor':
        new_data = defaultdict(OrderedDict)
        for col, df in data.items():
            for item, s in df.items():
                new_data[item][col] = s
        data = new_data
    elif orient != 'items':
        raise ValueError('Orientation must be one of {items, minor}.')
    d = cls._homogenize_dict(cls, data, intersect=intersect, dtype=dtype)
    ks = list(d['data'].keys())
    if not isinstance(d['data'], OrderedDict):
        ks = list(sorted(ks))
    d[cls._info_axis_name] = Index(ks)
    return cls(**d)
Construct Panel from dict of DataFrame objects.

Parameters
----------
data : dict
    {field : DataFrame}
intersect : boolean
    Intersect indexes of input DataFrames
orient : {'items', 'minor'}, default 'items'
    The "orientation" of the data. If the keys of the passed dict
    should be the items of the result panel, pass 'items' (default).
    Otherwise if the columns of the values of the passed DataFrame
    objects should be the items (which in the case of mixed-dtype data
    you should do), instead pass 'minor'
dtype : dtype, default None
    Data type to force, otherwise infer

Returns
-------
Panel
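A small sketch of the two orientations; this only runs on pandas versions that still ship Panel (i.e. before 0.25):

import numpy as np
import pandas as pd

data = {'item1': pd.DataFrame(np.random.randn(4, 3)),
        'item2': pd.DataFrame(np.random.randn(4, 3))}
wp = pd.Panel.from_dict(data)                   # dict keys become the items axis
wp2 = pd.Panel.from_dict(data, orient='minor')  # dict keys become the minor axis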
20,546
def enable_console_debug_logging():
    logger = logging.getLogger("github")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler())
This function sets up a very simple logging configuration (log everything on standard output) that is useful for troubleshooting.
20,547
def add_parser_arguments(parser, args, group=None, prefix=DATA_PREFIX):
    if group:
        parser = parser.add_argument_group(group)
    for arg, kwargs in iteritems(args):
        # key and kwarg names reconstructed from the docstring's description
        arg_name = kwargs.pop('arg', arg.replace('_', '-'))
        if 'metavar' not in kwargs:
            kwargs['metavar'] = arg.upper()
        if 'dest' in kwargs:
            kwargs['dest'] = prefix + kwargs['dest']
        else:
            kwargs['dest'] = prefix + arg
        parser.add_argument('--' + arg_name, **kwargs)
Helper method that populates parser arguments. The argument values can be later retrieved with `extract_arguments` method. The `args` argument to this method should be a dict with strings as keys and dicts as values. The keys will be used as keys in returned data. Their values will be passed as kwargs to `parser.add_argument`. There is special value `arg` that will be used as argument name if present, otherwise a name will be generated based on the key. If `group` is a string, it will be used as group header in help output.
20,548
def get_compiler(compiler, **compiler_attrs):
    if compiler is None or isinstance(compiler, str):
        cc = ccompiler.new_compiler(compiler=compiler, verbose=0)
        customize_compiler(cc)
        # 'mingw32' inferred from the customize_mingw call
        if cc.compiler_type == 'mingw32':
            customize_mingw(cc)
    else:
        cc = compiler
        customize_gcc(cc)
    for name, val in compiler_attrs.items():
        setattr(cc, name, val)
    return cc
get and customize a compiler
20,549
def _getPattern(self, ipattern, done=None):
    if ipattern is None:
        return None
    if ipattern is True:
        if done is not None:
            return ([(None, None, done)], {})
        return ([(0, False)], {})

    def _getReverse(pm):
        return pm == 

    def _getIndex(k):
        try:
            return int(k)
        except ValueError:
            raise InvalidPatternError(k, "Invalid level number")

    def _getDone(p):
        v = p.split()
        if len(v) == 2:
            try:
                return (Model.indexes[v[0]], v[1], done)
            except KeyError:
                raise InvalidPatternError(v[0], )
        return (None, v[0], done)

    ipattern1 = list()
    ipattern2 = dict()
    for s in ipattern.split():
        if done is not None:
            v = done
        else:
            v = _getReverse(s[-1])
        k = s.split()
        if len(k) == 1:
            if done is not None:
                ipattern1.append(_getDone(k[0]))
                continue
            ko = k[0][:-1]
            try:
                if len(k[0]) == 1:
                    k = 0
                else:
                    k = Model.indexes[ko]
            except KeyError:
                k = _getIndex(k[0][:-1])
            else:
                ipattern1.append((k, v))
                continue
            v = (0, v)
        elif len(k) == 2:
            try:
                if done is not None:
                    v = _getDone(k[1])
                else:
                    v = (Model.indexes[k[1][:-1]], v)
                k = _getIndex(k[0])
            except KeyError:
                raise InvalidPatternError(k[1][:-1], )
        else:
            raise InvalidPatternError(s, )
        ipattern2.setdefault(k, []).append(v)
    return (ipattern1, ipattern2)
Parses sort pattern.

:ipattern: A pattern to parse.
:done: If :ipattern: refers to done|undone, use this to indicate proper state.
:returns: A pattern suitable for Model.modify.
20,550
def _find_listeners():
    for i in range(31):
        try:
            if gpib.listener(BOARD, i) and gpib.ask(BOARD, 1) != i:
                yield i
        except gpib.GpibError as e:
            logger.debug("GPIB error in _find_listeners(): %s", repr(e))
Find GPIB listeners.
20,551
def depth(args):
    import seaborn as sns
    p = OptionParser(depth.__doc__)
    opts, args, iopts = p.set_image_options(args, figsize="14x14")
    if len(args) != 1:
        sys.exit(not p.print_help())
    tsvfile, = args
    fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(ncols=2, nrows=2,
                                                 figsize=(iopts.w, iopts.h))
    plt.tight_layout(pad=6)
    data = pd.read_csv(tsvfile, sep="\t", low_memory=False)
    ids, treds = read_treds()
    for (dp, ax, title) in zip(("FDP", "PDP", "RDP", "PEDP"),
                               (ax1, ax2, ax3, ax4),
                               ("Spanning reads", "Partial reads",
                                "Repeat-only reads", "Paired-end reads")):
        logging.debug("Build {}".format(title))
        xd = []
        mdp = []
        for tred, motif in zip(treds["abbreviation"], treds["motif"]):
            if tred in ignore:
                logging.debug("Ignore {}".format(tred))
                continue
            if len(motif) > 4:
                if "/" in motif:
                    motif = motif.split("/")[0]
                else:
                    motif = motif[:4] + ".."
            xtred = "{} {}".format(tred, motif)
            # column name assumed to be "<tred>.<dp>"; the separator literal was lost
            md = [x for x in data[tred + "." + dp] if x >= 0]
            subsample = 10000 if dp == "RDP" else 1000
            md = sample(md, subsample)
            pmd = [x for x in md if x > 0]
            median = np.median(pmd) if pmd else 0
            mdp.append((xtred, median))
            for d in md:
                xd.append((xtred, d))
        mdp.sort(key=lambda x: x[1])
        order, mdp = zip(*mdp)
        xt, xd = zip(*xd)
        sns.boxplot(xt, xd, ax=ax, order=order, fliersize=2)
        xticklabels = ax.get_xticklabels()
        ax.set_xticklabels(xticklabels, rotation=45, ha="right")
        ax.set_title("Number of {} per locus".format(title), size=18)
        ylim = 30 if dp == "RDP" else 100
        ax.set_ylim(0, ylim)
        yticklabels = [int(x) for x in ax.get_yticks()]
        ax.set_yticklabels(yticklabels, size=14)
    root = fig.add_axes([0, 0, 1, 1])
    pad = .04
    panel_labels(root, ((pad, 1 - pad, "A"), (1 / 2. + pad / 2, 1 - pad, "B"),
                        (pad, .5 - pad / 2, "C"), (1 / 2. + pad / 2, .5 - pad / 2, "D")))
    normalize_axes(root)
    image_name = "depth." + iopts.format
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
%prog depth DP.tsv

Plot read depths across all TREDs.
20,552
def RFC3156_micalg_from_algo(hash_algo):
    algo = gpg.core.hash_algo_name(hash_algo)
    if algo is None:
        # error message reconstructed; the original literal was lost
        raise GPGProblem('Invalid hash algorithm {}'.format(algo),
                         code=GPGCode.INVALID_HASH_ALGORITHM)
    # the 'pgp-' prefix is what RFC3156 requires (see docstring)
    return 'pgp-' + algo.lower()
Converts a GPGME hash algorithm name to one conforming to RFC3156.

GPGME returns hash algorithm names such as "SHA256", but RFC3156 says
that programs need to use names such as "pgp-sha256" instead.

:param str hash_algo: GPGME hash_algo
:returns: the lowercase name of the algorithm with "pgp-" prepended
:rtype: str
20,553
def erase_up(self):
    self.erase_start_of_line()
    self.fill_region(self.cur_r - 1, 1, 1, self.cols)
Erases the screen from the current line up to the top of the screen.
20,554
def rest(self, method, uri, data=None, status_codes=None, parse=True, **kwargs):
    r = self.pool.request_encode_body(method, uri, fields=data,
                                      encode_multipart=False)
    if not r.status in (status_codes if status_codes else (200, 201)):
        print cl( % (uri, method), )
        print cl(data, )
        print r.headers
        raise Exception, "Invalid status code: %s" % r.status
    if not parse:
        "return raw urllib3 response"
        return r
    if not self.debug_loads:
        "return parsed edn"
        return loads(r.data)
    "time edn parse time and return parsed edn"
    return self.debug(loads, args=(r.data, ), kwargs={}, fmt=, color=)
Rest helpers
20,555
def __update_action(self, revision):
    patch = revision.get("patch")
    if patch.get("_id"):
        del patch["_id"]
    update_response = yield self.collection.patch(
        revision.get("master_id"),
        self.__make_storeable_patch_patchable(patch))
    if update_response.get("n") == 0:
        raise RevisionNotFoundException()
Update a master document and revision history document :param dict revision: The revision dictionary
20,556
def spline_fit_magseries(times, mags, errs, period,
                         knotfraction=0.01,
                         maxknots=30,
                         sigclip=30.0,
                         plotfit=False,
                         ignoreinitfail=False,
                         magsarefluxes=False,
                         verbose=True):
    # this is in case we don't have np.diff(phase) > 0.0 everywhere
    phase_diffs_ind = npdiff(phase) > 0.0
    incphase_ind = npconcatenate((nparray([True]), phase_diffs_ind))
    phase, pmags, perrs = (phase[incphase_ind],
                           pmags[incphase_ind],
                           perrs[incphase_ind])
    spl = LSQUnivariateSpline(phase, pmags, t=splineknots, w=1.0/perrs)
    fitmags = spl(phase)
    fitchisq = npsum(
        ((fitmags - pmags)*(fitmags - pmags)) / (perrs*perrs)
    )
    fitredchisq = fitchisq/(len(pmags) - nknots - 1)
    if verbose:
        # log message reconstructed; the original literal was lost
        LOGINFO('spline fit done. nknots = %s, '
                'chisq = %.5f, reduced chisq = %.5f' %
                (nknots, fitchisq, fitredchisq))
    if not magsarefluxes:
        fitmagminind = npwhere(fitmags == npmax(fitmags))
    else:
        fitmagminind = npwhere(fitmags == npmin(fitmags))
    if len(fitmagminind[0]) > 1:
        fitmagminind = (fitmagminind[0][0],)
    magseriesepoch = ptimes[fitmagminind]
    # returndict keys restored from the structure given in the docstring
    returndict = {
        'fittype': 'spline',
        'fitinfo': {
            'nknots': nknots,
            'fitmags': fitmags,
            'fitepoch': magseriesepoch
        },
        'fitchisq': fitchisq,
        'fitredchisq': fitredchisq,
        'fitplotfile': None,
        'magseries': {
            'times': ptimes,
            'phase': phase,
            'mags': pmags,
            'errs': perrs,
            'magsarefluxes': magsarefluxes
        },
    }
    if plotfit and isinstance(plotfit, str):
        make_fit_plot(phase, pmags, perrs, fitmags, period, mintime,
                      magseriesepoch, plotfit, magsarefluxes=magsarefluxes)
        returndict['fitplotfile'] = plotfit
    return returndict
This fits a univariate cubic spline to the phased light curve.

This fit may be better than the Fourier fit for sharply variable objects,
like EBs, so can be used to distinguish them from other types of variables.

Parameters
----------
times,mags,errs : np.array
    The input mag/flux time-series to fit a spline to.
period : float
    The period to use for the spline fit.
knotfraction : float
    The knot fraction is the number of internal knots to use for the
    spline. A value of 0.01 (or 1%) of the total number of non-nan
    observations appears to work quite well, without over-fitting.
    maxknots controls the maximum number of knots that will be allowed.
maxknots : int
    The maximum number of knots that will be used even if `knotfraction`
    gives a value to use larger than `maxknots`. This helps dealing with
    over-fitting to short time-scale variations.
sigclip : float or int or sequence of two floats/ints or None
    If a single float or int, a symmetric sigma-clip will be performed
    using the number provided as the sigma-multiplier to cut out from the
    input time-series.
    If a list of two ints/floats is provided, the function will perform an
    'asymmetric' sigma-clip. The first element in this list is the sigma
    value to use for fainter flux/mag values; the second element in this
    list is the sigma value to use for brighter flux/mag values. For
    example, `sigclip=[10., 3.]`, will sigclip out greater than 10-sigma
    dimmings and greater than 3-sigma brightenings. Here the meaning of
    "dimming" and "brightening" is set by *physics* (not the magnitude
    system), which is why the `magsarefluxes` kwarg must be correctly set.
    If `sigclip` is None, no sigma-clipping will be performed, and the
    time-series (with non-finite elems removed) will be passed through to
    the output.
magsarefluxes : bool
    If True, will treat the input values of `mags` as fluxes for purposes
    of plotting the fit and sig-clipping.
plotfit : str or False
    If this is a string, this function will make a plot for the fit to
    the mag/flux time-series and writes the plot to the path specified
    here.
ignoreinitfail : bool
    If this is True, ignores the initial failure to find a set of
    optimized Fourier parameters using the global optimization function
    and proceeds to do a least-squares fit anyway.
verbose : bool
    If True, will indicate progress and warn of any problems.

Returns
-------
dict
    This function returns a dict containing the model fit parameters, the
    minimized chi-sq value and the reduced chi-sq value. The form of this
    dict is mostly standardized across all functions in this module::

        {
            'fittype':'spline',
            'fitinfo':{
                'nknots': the number of knots used for the fit
                'fitmags': the model fit mags,
                'fitepoch': the epoch of minimum light for the fit,
            },
            'fitchisq': the minimized value of the fit's chi-sq,
            'fitredchisq':the reduced chi-sq value,
            'fitplotfile': the output fit plot if fitplot is not None,
            'magseries':{
                'times':input times in phase order of the model,
                'phase':the phases of the model mags,
                'mags':input mags/fluxes in the phase order of the model,
                'errs':errs in the phase order of the model,
                'magsarefluxes':input value of magsarefluxes kwarg
            }
        }
20,557
def cublasDspr2(handle, uplo, n, alpha, x, incx, y, incy, AP):
    status = _libcublas.cublasDspr2_v2(handle,
                                       _CUBLAS_FILL_MODE[uplo],
                                       n,
                                       ctypes.byref(ctypes.c_double(alpha)),
                                       int(x), incx,
                                       int(y), incy,
                                       int(AP))
    cublasCheckStatus(status)
Rank-2 operation on real symmetric-packed matrix.
20,558
def get_object(self, queryset=None):
    assert queryset is None, "Passing a queryset is disabled"
    queryset = self.filter_queryset(self.get_queryset())
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    lookup = self.kwargs.get(lookup_url_kwarg, None)
    assert lookup is not None, "Other lookup methods are disabled"
    filter_kwargs = {self.lookup_field: lookup}
    obj = self.get_object_or_404(queryset, **filter_kwargs)
    self.check_object_permissions(self.request, obj)
    return obj
Return the object the view is displaying.

Same as rest_framework.generics.GenericAPIView, but:
- Failed assertions instead of deprecations
20,559
def dateint_to_datetime(dateint):
    if len(str(dateint)) != 8:
        # error message reconstructed; the original literal was lost
        raise ValueError(
            'dateint must be an 8-digit integer, e.g. 20161225')
    year, month, day = decompose_dateint(dateint)
    return datetime(year=year, month=month, day=day)
Converts the given dateint to a datetime object, in local timezone.

Arguments
---------
dateint : int
    An integer object depicting a specific calendaric day; e.g. 20161225.

Returns
-------
datetime.datetime
    A timezone-unaware datetime object representing the start of the given
    day (so at 0 hours, 0 minutes, etc...) in the local timezone.
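For example:

dt = dateint_to_datetime(20161225)  # -> datetime.datetime(2016, 12, 25, 0, 0)
dateint_to_datetime(201612)         # raises ValueError: not an 8-digit dateint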
20,560
def master_ref(self):
    return ReferencesDataFrame(self._engine_dataframe.getMaster(),
                               self._session, self._implicits)
Filters the current DataFrame to only contain those rows whose reference is master.

>>> master_df = refs_df.master_ref

:rtype: ReferencesDataFrame
20,561
def _upgrade_schema(engine):
    # table/column names and SQL statements are reconstructed from the
    # surviving control flow; treat the literals as best-effort assumptions
    inspector = reflection.Inspector.from_engine(engine)
    with engine.connect() as conn:
        # Add task_id column and index to tasks
        if 'task_id' not in [x['name'] for x in inspector.get_columns('tasks')]:
            logger.warning('Upgrading DbTaskHistory schema: Adding tasks.task_id')
            conn.execute('ALTER TABLE tasks ADD COLUMN task_id VARCHAR(200)')
            conn.execute('CREATE INDEX ix_task_id ON tasks (task_id)')
        # Alter value column to be TEXT
        if 'mysql' in engine.dialect.name:
            conn.execute('ALTER TABLE task_parameters MODIFY COLUMN value TEXT')
        elif 'oracle' in engine.dialect.name:
            conn.execute('ALTER TABLE task_parameters MODIFY value TEXT')
        elif 'mssql' in engine.dialect.name:
            conn.execute('ALTER TABLE task_parameters ALTER COLUMN value TEXT')
        elif 'postgres' in engine.dialect.name:
            if str([x for x in inspector.get_columns('task_parameters')
                    if x['name'] == 'value'][0]['type']) != 'TEXT':
                conn.execute('ALTER TABLE task_parameters ALTER COLUMN value TYPE TEXT')
        elif 'sqlite' in engine.dialect.name:
            for i in conn.execute('PRAGMA table_info(task_parameters);').fetchall():
                if i['name'] == 'value' and i['type'] != 'TEXT':
                    logger.warning(
                        'SQLite can not change column types. Please use a new database '
                        'to pick up column type changes.'
                    )
        else:
            logger.warning(
                'SQLAlchemy dialect {} could not be migrated to the task_id schema'.format(
                    engine.dialect
                )
            )
Ensure the database schema is up to date with the codebase. :param engine: SQLAlchemy engine of the underlying database.
20,562
def all_pairs_normalized_distances_reference(X):
    n_samples, n_cols = X.shape
    D = np.ones((n_samples, n_samples), dtype="float32") * np.inf
    for i in range(n_samples):
        diffs = X - X[i, :].reshape((1, n_cols))
        missing_diffs = np.isnan(diffs)
        missing_counts_per_row = missing_diffs.sum(axis=1)
        valid_rows = missing_counts_per_row < n_cols
        D[i, valid_rows] = np.nanmean(diffs[valid_rows, :] ** 2, axis=1)
    return D
Reference implementation of normalized all-pairs distance, used for testing the more efficient implementation above for equivalence.
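A small worked example; entry D[0, 1] averages squared differences over the columns both rows share:

import numpy as np

X = np.array([[1.0, 2.0],
              [np.nan, 4.0],
              [3.0, np.nan]])
D = all_pairs_normalized_distances_reference(X)
# Row 0 vs row 1 share only column 1: D[0, 1] == (2.0 - 4.0) ** 2 == 4.0
# Diagonal entries are 0 (a row compared with itself).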
20,563
def do_lmfit(data, params, B=None, errs=None, dojac=True):
    # copy the params so as not to change the initial conditions
    params = copy.deepcopy(params)
    data = np.array(data)
    mask = np.where(np.isfinite(data))

    def residual(params, **kwargs):
        f = ntwodgaussian_lmfit(params)
        model = f(*mask)
        if B is None:
            return model - data[mask]
        else:
            return (model - data[mask]).dot(B)

    # kws key names are assumptions; the original literals were lost
    if dojac:
        result = lmfit.minimize(residual, params,
                                kws={'x': mask[0], 'y': mask[1],
                                     'B': B, 'errs': errs},
                                Dfun=lmfit_jacobian)
    else:
        result = lmfit.minimize(residual, params,
                                kws={'x': mask[0], 'y': mask[1],
                                     'B': B, 'errs': errs})
    if B is not None:
        result.residual = result.residual.dot(inv(B))
    return result, params
Fit the model to the data.

Data may contain 'flagged' or 'masked' data with the value of np.NaN.

Parameters
----------
data : 2d-array
    Image data
params : lmfit.Parameters
    Initial model guess.
B : 2d-array
    B matrix to be used in residual calculations. Default = None.
errs : 1d-array
dojac : bool
    If true then an analytic jacobian will be passed to the fitting routine.

Returns
-------
result : ?
    lmfit.minimize result.
params : lmfit.Params
    Fitted model.

See Also
--------
:func:`AegeanTools.fitting.lmfit_jacobian`
20,564
def _get_containing_contigs(self, hits_dict):
    containing = {}
    for qry_name in hits_dict:
        d = self._containing_contigs(hits_dict[qry_name])
        if len(d):
            containing[qry_name] = d
    return containing
Given dictionary of nucmer hits (made by self._load_nucmer_hits()), returns a dictionary. key=contig name. Value = set of contigs that contain the key.
20,565
def print_attrs(data_file, node_name='/', which='user', compress=False):
    # defaults for node_name and which are taken from the docstring;
    # the print format strings are reconstructed
    node = data_file.get_node(node_name)
    print('Attributes of node %s:' % node)
    for attr in node._v_attrs._f_list():
        print('\t%s' % attr)
        attr_content = repr(node._v_attrs[attr])
        if compress:
            attr_content = attr_content.split('\n')[0]
        print("\t %s" % attr_content)
Print the HDF5 attributes for `node_name`.

Parameters:
    data_file (pytables HDF5 file object): the data file to print
    node_name (string): name of the path inside the file to be printed.
        Can be either a group or a leaf-node. Default: '/', the root node.
    which (string): Valid values are 'user' for user-defined attributes,
        'sys' for pytables-specific attributes and 'all' to print both
        groups of attributes. Default 'user'.
    compress (bool): if True displays at most a line for each attribute.
        Default False.
20,566
def EMAIL_REQUIRED(self):
    from allauth.account import app_settings as account_settings
    return self._setting("EMAIL_REQUIRED", account_settings.EMAIL_REQUIRED)
The user is required to hand over an e-mail address when signing up
20,567
def plugins(self):
    if not self._plugins:
        # 'package' and 'enabled' keys inferred from the docstring's tuple layout
        self._plugins = [
            (plugin_name, plugin_cfg['package'], plugin_cfg)
            for plugin_name, plugin_cfg in self.validated.items()
            if (plugin_name not in self.base_schema.keys())
            and plugin_cfg['enabled']]
    return self._plugins
:returns: [(plugin_name, plugin_package, plugin_config), ...]
:rtype: list of tuple
20,568
def send_UDP_message(self, message):
    x = 0
    if self.tracking_enabled:
        try:
            proc = udp_messenger(self.domain_name, self.UDP_IP, self.UDP_PORT,
                                 self.sock_timeout, message)
            self.procs.append(proc)
        except Exception as e:
            logger.debug("Usage tracking failed: {}".format(e))
    else:
        x = -1
    return x
Send UDP message.
20,569
def render(self, dt):
    for frame in self._frozen:
        for body in frame:
            self.draw_body(body)
    for body in self.world.bodies:
        self.draw_body(body)
    # 'markers' attribute name inferred from its use just below
    if hasattr(self.world, 'markers'):
        window.glColor4f(0.9, 0.1, 0.1, 0.9)
        window.glLineWidth(3)
        for j in self.world.markers.joints.values():
            window.glBegin(window.GL_LINES)
            window.glVertex3f(*j.getAnchor())
            window.glVertex3f(*j.getAnchor2())
            window.glEnd()
Draw all bodies in the world.
20,570
def find_table_links(self):
    html = urlopen(self.model_url).read()
    doc = lh.fromstring(html)
    # CSS selector reconstructed; image-map <area> tags carry the table links
    href_list = [area.attrib['href'] for area in doc.cssselect('map area')]
    tables = self._inception_table_links(href_list)
    return tables
When given a url, this function will find all the available table names for that EPA dataset.
20,571
def ref(self, tickers, flds, ovrds=None):
    ovrds = [] if not ovrds else ovrds
    logger = _get_logger(self.debug)
    if type(tickers) is not list:
        tickers = [tickers]
    if type(flds) is not list:
        flds = [flds]
    request = self._create_req('ReferenceDataRequest', tickers, flds, ovrds, [])
    logger.info('Sending Request:\n{}'.format(request))
    self._session.sendRequest(request, identity=self._identity)
    data = self._parse_ref(flds)
    data = pd.DataFrame(data)
    # column names given by the docstring
    data.columns = ['ticker', 'field', 'value']
    return data
Make a reference data request, get tickers and fields, return long
pandas DataFrame with columns [ticker, field, value]

Parameters
----------
tickers: {list, string}
    String or list of strings corresponding to tickers
flds: {list, string}
    String or list of strings corresponding to FLDS
ovrds: list of tuples
    List of tuples where each tuple corresponds to the override
    field and value

Example
-------
>>> import pdblp
>>> con = pdblp.BCon()
>>> con.start()
>>> con.ref("CL1 Comdty", ["FUT_GEN_MONTH"])

Notes
-----
This returns reference data which has singleton values. In raw format
the messages passed back contain data of the form

fieldData = {
    FUT_GEN_MONTH = "FGHJKMNQUVXZ"
}
20,572
def ot_validate(nexson, **kwargs):
    codes_to_skip = [NexsonWarningCodes.UNVALIDATED_ANNOTATION]
    v_log, adaptor = validate_nexson(nexson, codes_to_skip, **kwargs)
    # annotation strings reconstructed; the original literals were lost
    annotation = v_log.prepare_annotation(author_name='api.opentreeoflife.org/validate',
                                          description='Open Tree NexSON validation')
    return annotation, v_log, adaptor
Returns three objects: an annotation dict (NexSON formmatted), the validation_log object created when NexSON validation was performed, and the object of class NexSON which was created from nexson. This object may alias parts of the nexson dict that is passed in as an argument. Currently the only kwargs used is 'max_num_trees_per_study'
20,573
def save_file(self, filename, text):
    _defaultdir = self.DEFAULTDIR
    try:
        # '.py' suffix and 'init_' prefix are assumptions; the literals were lost
        if not filename.endswith('.py'):
            filename += '.py'
        try:
            self.DEFAULTDIR = (
                'init_' + hydpy.pub.timegrids.sim.lastdate.to_string())
        except AttributeError:
            pass
        path = os.path.join(self.currentpath, filename)
        with open(path, 'w', encoding="utf-8") as file_:
            file_.write(text)
    except BaseException:
        objecttools.augment_excmessage(
            'While trying to write the conditions file `%s`' % filename)
    finally:
        self.DEFAULTDIR = _defaultdir
Save the given text under the given condition filename and the current path.

If the current directory is not defined explicitly, the directory name is
constructed with the actual simulation end date. If such a directory does
not exist, it is created immediately.
20,574
def close(self):
    if self._dev is not None:
        usb.util.dispose_resources(self._dev)
        self._dev = None
Close and release the current usb device. :return: None
20,575
def save(self, savefile):
    # binary write mode, since an mp3 payload is written
    with open(str(savefile), 'wb') as f:
        self.write_to_fp(f)
    log.debug("Saved to %s", savefile)
Do the TTS API request and write result to file.

Args:
    savefile (string): The path and file name to save the ``mp3`` to.

Raises:
    :class:`gTTSError`: When there's an error with the API request.
20,576
def get(self, path_or_index, default=None):
    err, value = self._resolve(path_or_index)
    value = default if err else value
    return err, value
Get details about a given result

:param path_or_index: The path (or index) of the result to fetch.
:param default: If the given result does not exist, return this value instead
:return: A tuple of `(error, value)`. If the entry does not exist
    then `(err, default)` is returned, where `err` is the actual error
    which occurred. You can use
    :meth:`couchbase.exceptions.CouchbaseError.rc_to_exctype` to convert
    the error code to a proper exception class
:raise: :exc:`IndexError` or :exc:`KeyError` if `path_or_index` is not
    an initially requested path. This is a programming error as opposed
    to a constraint error where the path is not found.
20,577
def analyze(fqdn, result, argl, argd):
    # the package is the first component of the fully-qualified name
    package = fqdn.split('.')[0]
    if package not in _methods:
        _load_methods(package)
    if _methods[package] is not None and fqdn in _methods[package]:
        return _methods[package][fqdn](fqdn, result, *argl, **argd)
Analyzes the result from calling the method with the specified FQDN.

Args:
    fqdn (str): fully-qualified name of the method that was called.
    result: result of calling the method with `fqdn`.
    argl (tuple): positional arguments passed to the method call.
    argd (dict): keyword arguments passed to the method call.
20,578
def expand_hostdef(self, hostdef):
    # separator literals ('[', ']', ':') reconstructed from the docstring's
    # pattern syntax, e.g. "foo[001:010].bar.com"
    try:
        hosts_todo = [hostdef]
        hosts_done = []
        while hosts_todo:
            host = hosts_todo.pop(0)
            if '[' not in host:
                hosts_done.append(host)
                continue
            head, rest = host.split('[', 1)
            pattern, tail = rest.split(']', 1)
            start, end = pattern.split(':')
            fill = False
            if start.startswith('0') and len(start) > 0:
                fill = len(start)
            try:
                for i in range(int(start), int(end) + 1):
                    if fill:
                        range_nr = str(i).zfill(fill)
                    else:
                        range_nr = i
                    new_host = '{0}{1}{2}'.format(head, range_nr, tail)
                    if '[' in new_host:
                        hosts_todo.append(new_host)
                    else:
                        hosts_done.append(new_host)
            except ValueError:
                for i in range(ord(start), ord(end) + 1):
                    new_host = '{0}{1}{2}'.format(head, chr(i), tail)
                    if '[' in new_host:
                        hosts_todo.append(new_host)
                    else:
                        hosts_done.append(new_host)
        # strip any ports from the final hostnames
        return [host_name.split(':')[0] for host_name in hosts_done]
    except Exception as e:
        self.log.warning("Couldn't parse host definition '{0}': {1}".format(hostdef, e))
        return []
Expand a host definition (e.g. "foo[001:010].bar.com") into separate hostnames. Supports zero-padding, numbered ranges and alphabetical ranges. Multiple patterns in a host definition are also supported. Returns a list of the fully expanded hostnames. Ports are also removed from hostnames as a bonus (e.g. "foo.bar.com:8022" -> "foo.bar.com")
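A hedged usage sketch; `inventory` stands in for whatever object owns expand_hostdef, since the owning class is not shown here:

inventory.expand_hostdef("foo[001:003].bar.com")
# -> ['foo001.bar.com', 'foo002.bar.com', 'foo003.bar.com']
inventory.expand_hostdef("db[a:c].example.com:8022")  # port is stripped
# -> ['dba.example.com', 'dbb.example.com', 'dbc.example.com']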
20,579
def norm(self, order=2):
    return (sum(val**order for val in abs(self).values()))**(1/order)
Find the vector norm, with the given order, of the values
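A worked example, assuming a hypothetical mapping-backed Vector type whose abs() yields the absolute values:

v = Vector(a=3, b=-4)  # hypothetical vector class for illustration
v.norm()               # (|3|**2 + |-4|**2) ** (1/2) == 5.0
v.norm(order=1)        # |3| + |-4| == 7.0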
20,580
def add_pool(arg, opts, shell_opts):
    # NOTE: option-key literals were lost in extraction; the names below
    # follow the nipap CLI's documented option names and are a reconstruction.
    p = Pool()
    p.name = opts.get('name')
    p.description = opts.get('description')
    p.default_type = opts.get('default-type')
    p.ipv4_default_prefix_length = opts.get('ipv4_default_prefix_length')
    p.ipv6_default_prefix_length = opts.get('ipv6_default_prefix_length')
    if 'tags' in opts:
        tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0]
        p.tags = {}
        for tag_name in tags:
            tag = Tag()
            tag.name = tag_name
            p.tags[tag_name] = tag
    for avp in opts.get('extra-attribute', []):
        try:
            key, value = avp.split('=', 1)
        except ValueError:
            print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp,
                  file=sys.stderr)
            return
        p.avps[key] = value
    try:
        p.save()
    except pynipap.NipapError as exc:
        print("Could not add pool to NIPAP: %s" % str(exc), file=sys.stderr)
        sys.exit(1)
    print("Pool '%s' created." % (p.name))
Add a pool.
20,581
def cmd_center(self, args):
    if len(args) < 3:
        print("map center LAT LON")
        return
    lat = float(args[1])
    lon = float(args[2])
    self.map.set_center(lat, lon)
Control the center of the map view
20,582
def search(connect_spec, base, scope='subtree', filterstr='(objectClass=*)',
           attrlist=None, attrsonly=0):
    l = connect(connect_spec)
    # Map the friendly scope name to the ldap module's SCOPE_* constant.
    scope = getattr(ldap, 'SCOPE_' + scope.upper())
    try:
        results = l.c.search_s(base, scope, filterstr, attrlist, attrsonly)
    except ldap.NO_SUCH_OBJECT:
        results = []
    except ldap.LDAPError as e:
        _convert_exception(e)
    return dict(results)
Search an LDAP database. :param connect_spec: See the documentation for the ``connect_spec`` parameter for :py:func:`connect`. :param base: Distinguished name of the entry at which to start the search. :param scope: One of the following: * ``'subtree'`` Search the base and all of its descendants. * ``'base'`` Search only the base itself. * ``'onelevel'`` Search only the base's immediate children. :param filterstr: String representation of the filter to apply in the search. :param attrlist: Limit the returned attributes to those in the specified list. If ``None``, all attributes of each entry are returned. :param attrsonly: If non-zero, don't return any attribute values. :returns: a dict of results. The dict is empty if there are no results. The dict maps each returned entry's distinguished name to a dict that maps each of the matching attribute names to a list of its values. CLI example: .. code-block:: bash salt '*' ldap3.search "{ 'url': 'ldaps://ldap.example.com/', 'bind': { 'method': 'simple', 'dn': 'cn=admin,dc=example,dc=com', 'password': 'secret', }, }" "base='dc=example,dc=com'"
20,583
def get_active_sessions(self):
    for last_timestamp, i, events in self.recently_active:
        yield Session(events[-1].user, unpack_events(events))
Retrieves the active, unexpired sessions. :Returns: A generator of :class:`~mwsessions.Session`
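A hedged sketch of driving this from mwsessions; the Sessionizer constructor argument name is an assumption:

from mwsessions import Sessionizer
sessionizer = Sessionizer(cutoff=3600)  # session timeout in seconds (assumed)
sessionizer.process("Alice", 1464837200)
sessionizer.process("Alice", 1464837400)
for session in sessionizer.get_active_sessions():
    print(session.user, len(session.events))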
20,584
def make_dict(name, words, *args, **kwargs):
    info = CzechHashBuilder(words, *args, **kwargs)
    # Docstring template literal lost in extraction; placeholder reconstruction:
    doc = '%s.%s-generated mapping subclass %r' % (__name__, make_dict.__name__, name)
    return create_dict_subclass(name, info.hash_function, info.words, doc)
make_dict(name, words, *args, **kwargs) -> mapping subclass Takes a sequence of words (or a pre-built Czech HashInfo) and returns a mapping subclass called `name` (used as a dict) that employs a minimal perfect hash. This mapping subclass has guaranteed O(1) worst-case lookups, additions, and deletions, but is slower than dict() in practice. >>> months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split() >>> MyDict = make_dict('MyDict', months) >>> d = MyDict(dec=21, feb=None, may='hello') >>> d['jul'] = False >>> d MyDict([('feb', None), ('may', 'hello'), ('jul', False), ('dec', 21)]) >>> del d['may'] >>> del d['apr'] Traceback (most recent call last): ... KeyError: 'apr' >>> len(d) 3
20,585
def parse_date(date, default=None):
    if date == "":
        if default is not None:
            return default
        else:
            raise Exception("Unknown format for " + date)
    for format_type in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H",
                        "%Y-%m-%d", "%d/%m/%Y %H:%M:%S", "%d/%m/%Y %H:%M",
                        "%d/%m/%Y %H", "%d/%m/%Y"]:
        try:
            return datetime.strptime(date, format_type)
        except ValueError:
            pass
    raise Exception("Unknown format for " + date)
Parse a date string in one of the supported ISO-like or day/month/year formats; raises an Exception if no format matches
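A quick demonstration of the accepted formats:

from datetime import datetime
parse_date("2020-01-31 12:30")                # datetime(2020, 1, 31, 12, 30)
parse_date("31/01/2020")                      # datetime(2020, 1, 31, 0, 0)
parse_date("", default=datetime(1970, 1, 1))  # returns the given default
parse_date("January 31")                      # raises Exception: unknown format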
20,586
def find_by_id(self, attachment, params={}, **options):
    path = "/attachments/%s" % (attachment)
    return self.client.get(path, params, **options)
Returns the full record for a single attachment. Parameters ---------- attachment : {Id} Globally unique identifier for the attachment. [params] : {Object} Parameters for the request
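A hedged usage sketch against the python-asana client; the token and id are placeholders:

import asana
client = asana.Client.access_token('PERSONAL_ACCESS_TOKEN')  # placeholder token
attachment = client.attachments.find_by_id(12345)            # placeholder id
print(attachment['name'], attachment['download_url'])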
20,587
def random_matrix(rows, cols, mean=0, std=1, sparsity=0, radius=0, diagonal=0, rng=None):
    if rng is None or isinstance(rng, int):
        rng = np.random.RandomState(rng)
    arr = mean + std * rng.randn(rows, cols)
    if 1 > sparsity > 0:
        k = min(rows, cols)
        mask = rng.binomial(n=1, p=1 - sparsity, size=(rows, cols)).astype(bool)
        # always keep the leading diagonal nonzero
        mask[:k, :k] |= np.eye(k).astype(bool)
        arr *= mask
    if radius > 0:
        # rescale singular values so the spectral radius equals `radius`
        u, s, vT = np.linalg.svd(arr, full_matrices=False)
        arr = np.dot(np.dot(u, np.diag(radius * s / abs(s[0]))), vT)
    if diagonal != 0:
        arr = diagonal * np.eye(max(rows, cols))[:rows, :cols]
    return arr.astype(FLOAT)
Create a matrix of randomly-initialized weights. Parameters ---------- rows : int Number of rows of the weight matrix -- equivalently, the number of "input" units that the weight matrix connects. cols : int Number of columns of the weight matrix -- equivalently, the number of "output" units that the weight matrix connects. mean : float, optional Draw initial weight values from a normal with this mean. Defaults to 0. std : float, optional Draw initial weight values from a normal with this standard deviation. Defaults to 1. sparsity : float in (0, 1), optional If given, ensure that the given fraction of the weight matrix is set to zero. Defaults to 0, meaning all weights are nonzero. radius : float, optional If given, rescale the initial weights to have this spectral radius. No scaling is performed by default. diagonal : float, optional If nonzero, create a matrix containing all zeros except for this value along the diagonal. If nonzero, other arguments (except for rows and cols) will be ignored. rng : :class:`numpy.random.RandomState` or int, optional A random number generator, or an integer seed for a random number generator. If not provided, the random number generator will be created with an automatically chosen seed. Returns ------- matrix : numpy array An array containing random values. These often represent the weights connecting each "input" unit to each "output" unit in a layer.
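A short sketch of the sparsity behaviour (numpy assumed, as in the function itself):

import numpy as np
w = random_matrix(100, 50, std=0.1, sparsity=0.9, rng=42)
w.shape          # (100, 50)
np.mean(w == 0)  # roughly 0.9; the top-left 50x50 diagonal is kept nonzero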
20,588
def open_config(self, type="shared"):
    try:
        output = self.dev.rpc("<open-configuration><{0}/></open-configuration>".format(type))
    except Exception as err:
        print(err)
Opens the configuration of the currently connected device Args: :type: The type of configuration you want to open. Any string can be provided, however the standard supported options are: **exclusive**, **private**, and **shared**. The default mode is **shared**. Examples: .. code-block:: python #Open shared config from pyJunosManager import JunosDevice dev = JunosDevice(host="1.2.3.4",username="root",password="Juniper") dev.open() dev.open_config() dev.close_config() dev.close() #Open private config from pyJunosManager import JunosDevice dev = JunosDevice(host="1.2.3.4",username="root",password="Juniper") dev.open() dev.open_config("private") dev.close_config() dev.close()
20,589
def check_hash(self, checker, filename, tfp):
    checker.report(
        self.debug,
        "Validating %%s checksum for %s" % filename)
    if not checker.is_valid():
        tfp.close()
        os.unlink(filename)
        raise DistutilsError(
            "%s validation failed for %s; "
            "possible download problem?" % (
                checker.hash.name, os.path.basename(filename))
        )
checker is a ContentChecker
20,590
def param_extract(args, short_form, long_form, default=None):
    val = default
    for i, a in enumerate(args):
        elems = a.split("=", 1)
        if elems[0] in [short_form, long_form]:
            if len(elems) == 1:
                # "--opt value" form: take the next token unless it is a flag
                if i + 1 < len(args) and not args[i + 1].startswith("-"):
                    val = args[i + 1]
                else:
                    val = ""
            else:
                # "--opt=value" form
                val = elems[1]
            break
    return val
Quick extraction of a parameter from the command line argument list. In some cases we need to parse a few arguments before the official arg-parser starts. Returns the parameter value, or the default (None unless specified) if the parameter is not present.
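A brief demonstration of both accepted spellings:

args = ["--port", "8080", "-v", "--name=web"]
param_extract(args, "-p", "--port")           # -> "8080"  (space-separated form)
param_extract(args, "-n", "--name")           # -> "web"   ("=" form)
param_extract(args, "-H", "--host", "local")  # -> "local" (falls back to default)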
20,591
def extern_store_tuple(self, context_handle, vals_ptr, vals_len):
    c = self._ffi.from_handle(context_handle)
    return c.to_value(tuple(c.from_value(val[0])
                            for val in self._ffi.unpack(vals_ptr, vals_len)))
Given storage and an array of Handles, return a new Handle to represent the list.
20,592
def followers(self):
    if self._followers is None:
        self.assert_bind_client()
        if self.follower_count > 0:
            self._followers = self.bind_client.get_athlete_followers(self.id)
        else:
            self._followers = []
    return self._followers
:return: Iterator of :class:`stravalib.model.Athlete` followers objects for this athlete.
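A hedged sketch using stravalib's client; the access token is a placeholder and the followers endpoint must be permitted for that token:

from stravalib.client import Client
client = Client(access_token='ACCESS_TOKEN')  # placeholder token
athlete = client.get_athlete()
for follower in athlete.followers:  # fetched lazily on first access
    print(follower.firstname, follower.lastname)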
20,593
def toDict(self):
    if six.PY3:
        result = super().toDict()
    else:
        result = AARead.toDict(self)
    result.update({
        'start': self.start,
        'stop': self.stop,
        'openLeft': self.openLeft,
        'openRight': self.openRight,
    })
    return result
Get information about this read in a dictionary. @return: A C{dict} with keys/values for the attributes of self.
20,594
def SetAndLoadTagFile(self, tagging_file_path):
    tag_file = tagging_file.TaggingFile(tagging_file_path)
    self._tagging_rules = tag_file.GetEventTaggingRules()
Sets the tag file to be used by the plugin. Args: tagging_file_path (str): path of the tagging file.
20,595
def create_address(kwargs=None, call=None):
    # NOTE: several string literals were lost in extraction; the messages and
    # event names below are reconstructions in the salt-cloud GCE style.
    if call != 'function':
        raise SaltCloudSystemExit(
            'The create_address function must be called with -f or --function.')
    if not kwargs or 'name' not in kwargs:
        log.error('A name must be specified when creating an address.')
        return False
    if 'region' not in kwargs:
        log.error('A region must be specified for the address.')
        return False
    name = kwargs['name']
    ex_region = kwargs['region']
    ex_address = kwargs.get("address", None)
    kwargs['region'] = _expand_region(kwargs['region'])
    conn = get_conn()
    __utils__['cloud.fire_event'](
        'event',
        'create address',
        'salt/cloud/address/creating',
        args=salt.utils.data.simple_types_filter(kwargs),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    addy = conn.ex_create_address(name, ex_region, ex_address)
    __utils__['cloud.fire_event'](
        'event',
        'created address',
        'salt/cloud/address/created',
        args=salt.utils.data.simple_types_filter(kwargs),
        sock_dir=__opts__['sock_dir'],
        transport=__opts__['transport']
    )
    log.info('Created GCE address %s', name)
    return _expand_address(addy)
Create a static address in a region. CLI Example: .. code-block:: bash salt-cloud -f create_address gce name=my-ip region=us-central1 address=IP
20,596
def draw_if_interactive():
    fig = Gcf.get_active().canvas.figure
    # attach a show() method to the figure the first time it is drawn
    if not hasattr(fig, 'show'):
        fig.show = lambda *a: send_figure(fig)
    if not matplotlib.is_interactive():
        return
    # move the figure to the end of the draw queue, avoiding duplicates
    try:
        show._to_draw.remove(fig)
    except ValueError:
        pass
    show._to_draw.append(fig)
    show._draw_called = True
Is called after every pylab drawing command
20,597
def get_order(self, order_id):
    # path literal lost in extraction; '/orders/{}' is a reconstruction
    resp = self.get('/orders/{}'.format(order_id))
    return Order(resp)
Get a single order by its order id
20,598
def raw(self, raw):
    if raw is None:
        raise ValueError("Invalid value for `raw`, must not be `None`")
    # the base64 pattern below is restored from the error message itself
    if raw is not None and not re.search(
            r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$', raw):
        raise ValueError(
            r"Invalid value for `raw`, must be a follow pattern or equal to "
            r"`/^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/`")
    self._raw = raw
Sets the raw of this RuntimeRawExtension. Raw is the underlying serialization of this object. # noqa: E501 :param raw: The raw of this RuntimeRawExtension. # noqa: E501 :type: str
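The guard accepts exactly the strings produced by standard base64 encoding, as this quick check illustrates:

import base64
import re
pattern = r'^(?:[A-Za-z0-9+\/]{4})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$'
encoded = base64.b64encode(b'{"kind": "Example"}').decode('ascii')
assert re.search(pattern, encoded)            # canonical base64 passes
assert not re.search(pattern, 'not base64!')  # stray characters fail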
20,599
def get_system_data() -> typing.Union[None, dict]:
    site_packages = get_site_packages()
    # Prefix labels were lost in extraction; the tags below are placeholders.
    path_prefixes = [('[sp]', p) for p in site_packages]
    path_prefixes.append(('[sys]', sys.exec_prefix))
    packages = [
        module_to_package_data(name, entry, path_prefixes)
        for name, entry in list(sys.modules.items())
    ]
    python_data = dict(
        version=list(sys.version_info),
        executable=simplify_path(sys.executable),
        directory=simplify_path(sys.exec_prefix),
        site_packages=[simplify_path(sp) for sp in site_packages]
    )
    return dict(
        python=python_data,
        packages=[p for p in packages if p is not None]
    )
Returns information about the system in which Cauldron is running. If the information cannot be found, None is returned instead. :return: Dictionary containing information about the Cauldron system, which includes: * name * location * version