24,100
def _stringify_column(self, column_index):
    table_column = TableTranspose(self.table)[column_index]
    prior_cell = None
    for row_index in range(self.start[0], self.end[0]):
        cell, changed = self._check_interpret_cell(
            table_column[row_index], prior_cell, row_index, column_index)
        if changed:
            table_column[row_index] = cell
        prior_cell = cell
Same as _stringify_row but for columns.
24,101
def _swap_slice_indices(self, slc, make_slice=False):
    try:
        start = slc.start
        stop = slc.stop
        slc_step = slc.step
    except AttributeError:
        # Plain integer index rather than a slice.
        if make_slice:
            if slc < 0:
                slc += self.length()
            return slice(slc, slc + 1)
        else:
            return slc
    else:
        if not start and start != 0:
            slc_stop = self.length()
        elif start < 0:
            slc_stop = self.length() + start + 1
        else:
            slc_stop = start + 1
        if not stop and stop != 0:
            slc_start = 0
        elif stop < 0:
            slc_start = self.length() + stop
        else:
            slc_start = stop
        return slice(slc_start, slc_stop, slc_step)
Swap slice indices: convert slice indices from Verilog slicing (e.g. IEEE 1800-2012) to Python slicing.
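A quick sketch of the swap, assuming a hypothetical register-like host object whose length() is 8; Verilog [7:0] (msb:lsb, inclusive) becomes the Python slice [0:8]:

class Reg:
    # Hypothetical host object; only length() is needed by the method.
    def length(self):
        return 8

    _swap_slice_indices = _swap_slice_indices

r = Reg()
print(r._swap_slice_indices(slice(7, 0)))          # slice(0, 8, None)
print(r._swap_slice_indices(slice(3, 2)))          # slice(2, 4, None)
print(r._swap_slice_indices(-1, make_slice=True))  # slice(7, 8, None)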
24,102
def node_to_evenly_discretized(node):
    if not all([node.attrib["minMag"], node.attrib["binWidth"],
                node.nodes[0].text]):
        return None
    rates = [float(x) for x in node.nodes[0].text.split()]
    return mfd.evenly_discretized.EvenlyDiscretizedMFD(
        float(node.attrib["minMag"]),
        float(node.attrib["binWidth"]),
        rates)
Parses the evenly discretized MFD node to an instance of :class:`openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD`, or to None if not all parameters are available.
24,103
def repr_size(n_bytes):
    if n_bytes < 1024:
        return "{} Bytes".format(n_bytes)
    i = -1
    while n_bytes > 1023:
        n_bytes /= 1024.0
        i += 1
    # Format strings recovered from the doctests below; si_prefixes is
    # assumed to be a sequence like ('Ki', 'Mi', 'Gi', 'Ti', ...).
    return "{} {}B".format(round(n_bytes, 1), si_prefixes[i])
>>> repr_size(1000)
'1000 Bytes'
>>> repr_size(8257332324597)
'7.5 TiB'
24,104
def rbac_policy_update(request, policy_id, **kwargs):
    body = {'rbac_policy': kwargs}
    rbac_policy = neutronclient(request).update_rbac_policy(
        policy_id, body=body).get('rbac_policy')
    return RBACPolicy(rbac_policy)
Update an RBAC Policy. :param request: request context :param policy_id: target policy id :param target_tenant: target tenant of the policy :return: RBACPolicy object
24,105
def dockprep(self, force_rerun=False):
    log.debug('{}: running dock preparation...'.format(self.id))
    prep_mol2 = op.join(self.dock_dir, '{}_prep.mol2'.format(self.id))
    prep_py = op.join(self.dock_dir, "prep.py")
    if ssbio.utils.force_rerun(flag=force_rerun, outfile=prep_mol2):
        # The string literals written below were lost in extraction; this is
        # the standard Chimera DockPrep scripting recipe and is an assumption.
        with open(prep_py, "w") as f:
            f.write('import chimera\n')
            f.write('from DockPrep import prep\n')
            f.write('models = chimera.openModels.list(modelTypes=[chimera.Molecule])\n')
            f.write('prep(models)\n')
            f.write('from WriteMol2 import writeMol2\n')
            f.write('writeMol2(models, "{}")\n'.format(prep_mol2))
        cmd = 'chimera --nogui {} {}'.format(self.structure_path, prep_py)
        os.system(cmd)
        os.remove(prep_py)
        os.remove('{}c'.format(prep_py))  # remove the compiled prep.pyc
    if ssbio.utils.is_non_zero_file(prep_mol2):
        self.dockprep_path = prep_mol2
        log.debug('{}: successful dockprep execution'.format(self.dockprep_path))
    else:
        log.critical('{}: dockprep failed to run'.format(self.structure_path))
Prepare a PDB file for docking by first converting it to mol2 format. Args: force_rerun (bool): If method should be rerun even if output file exists
24,106
def sync_memoize(f):
    memory = {}
    lock = Lock()

    @wraps(f)
    def new_f(*args):
        try:
            return memory[args]
        except KeyError:
            # Double-checked locking: re-test the cache inside the lock so
            # the wrapped function runs at most once per argument tuple.
            with lock:
                try:
                    return memory[args]
                except KeyError:
                    r = f(*args)
                    memory[args] = r
                    return r

    return new_f
Like memoize, but guarantees that the decorated function is only called once per argument tuple, even when multiple threads call the decorated function concurrently.
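A minimal usage sketch (assuming the decorator above is in scope, with Lock and wraps imported): four threads race on the same key, but the wrapped body runs only once.

import threading

@sync_memoize
def load_resource(name):
    print("computing", name)  # executes once per distinct name
    return name.upper()

threads = [threading.Thread(target=load_resource, args=("config",))
           for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(load_resource("config"))  # 'CONFIG', served from the cache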
24,107
def cmd_up(self, args):
    if len(args) == 0:
        adjust = 5.0
    else:
        adjust = float(args[0])
    old_trim = self.get_mav_param('TRIM_PITCH_CD', None)
    if old_trim is None:
        print("Existing trim value unknown!")
        return
    new_trim = int(old_trim + (adjust * 100))
    if math.fabs(new_trim - old_trim) > 1000:
        print("Adjustment by %d too large (from %d to %d)"
              % (adjust * 100, old_trim, new_trim))
        return
    print("Adjusting TRIM_PITCH_CD from %d to %d" % (old_trim, new_trim))
    self.param_set('TRIM_PITCH_CD', new_trim)
adjust TRIM_PITCH_CD up by 5 degrees
24,108
def MoveToAttribute(self, name):
    ret = libxml2mod.xmlTextReaderMoveToAttribute(self._o, name)
    return ret
Moves the position of the current instance to the attribute with the specified qualified name.
24,109
def in6_chksum(nh, u, p):
    ph6 = PseudoIPv6()
    ph6.nh = nh
    rthdr = 0
    hahdr = 0
    final_dest_addr_found = 0
    while u is not None and not isinstance(u, IPv6):
        if (isinstance(u, IPv6ExtHdrRouting) and
                u.segleft != 0 and len(u.addresses) != 0 and
                final_dest_addr_found == 0):
            rthdr = u.addresses[-1]
            final_dest_addr_found = 1
        elif (isinstance(u, IPv6ExtHdrSegmentRouting) and
                u.segleft != 0 and len(u.addresses) != 0 and
                final_dest_addr_found == 0):
            rthdr = u.addresses[0]
            final_dest_addr_found = 1
        elif (isinstance(u, IPv6ExtHdrDestOpt) and (len(u.options) == 1) and
                isinstance(u.options[0], HAO)):
            hahdr = u.options[0].hoa
        u = u.underlayer
    if u is None:
        warning("No IPv6 underlayer to compute checksum. Leaving null.")
        return 0
    if hahdr:
        ph6.src = hahdr
    else:
        ph6.src = u.src
    if rthdr:
        ph6.dst = rthdr
    else:
        ph6.dst = u.dst
    ph6.uplen = len(p)
    ph6s = raw(ph6)
    return checksum(ph6s + p)
As specified in RFC 2460 - 8.1 Upper-Layer Checksums. Performs IPv6 upper-layer checksum computation.

Provided parameters are:

- 'nh' : value of the upper-layer protocol
- 'u'  : upper-layer instance (TCP, UDP, ICMPv6*, ...). Instance must be provided with all underlayers (IPv6 and all extension headers, for example)
- 'p'  : the payload of the upper layer provided as a string

The function operates by filling a pseudo-header class instance (PseudoIPv6) with:

- the Next Header value
- the address of the _final_ destination (if a Routing Header with a non-zero segleft field is present in underlayer classes, the last address is used)
- the address of the _real_ source (basically the source address of an IPv6 class instance available in the underlayer, or the source address in the HAO option if a Destination Options header found in the underlayer includes this option)
- the length of the provided payload string ('p')
24,110
def _print_divide(self):
    for space in self.AttributesLength:
        self.StrTable += "+ " + "- " * space
    self.StrTable += "+" + "\n"
Prints all those table line dividers.
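For instance, with AttributesLength = [3, 5] the appended divider reads as follows (a standalone sketch of the string being built):

spaces = [3, 5]
divider = ""
for space in spaces:
    divider += "+ " + "- " * space
divider += "+" + "\n"
print(divider, end="")  # + - - - + - - - - - +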
24,111
def data_to_imagesurface (data, **kwargs):
    import cairo

    data = np.atleast_2d (data)
    if data.ndim != 2:
        # Error message assumed; the literal was elided in the source.
        raise ValueError ('data array may not have more than 2 dimensions')

    argb32 = data_to_argb32 (data, **kwargs)

    format = cairo.FORMAT_ARGB32
    height, width = argb32.shape
    stride = cairo.ImageSurface.format_stride_for_width (format, width)
    if argb32.strides[0] != stride:
        raise ValueError ('argb32 array has unexpected stride')

    return cairo.ImageSurface.create_for_data (argb32, format, width,
                                               height, stride)
Turn arbitrary data values into a Cairo ImageSurface. The method and arguments are the same as data_to_argb32, except that the data array will be treated as 2D, and higher dimensionalities are not allowed. The return value is a Cairo ImageSurface object. Combined with the write_to_png() method on ImageSurfaces, this is an easy way to quickly visualize 2D data.
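A usage sketch, assuming the module above is importable and pycairo is installed (data_to_argb32 handles the colormapping):

import numpy as np

data = np.random.normal(size=(64, 64))  # any 2D array of values
surface = data_to_imagesurface(data)    # kwargs pass through to data_to_argb32
surface.write_to_png("quicklook.png")   # fast visual check of the data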
24,112
def rvs(self, size=1, param=None):
    if param is not None:
        dtype = [(param, float)]
    else:
        dtype = [(p, float) for p in self.params]
    size = int(size)
    arr = numpy.zeros(size, dtype=dtype)
    draws = self._kde.resample(size)
    draws = {param: draws[ii, :]
             for ii, param in enumerate(self.params)}
    for (param, _) in dtype:
        try:
            tparam = self._tparams[param]
            tdraws = {tparam: draws[param]}
            draws[param] = self._transforms[tparam].inverse_transform(
                tdraws)[param]
        except KeyError:
            pass
        arr[param] = draws[param]
    return arr
Gives a set of random values drawn from the kde. Parameters ---------- size : {1, int} The number of values to generate; default is 1. param : {None, string} If provided, will just return values for the given parameter. Otherwise, returns random values for each parameter. Returns ------- structured array The random values in a numpy structured array. If a param was specified, the array will only have an element corresponding to the given parameter. Otherwise, the array will have an element for each parameter in self's params.
24,113
def push(self, next_dfa, next_state, node_type, lineno, column):
    dfa, state, node = self.stack[-1]
    new_node = Node(node_type, None, [], lineno, column)
    self.stack[-1] = (dfa, next_state, node)
    self.stack.append((next_dfa, 0, new_node))
Push a terminal and adjust the current state.
24,114
def to_match(self):
    # Template string assumed; the literal was elided in the source but
    # must accept the three named fields below.
    template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'
    return template.format(
        field_name=self.field.to_match(),
        lower_bound=self.lower_bound.to_match(),
        upper_bound=self.upper_bound.to_match())
Return a unicode object with the MATCH representation of this BetweenClause.
24,115
def check_auth(name, sock_dir=None, queue=None, timeout=300):
    # Elided string literals reconstructed; the event tag follows the
    # standard Salt minion-start convention, log wording is assumed.
    event = salt.utils.event.SaltEvent('master', sock_dir, listen=True)
    starttime = time.mktime(time.localtime())
    newtimeout = timeout
    log.debug('In check_auth, waiting for %s to become available', name)
    while newtimeout > 0:
        newtimeout = timeout - (time.mktime(time.localtime()) - starttime)
        ret = event.get_event(full=True)
        if ret is None:
            continue
        if ret['tag'] == 'salt/minion/{0}/start'.format(name):
            queue.put(name)
            newtimeout = 0
            log.debug('Minion %s is ready to receive commands', name)
This function is called from a multiprocess instance, to wait for a minion to become available to receive salt commands
24,116
def change_email(self, email):
    def cb():
        if not utils.is_email_valid(email):
            raise exceptions.AuthError("Email address invalid")
        self.user.change_email(email)
        return email
    return signals.user_update(self, ACTIONS["EMAIL"], cb,
                               {"email": self.email})
Change user's login email. :param email: the new email address :return:
24,117
def create_venv(local='y', test='y', general='y'):
    # The command-line literals were lost in extraction; the 'y' defaults
    # follow the docstring, and the virtualenv/pip arguments below are
    # assumptions that match the shapes of the original calls.
    if not path.isdir(project_paths.venv):
        execute('virtualenv', '--quiet', '--python=python',
                project_paths.venv)
        project.execute_python('-m', 'pip', 'install')
    if local.lower() == 'y' and project_paths.local_requirements:
        project.execute_pip('install', '--quiet', '-r',
                            project_paths.local_requirements)
    if test.lower() == 'y' and project_paths.test_requirements:
        project.execute_pip('install', '--quiet', '-r',
                            project_paths.test_requirements)
    if general.lower() == 'y' and project_paths.requirements_txt:
        project.execute_pip('install', '--quiet', '-r',
                            project_paths.requirements_txt)
Create virtualenv w/requirements. Specify y/n for local/test/general to control installation.
24,118
def _dump_query_timestamps(self, current_time: float):
    windows = [10, 11, 15, 20, 30, 60]
    print("GraphQL requests:", file=sys.stderr)
    for query_hash, times in self._graphql_query_timestamps.items():
        print(" {}".format(query_hash), file=sys.stderr)
        for window in windows:
            reqs_in_sliding_window = sum(t > current_time - window * 60
                                         for t in times)
            print("  last {} minutes: {} requests".format(
                window, reqs_in_sliding_window), file=sys.stderr)
Output to stderr the number of GraphQL requests, grouped by query_hash, issued within several recent time windows.
24,119
def notify(self, message, priority='normal', timeout=0, block=False):
    def build_line(msg, prio):
        cols = urwid.Columns([urwid.Text(msg)])
        # The theming attribute is 'global_notify_<priority>' per the
        # docstring below.
        att = settings.get_theming_attribute('global', 'notify_' + prio)
        return urwid.AttrMap(cols, att)

    msgs = [build_line(message, priority)]
    if not self._notificationbar:
        self._notificationbar = urwid.Pile(msgs)
    else:
        newpile = self._notificationbar.widget_list + msgs
        self._notificationbar = urwid.Pile(newpile)
    self.update()

    def clear(*_):
        self.clear_notify(msgs)

    if block:
        # Hint text, overlay placement and the key name are assumptions;
        # the literals were elided in the source.
        txt = build_line('(hit any key to proceed)', priority)
        overlay = urwid.Overlay(txt, self.root_widget,
                                ('fixed left', 0), ('fixed right', 0),
                                ('fixed bottom', 0), None)
        self.show_as_root_until_keypress(overlay, 'esc', afterwards=clear)
    else:
        if timeout >= 0:
            if timeout == 0:
                timeout = settings.get('notify_timeout')
            self.mainloop.set_alarm_in(timeout, clear)
    return msgs[0]
opens notification popup. :param message: message to print :type message: str :param priority: priority string, used to format the popup: currently, 'normal' and 'error' are defined. If you use 'X' here, the attribute 'global_notify_X' is used to format the popup. :type priority: str :param timeout: seconds until message disappears. Defaults to the value of 'notify_timeout' in the general config section. A negative value means never time out. :type timeout: int :param block: this notification blocks until a keypress is made :type block: bool :returns: an urwid widget (this notification) that can be handed to :meth:`clear_notify` for removal
24,120
def verify_ticket(self, ticket, **kwargs):
    try:
        from xml.etree import ElementTree
    except ImportError:
        from elementtree import ElementTree

    page = self.fetch_saml_validation(ticket)
    try:
        user = None
        attributes = {}
        response = page.content
        tree = ElementTree.fromstring(response)
        # Elided tag and attribute literals reconstructed from the
        # SAML 1.0 response schema.
        success = tree.find('.//' + SAML_1_0_PROTOCOL_NS + 'StatusCode')
        if success is not None and success.attrib['Value'].endswith('Success'):
            name_identifier = tree.find(
                './/' + SAML_1_0_ASSERTION_NS + 'NameIdentifier')
            if name_identifier is not None:
                user = name_identifier.text
            attrs = tree.findall('.//' + SAML_1_0_ASSERTION_NS + 'Attribute')
            for at in attrs:
                if self.username_attribute in list(at.attrib.values()):
                    user = at.find(
                        SAML_1_0_ASSERTION_NS + 'AttributeValue').text
                    attributes['username'] = user
                values = at.findall(SAML_1_0_ASSERTION_NS + 'AttributeValue')
                if len(values) > 1:
                    values_array = []
                    for v in values:
                        values_array.append(v.text)
                    attributes[at.attrib['AttributeName']] = values_array
                else:
                    attributes[at.attrib['AttributeName']] = values[0].text
        return user, attributes, None
    finally:
        page.close()
Verifies CAS 3.0+ XML-based authentication ticket and returns extended attributes. @date: 2011-11-30 @author: Carlos Gonzalez Vila <[email protected]> Returns username and attributes on success and None,None on failure.
24,121
def contains_point(self, x, y, d=2):
    if self.path is not None and len(self.path) > 1 \
            and self.path.contains(x, y):
        # (x, y) lies inside the path; it is on the stroke outline only if
        # at least one neighboring point within distance d falls outside.
        if not self.path.contains(x + d, y) \
                or not self.path.contains(x, y + d) \
                or not self.path.contains(x - d, y) \
                or not self.path.contains(x, y - d) \
                or not self.path.contains(x + d, y + d) \
                or not self.path.contains(x - d, y - d) \
                or not self.path.contains(x + d, y - d) \
                or not self.path.contains(x - d, y + d):
            return True
    return False
Returns true when x, y is on the path stroke outline.
24,122
def make_ar_transition_matrix(coefficients):
    top_row = tf.expand_dims(coefficients, -2)
    coef_shape = dist_util.prefer_static_shape(coefficients)
    batch_shape, order = coef_shape[:-1], coef_shape[-1]
    remaining_rows = tf.concat([
        tf.eye(order - 1, dtype=coefficients.dtype, batch_shape=batch_shape),
        tf.zeros(tf.concat([batch_shape, (order - 1, 1)], axis=0),
                 dtype=coefficients.dtype)
    ], axis=-1)
    ar_matrix = tf.concat([top_row, remaining_rows], axis=-2)
    return ar_matrix
Build transition matrix for an autoregressive StateSpaceModel.

When applied to a vector of previous values, this matrix computes the expected new value (summing the previous states according to the autoregressive coefficients) in the top dimension of the state space, and moves all previous values down by one dimension, 'forgetting' the final (least recent) value. That is, it looks like this:

```
ar_matrix = [ coefs[0], coefs[1], ..., coefs[order]
              1.,       0 ,      ..., 0.
              0.,       1.,      ..., 0.
              ...
              0.,       0.,      ..., 1.,       0. ]
```

Args:
    coefficients: float `Tensor` of shape `concat([batch_shape, [order]])`.

Returns:
    ar_matrix: float `Tensor` with shape `concat([batch_shape, [order, order]])`.
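A NumPy illustration of the matrix this builds (independent of TensorFlow), for hypothetical AR(2) coefficients [0.8, -0.2]:

import numpy as np

coefs = np.array([0.8, -0.2])
ar_matrix = np.vstack([coefs, np.eye(1, 2)])
print(ar_matrix)
# [[ 0.8 -0.2]
#  [ 1.   0. ]]

state = np.array([1.0, 2.0])  # [x[t-1], x[t-2]]
print(ar_matrix @ state)      # [0.4 1. ] -> [new value, shifted x[t-1]]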
24,123
def get_service(self, name):
    if not self.discovery_strategies:
        raise ServiceConfigurationError("No service registry available")

    cached = self.remote_service_cache.get_entry(name)
    if cached:
        return cached.remote_service

    for strategy in self.discovery_strategies:
        endpoints = strategy.locate(name)
        if not endpoints:
            continue
        random.shuffle(endpoints)
        for url in endpoints:
            try:
                service = get_remote_service_instance_for_url(url)
                self.remote_service_cache.add_entry(name, service)
                return service
            except ConnectionError:
                continue

    raise ValueError("Service could not be located")
Locates a remote service by name. The name can be a glob-like pattern (``"project.worker.*"``). If multiple services match the given name, a random instance will be chosen. There might be multiple services that match a given name if there are multiple services with the same name running, or when the pattern matches multiple different services. .. todo:: Make this use self.io_loop to resolve the request. The current implementation is blocking and slow :param name: a pattern for the searched service. :return: a :py:class:`gemstone.RemoteService` instance :raises ValueError: when the service can not be located :raises ServiceConfigurationError: when there is no configured discovery strategy
24,124
def min(a, axis=None):
    axes = _normalise_axis(axis, a)
    assert axes is not None and len(axes) == 1
    return _Aggregation(a, axes[0],
                        _MinStreamsHandler, _MinMaskedStreamsHandler,
                        a.dtype, {})
Request the minimum of an Array over any number of axes.

.. note:: Currently limited to operating on a single axis.

Parameters
----------
a : Array object
    The object whose minimum is to be found.
axis : None, or int, or iterable of ints
    Axis or axes along which the operation is performed. The default (axis=None) is to perform the operation over all the dimensions of the input array. The axis may be negative, in which case it counts from the last to the first axis. If axis is a tuple of ints, the operation is performed over multiple axes.

Returns
-------
out : Array
    The Array representing the requested minimum.
24,125
def _find_by_name(tree_data, name, is_dir, start_at):
    try:
        item = tree_data[start_at]
        if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
            tree_data[start_at] = None
            return item
    except IndexError:
        pass
    for index, item in enumerate(tree_data):
        if item and item[2] == name and S_ISDIR(item[1]) == is_dir:
            tree_data[index] = None
            return item
    return None
Return the data entry matching the given name and tree mode, or None. Before the item is returned, the respective data item is set to None in the tree_data list to mark it as done.
24,126
def relativefrom(base, path):
    base_parts = list(iteratepath(base))
    path_parts = list(iteratepath(path))

    common = 0
    for component_a, component_b in zip(base_parts, path_parts):
        if component_a != component_b:
            break
        common += 1

    return "/".join([".."] * (len(base_parts) - common) + path_parts[common:])
Return a path relative from a given base path. Insert backrefs as appropriate to reach the path from the base.

Arguments:
    base (str): Path to a directory.
    path (str): Path to make relative.

Returns:
    str: the path to ``path`` relative to ``base``.

>>> relativefrom("foo/bar", "baz/index.html")
'../../baz/index.html'
24,127
def convert_to_argument(self):
    field_list = [
        "action", "nargs", "const", "default", "type",
        "choices", "required", "help", "metavar", "dest"
    ]
    return (
        self.name,
        {field: getattr(self, field)
         for field in field_list
         if getattr(self, field) is not None}
    )
Convert the Argument object to a tuple for use in :meth:`~argparse.ArgumentParser.add_argument` calls on the parser.
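A minimal sketch of how the returned tuple feeds argparse; the Argument-like object and its field values below are hypothetical stand-ins.

import argparse
from types import SimpleNamespace

# Hypothetical Argument-like object; fields left as None are dropped
# from the generated kwargs.
arg = SimpleNamespace(
    name="--verbose", action="store_true", nargs=None, const=None,
    default=None, type=None, choices=None, required=None,
    help="enable chatty output", metavar=None, dest=None)

name, kwargs = convert_to_argument(arg)
# -> ('--verbose', {'action': 'store_true', 'help': 'enable chatty output'})

parser = argparse.ArgumentParser()
parser.add_argument(name, **kwargs)
print(parser.parse_args(["--verbose"]).verbose)  # True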
24,128
def _removePunctuation(text_string):
    try:
        # Python 2 str.translate signature.
        return text_string.translate(None, _punctuation)
    except TypeError:
        # Python 3 signature.
        return text_string.translate(str.maketrans('', '', _punctuation))
Removes punctuation symbols from a string.

:param text_string: A string.
:type text_string: str.
:returns: The input ``text_string`` with punctuation symbols removed.
:rtype: str.

>>> from rnlp.textprocessing import __removePunctuation
>>> example = 'Hello, World!'
>>> __removePunctuation(example)
'Hello World'
24,129
def compare(self,
            reference_ids: Iterable,
            query_profiles: Iterable[Iterable],
            method: Optional) -> SimResult:
    pass
Given two lists of entities (classes, individuals), resolves them to some type (phenotypes, go terms, etc) and returns their similarity
24,130
def email_link_expired(self, now=None):
    if not now:
        now = datetime.datetime.utcnow()
    return self.email_link_expires < now
Check if email link expired
24,131
def is_armed(self):
    alarm_code = self.get_armed_status()
    if alarm_code == YALE_STATE_ARM_FULL:
        return True
    if alarm_code == YALE_STATE_ARM_PARTIAL:
        return True
    return False
Return True or False if the system is armed in any way
24,132
def isosceles(cls, origin=None, base=1, alpha=90):
    o = Point(origin)
    base = o.x + base
    return cls(o, [base, o.y], [base / 2, o.y + base])
:origin: optional Point :base: optional float describing triangle base length :return: Triangle initialized with points comprising an isosceles triangle. XXX isosceles triangle definition
24,133
def add_quality_score_vs_no_of_observations_section(self):
    # The table and plot-config key literals below were elided in the
    # source; the names used here are assumptions based on the GATK
    # recalibration report format and the MultiQC linegraph API.
    sample_data = []
    data_labels = []
    for rt_type_name, rt_type in recal_table_type._asdict().items():
        sample_tables = self.gatk_base_recalibrator[rt_type]['quality_quantization_map']
        if len(sample_tables) == 0:
            continue
        sample_data.append({
            sample: {int(x): int(y)
                     for x, y in zip(table['QualityScore'], table['Count'])}
            for sample, table in sample_tables.items()
        })
        sample_y_sums = {
            sample: sum(int(y) for y in table['Count'])
            for sample, table in sample_tables.items()
        }
        sample_data.append({
            sample: {int(x): float(y) / sample_y_sums[sample]
                     for x, y in zip(table['QualityScore'], table['Count'])}
            for sample, table in sample_tables.items()
        })
        flat_proportions = [float(y) / sample_y_sums[sample]
                            for sample, table in sample_tables.items()
                            for y in table['Count']]
        prop_ymax = max(flat_proportions)
        data_labels.append({
            'name': "{} Count".format(
                rt_type_name.capitalize().replace('_', ' ')),
            'ylab': 'Count',
        })
        data_labels.append({
            'ymax': prop_ymax,
            'name': "{} Percent".format(
                rt_type_name.capitalize().replace('_', ' ')),
            'ylab': 'Percent',
        })
    plot = linegraph.plot(
        sample_data,
        pconfig={
            'title': "Observed Quality Score Counts",
            'id': 'gatk_bqsr_quality_scores',
            'xlab': 'Observed Quality Score',
            'ylab': 'Count',
            'logswitch': False,
            'data_labels': data_labels,
        })
    self.add_section(
        name='Observed Quality Scores',
        description=(
            'This plot shows the distribution of observed quality scores.'
        ),
        helptext=(
            "See [GATK's description of BQSR]"
            "(https://gatkforums.broadinstitute.org/gatk/discussion/44/"
            "base-quality-score-recalibration-bqsr)."
        ),
        plot=plot,
    )
Add a section for the quality score vs number of observations line plot
24,134
def add_column(self, column):
    self.columns.append(column.name)
    self.column_funcs.append(column.path)
    if column.mask is not None:
        self.mask_parts.add(column.mask)
Add a new column along with a formatting function.
24,135
def auth(view, **kwargs):
    endpoint_namespace = view.__name__ + ":%s"
    view_name = view.__name__

    UserModel = kwargs.pop("model")
    User = UserModel.User

    login_view = endpoint_namespace % "login"
    on_signin_view = kwargs.get("signin_view", "Index:index")
    on_signout_view = kwargs.get("signout_view", "Index:index")
    template_dir = kwargs.get("template_dir", "Juice/Plugin/User/Account")
    template_page = template_dir + "/%s.html"

    login_manager = LoginManager()
    login_manager.login_view = login_view
    login_manager.login_message_category = "error"
    init_app(login_manager.init_app)

    menu_context = view
    _menu = kwargs.get("menu", {})
    if _menu:
        @menu(**_menu)
        class UserAccountMenu(object):
            pass
        menu_context = UserAccountMenu

    @login_manager.user_loader
    def load_user(userid):
        return User.get(userid)

    View.g(__USER_AUTH_ENABLED__=True)

    class Auth(object):
        decorators = view.decorators + [login_required]
        SESSION_KEY_SET_EMAIL_DATA = "set_email_tmp_data"
        TEMP_DATA_KEY = "login_tmp_data"

        @property
        def tmp_data(self):
            return session[self.TEMP_DATA_KEY]

        @tmp_data.setter
        def tmp_data(self, data):
            session[self.TEMP_DATA_KEY] = data

        def _login_enabled(self):
            if self.get_config("USER_AUTH_ALLOW_LOGIN") is not True:
                abort("UserLoginDisabledError")

        def _signup_enabled(self):
            if self.get_config("USER_AUTH_ALLOW_SIGNUP") is not True:
                abort("UserSignupDisabledError")

        def _oauth_enabled(self):
            if self.get_config("USER_AUTH_ALLOW_OAUTH") is not True:
                abort("UserOAuthDisabledError")

        def _send_reset_password(self, user):
            delivery = self.get_config("USER_AUTH_PASSWORD_RESET_METHOD")
            token_reset_ttl = self.get_config("USER_AUTH_TOKEN_RESET_TTL", 60)
            new_password = None
            if delivery.upper() == "TOKEN":
                token = user.set_temp_login(token_reset_ttl)
                url = url_for(endpoint_namespace % "reset_password",
                              token=token, _external=True)
            else:
                new_password = user.set_password(password=None, random=True)
                url = url_for(endpoint_namespace % "login", _external=True)
            mail.send(template="reset-password.txt",
                      method_=delivery,
                      to=user.email,
                      name=user.email,
                      url=url,
                      new_password=new_password)

        @classmethod
        def login_user(cls, user):
            login_user(user)
            now = datetime.datetime.now()
            user.update(last_login=now, last_visited=now)

        @menu("Login", endpoint=endpoint_namespace % "login",
              visible_with_auth_user=False, extends=menu_context)
        @template(template_page % "login",
                  endpoint_namespace=endpoint_namespace)
        @route("login/", methods=["GET", "POST"],
               endpoint=endpoint_namespace % "login")
        @no_login_required
        def login(self):
            self._login_enabled()
            logout_user()
            self.tmp_data = None
            self.meta_tags(title="Login")
            if request.method == "POST":
                email = request.form.get("email").strip()
                password = request.form.get("password").strip()
                if not email or not password:
                    flash("Email or Password is empty", "error")
                    return redirect(url_for(login_view,
                                            next=request.form.get("next")))
                user = User.get_by_email(email)
                if user and user.password_hash \
                        and user.password_matched(password):
                    self.login_user(user)
                    return redirect(request.form.get("next")
                                    or url_for(on_signin_view))
                else:
                    flash("Email or Password is invalid", "error")
                    return redirect(url_for(login_view,
                                            next=request.form.get("next")))
            return dict(login_url_next=request.args.get("next", ""),
                        login_url_default=url_for(on_signin_view),
                        signup_enabled=self.get_config("USER_AUTH_ALLOW_SIGNUP"),
                        oauth_enabled=self.get_config("USER_AUTH_ALLOW_LOGIN"))

        @menu("Logout", endpoint=endpoint_namespace % "logout",
              visible_with_auth_user=True, order=100, extends=menu_context)
        @route("logout/", endpoint=endpoint_namespace % "logout")
        @no_login_required
        def logout(self):
            logout_user()
            return redirect(url_for(on_signout_view or login_view))

        @menu("Signup", endpoint=endpoint_namespace % "signup",
              visible_with_auth_user=False, extends=menu_context)
        @template(template_page % "signup",
                  endpoint_namespace=endpoint_namespace)
        @route("signup/", methods=["GET", "POST"],
               endpoint=endpoint_namespace % "signup")
        @no_login_required
        def signup(self):
            self._login_enabled()
            self._signup_enabled()
            self.meta_tags(title="Signup")
            if request.method == "POST":
                if not recaptcha.verify():
                    flash("Invalid Security code", "error")
                    return redirect(url_for(endpoint_namespace % "signup",
                                            next=request.form.get("next")))
                try:
                    name = request.form.get("name")
                    email = request.form.get("email")
                    password = request.form.get("password")
                    password2 = request.form.get("password2")
                    profile_image_url = request.form.get("profile_image_url",
                                                         None)
                    if not name:
                        raise UserError("Name is required")
                    elif not utils.is_valid_email(email):
                        raise UserError("Invalid email address '%s'" % email)
                    elif not password.strip() \
                            or password.strip() != password2.strip():
                        raise UserError("Passwords don't match")
                    elif not utils.is_valid_password(password):
                        raise UserError("Invalid password")
                    else:
                        # The remainder of this handler was truncated in the
                        # source; creating and signing in the user, and the
                        # GET-request return value, are assumed behavior.
                        user = User.new(email=email,
                                        password=password.strip(),
                                        name=name,
                                        profile_image_url=profile_image_url,
                                        signup_method="email")
                        self.login_user(user)
                        return redirect(request.form.get("next")
                                        or url_for(on_signin_view))
                except ApplicationError as ex:
                    flash(ex.message, "error")
                    return redirect(url_for(endpoint_namespace % "signup",
                                            next=request.form.get("next")))
            return dict(login_url_next=request.args.get("next", ""))

        def change_password_handler(self, user_context):
            # Reassembled from a fragment that was spliced into ``signup``
            # in the source; it is referenced below by ``reset_password``.
            password = request.form.get("password")
            password2 = request.form.get("password2")
            if password.strip():
                if password.strip() != password2.strip():
                    raise UserWarning("Passwords don't match")
                elif not utils.is_valid_password(password):
                    raise UserWarning("Invalid password")
                else:
                    user_context.set_password(password)
                    return True
            else:
                raise UserWarning("Password is empty")

        @route("oauth-login/<provider>", methods=["GET", "POST"],
               endpoint=endpoint_namespace % "oauth_login")
        @template(template_page % "oauth_login",
                  endpoint_namespace=endpoint_namespace)
        @no_login_required
        def oauth_login(self, provider):
            self._login_enabled()
            self._oauth_enabled()
            provider = provider.lower()
            result = oauth.login(provider)
            response = oauth.response
            popup_js_custom = {"action": "", "url": ""}
            if result:
                if result.error:
                    pass
                elif result.user:
                    result.user.update()
                    oauth_user = result.user
                    user = User.get_by_oauth(provider=provider,
                                             provider_user_id=oauth_user.id)
                    if not user:
                        if oauth_user.email \
                                and User.get_by_email(oauth_user.email):
                            flash("Account already exists with the email "
                                  "'%s'. Try to login or retrieve your "
                                  "password" % oauth_user.email, "error")
                            popup_js_custom.update({
                                "action": "redirect",
                                "url": url_for(login_view,
                                               next=request.form.get("next"))
                            })
                        else:
                            tmp_data = {
                                "is_oauth": True,
                                "provider": provider,
                                "id": oauth_user.id,
                                "name": oauth_user.name,
                                "picture": oauth_user.picture,
                                "first_name": oauth_user.first_name,
                                "last_name": oauth_user.last_name,
                                "email": oauth_user.email,
                                "link": oauth_user.link
                            }
                            if not oauth_user.email:
                                self.tmp_data = tmp_data
                                popup_js_custom.update({
                                    "action": "redirect",
                                    "url": url_for(endpoint_namespace
                                                   % "setup_login")
                                })
                            else:
                                try:
                                    picture = oauth_user.picture
                                    user = User.new(
                                        email=oauth_user.email,
                                        name=oauth_user.name,
                                        signup_method=provider,
                                        profile_image_url=picture)
                                    user.add_oauth(
                                        provider,
                                        oauth_user.provider_id,
                                        name=oauth_user.name,
                                        email=oauth_user.email,
                                        profile_image_url=oauth_user.picture,
                                        link=oauth_user.link)
                                except ModelError as e:
                                    flash(e.message, "error")
                                    popup_js_custom.update({
                                        "action": "redirect",
                                        "url": url_for(endpoint_namespace
                                                       % "login")
                                    })
                    if user:
                        self.login_user(user)
                    return dict(popup_js=result.popup_js(
                                    custom=popup_js_custom),
                                template_=template_page % "oauth_login")
            return response

        @template(template_page % "setup_login",
                  endpoint_namespace=endpoint_namespace)
        @route("setup-login/", methods=["GET", "POST"],
               endpoint=endpoint_namespace % "setup_login")
        def setup_login(self):
            self._login_enabled()
            self.meta_tags(title="Setup Login")
            if current_user.is_authenticated() and current_user.email:
                return redirect(url_for(endpoint_namespace
                                        % "account_settings"))
            if self.tmp_data:
                if request.method == "POST":
                    if not self.tmp_data["is_oauth"]:
                        return redirect(endpoint_namespace % "login")
                    try:
                        email = request.form.get("email")
                        password = request.form.get("password")
                        password2 = request.form.get("password2")
                        if not utils.is_valid_email(email):
                            raise UserError(
                                "Invalid email address '%s'" % email)
                        elif User.get_by_email(email):
                            raise UserError("An account exists already with "
                                            "this email address '%s'" % email)
                        elif not password.strip() \
                                or password.strip() != password2.strip():
                            raise UserError("Passwords don't match")
                        elif not utils.is_valid_password(password):
                            raise UserError("Invalid password")
                        else:
                            user = User.new(
                                email=email,
                                password=password.strip(),
                                name=self.tmp_data["name"],
                                profile_image_url=self.tmp_data["picture"],
                                signup_method=self.tmp_data["provider"])
                            user.add_oauth(
                                self.tmp_data["provider"],
                                self.tmp_data["id"],
                                name=self.tmp_data["name"],
                                email=email,
                                profile_image_url=self.tmp_data["picture"],
                                link=self.tmp_data["link"])
                            self.login_user(user)
                            self.tmp_data = None
                            return redirect(request.form.get("next")
                                            or url_for(on_signin_view))
                    except ApplicationError as ex:
                        flash(ex.message, "error")
                        return redirect(url_for(endpoint_namespace % "login"))
                return dict(provider=self.tmp_data)
            else:
                return redirect(url_for(endpoint_namespace % "login"))

        @route("reset-password/<token>", methods=["GET", "POST"],
               endpoint=endpoint_namespace % "reset_password")
        @template(template_page % "reset_password",
                  endpoint_namespace=endpoint_namespace)
        @no_login_required
        def reset_password(self, token):
            self._login_enabled()
            logout_user()
            self.meta_tags(title="Reset Password")
            user = User.get_by_temp_login(token)
            if user:
                if not user.has_temp_login:
                    return redirect(url_for(on_signin_view))
                if request.method == "POST":
                    try:
                        self.change_password_handler(user_context=user)
                        user.clear_temp_login()
                        flash("Password updated successfully!", "success")
                        return redirect(url_for(on_signin_view))
                    except Exception as ex:
                        flash("Error: %s" % ex.message, "error")
                        return redirect(
                            url_for(endpoint_namespace % "reset_password",
                                    token=token))
                else:
                    return dict(token=token)
            else:
                abort(404, "Invalid token")

        @route("oauth-connect", methods=["POST"],
               endpoint="%s:oauth_connect" % endpoint_namespace)
        def oauth_connect(self):
            email = request.form.get("email").strip()
            name = request.form.get("name").strip()
            provider = request.form.get("provider").strip()
            provider_user_id = request.form.get("provider_user_id").strip()
            image_url = request.form.get("image_url").strip()
            next = request.form.get("next", "")
            try:
                current_user.oauth_connect(provider=provider,
                                           provider_user_id=provider_user_id,
                                           email=email,
                                           name=name,
                                           image_url=image_url)
            except Exception as ex:
                flash("Unable to link your account", "error")
            return redirect(url_for(endpoint_namespace % "account_settings"))

    return Auth
This plugin allows users to log in to the application.

kwargs:
    - signin_view
    - signout_view
    - template_dir
    - menu:
        - name
        - group_name
        - ...

@plugin(user.login, model=model.User)
class MyAccount(Juice):
    pass
24,136
def K_run_converging_Crane(D_run, D_branch, Q_run, Q_branch, angle=90):
    beta = (D_branch / D_run)
    beta2 = beta * beta
    Q_comb = Q_run + Q_branch
    Q_ratio = Q_branch / Q_comb
    if angle < 75.0:
        C = 1.0
    else:
        # Special case for the 90 degree formula; the switch happens at 75.
        return 1.55 * (Q_ratio) - Q_ratio * Q_ratio
    D, E = 0.0, 1.0
    F = interp(angle, run_converging_Crane_angles, run_converging_Crane_Fs)
    K = C * (1. + D * (Q_ratio / beta2)**2 - E * (1. - Q_ratio)**2
             - F / beta2 * Q_ratio**2)
    return K
Returns the loss coefficient for the run of a converging tee or wye according to the Crane method [1]_.

.. math::
    K_{branch} = C\left[1 + D\left(\frac{Q_{branch}}{Q_{comb}\cdot \beta_{branch}^2}\right)^2 - E\left(1 - \frac{Q_{branch}}{Q_{comb}} \right)^2 - \frac{F}{\beta_{branch}^2} \left(\frac{Q_{branch}} {Q_{comb}}\right)^2\right]

.. math::
    \beta_{branch} = \frac{D_{branch}}{D_{comb}}

In the above equation, C=1, D=0, E=1. See the notes for definitions of F and also the special case of 90°.

Parameters
----------
D_run : float
    Diameter of the straight-through inlet portion of the tee or wye [m]
D_branch : float
    Diameter of the pipe attached at an angle to the straight-through, [m]
Q_run : float
    Volumetric flow rate in the straight-through inlet of the tee or wye, [m^3/s]
Q_branch : float
    Volumetric flow rate in the pipe attached at an angle to the straight-through, [m^3/s]
angle : float, optional
    Angle the branch makes with the straight-through (tee=90, wye<90) [degrees]

Returns
-------
K : float
    Loss coefficient of run with respect to the velocity and inside diameter of the combined flow outlet [-]

Notes
-----
F is linearly interpolated from the table of angles below. There is no cutoff to prevent angles from being larger or smaller than 30 or 60 degrees. The switch to the special 90° happens at 75°.

+-----------+------+
| Angle [°] | F    |
+===========+======+
| 30        | 1.74 |
+-----------+------+
| 45        | 1.41 |
+-----------+------+
| 60        | 1    |
+-----------+------+

For the special case of 90°, the formula used is as follows.

.. math::
    K_{run} = 1.55\left(\frac{Q_{branch}}{Q_{comb}} \right) - \left(\frac{Q_{branch}}{Q_{comb}}\right)^2

Examples
--------
Example 7-35 of [1]_. A DN100 schedule 40 tee has 1135 liters/minute of water passing through the straight leg, and 380 liters/minute of water converging with it through a 90° branch. Calculate the loss coefficient in the run. The calculated value there is 0.03258.

>>> K_run_converging_Crane(0.1023, 0.1023, 0.018917, 0.00633)
0.32575847854551254

References
----------
.. [1] Crane Co. Flow of Fluids Through Valves, Fittings, and Pipe. Crane, 2009.
24,137
def _get_compose_volumes(app_name, assembled_specs):
    volumes = []
    volumes.append(_get_cp_volume_mount(app_name))
    volumes += get_app_volume_mounts(app_name, assembled_specs)
    return volumes
This returns formatted volume specifications for a docker-compose app. We mount the app as well as any libs it needs so that local code is used in our container, instead of whatever code was in the docker image. Additionally, we create a volume for the /cp directory used by Dusty to facilitate easy file transfers using `dusty cp`.
24,138
def validate_boundary(reference_intervals, estimated_intervals, trim):
    # When trimming, the first and last boundaries are discarded, so at
    # least two intervals are required; otherwise one suffices.  The else
    # branch was missing in the source and is an assumption.
    if trim:
        min_size = 2
    else:
        min_size = 1
    if len(reference_intervals) < min_size:
        warnings.warn("Reference intervals are empty.")
    if len(estimated_intervals) < min_size:
        warnings.warn("Estimated intervals are empty.")
    for intervals in [reference_intervals, estimated_intervals]:
        util.validate_intervals(intervals)
Checks that the input annotations to a segment boundary estimation metric (i.e. one that only takes in segment intervals) look like valid segment times, and throws helpful errors if not. Parameters ---------- reference_intervals : np.ndarray, shape=(n, 2) reference segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. estimated_intervals : np.ndarray, shape=(m, 2) estimated segment intervals, in the format returned by :func:`mir_eval.io.load_intervals` or :func:`mir_eval.io.load_labeled_intervals`. trim : bool will the start and end events be trimmed?
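A small usage sketch with two well-formed interval arrays; the second call warns because the reference set is empty.

import numpy as np

ref = np.array([[0.0, 5.0], [5.0, 10.0]])
est = np.array([[0.0, 4.5], [4.5, 10.0]])
validate_boundary(ref, est, trim=True)               # passes silently
validate_boundary(np.empty((0, 2)), est, trim=True)  # warns: reference empty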
24,139
def draw_line(self, img, pixmapper, pt1, pt2, colour, linewidth):
    pix1 = pixmapper(pt1)
    pix2 = pixmapper(pt2)
    clipped = cv.ClipLine((img.width, img.height), pix1, pix2)
    if clipped is None:
        return
    (pix1, pix2) = clipped
    cv.Line(img, pix1, pix2, colour, linewidth)
    cv.Circle(img, pix2, linewidth * 2, colour)
draw a line on the image
24,140
def servo_output_raw_send(self, time_usec, port, servo1_raw, servo2_raw,
                          servo3_raw, servo4_raw, servo5_raw, servo6_raw,
                          servo7_raw, servo8_raw, force_mavlink1=False):
    return self.send(
        self.servo_output_raw_encode(
            time_usec, port, servo1_raw, servo2_raw, servo3_raw,
            servo4_raw, servo5_raw, servo6_raw, servo7_raw, servo8_raw),
        force_mavlink1=force_mavlink1)
The RAW values of the servo outputs (for RC input from the remote, use the RC_CHANNELS messages). The standard PPM modulation is as follows: 1000 microseconds: 0%, 2000 microseconds: 100%. time_usec : Timestamp (microseconds since system boot) (uint32_t) port : Servo output port (set of 8 outputs = 1 port). Most MAVs will just use one, but this allows to encode more than 8 servos. (uint8_t) servo1_raw : Servo output 1 value, in microseconds (uint16_t) servo2_raw : Servo output 2 value, in microseconds (uint16_t) servo3_raw : Servo output 3 value, in microseconds (uint16_t) servo4_raw : Servo output 4 value, in microseconds (uint16_t) servo5_raw : Servo output 5 value, in microseconds (uint16_t) servo6_raw : Servo output 6 value, in microseconds (uint16_t) servo7_raw : Servo output 7 value, in microseconds (uint16_t) servo8_raw : Servo output 8 value, in microseconds (uint16_t)
24,141
def update(self, alert_condition_infra_id, policy_id, name, condition_type,
           alert_condition_configuration, enabled=True):
    data = {
        "data": alert_condition_configuration
    }
    data['data']['type'] = condition_type
    data['data']['policy_id'] = policy_id
    data['data']['name'] = name
    data['data']['enabled'] = enabled
    return self._put(
        # URL template assumed; the literal was elided in the source.
        url='{0}alerts/conditions/{1}'.format(self.URL,
                                              alert_condition_infra_id),
        headers=self.headers,
        data=data
    )
This API endpoint allows you to update an alert condition for infrastucture :type alert_condition_infra_id: int :param alert_condition_infra_id: Alert Condition Infra ID :type policy_id: int :param policy_id: Alert policy id :type name: str :param name: The name of the alert condition :type condition_type: str :param condition_type: The type of the alert condition can be infra_process_running, infra_metric or infra_host_not_reporting :type alert_condition_configuration: hash :param alert_condition_configuration: hash containing config for the alert :type enabled: bool :param enabled: Whether to enable that alert condition :rtype: dict :return: The JSON response of the API :: { "data": { "id": "integer", "policy_id": "integer", "type": "string", "name": "string", "enabled": "boolean", "where_clause": "string", "comparison": "string", "filter": "hash", "critical_threshold": "hash", "event_type": "string", "process_where_clause": "string", "created_at_epoch_millis": "time", "updated_at_epoch_millis": "time" } }
24,142
def set_chuid(ctx, management_key, pin):
    controller = ctx.obj['controller']
    _ensure_authenticated(ctx, controller, pin, management_key)
    controller.update_chuid()
Generate and set a CHUID on the YubiKey.
24,143
def draw_char_screen(self):
    self.screen = Image.new("RGB", (self.height, self.width))
    self.drawer = ImageDraw.Draw(self.screen)
    for sy, line in enumerate(self.char_buffer):
        for sx, tinfo in enumerate(line):
            self.drawer.text((sx * 6, sy * 9), tinfo[0], fill=tinfo[1:])
    self.output_device.interrupt()
Draws the output buffered in the char_buffer.
24,144
def _cleanup_channel(self, channel_id):
    with self.lock:
        if channel_id not in self._channels:
            return
        del self._channels[channel_id]
Remove the channel from the list of available channels. :param int channel_id: Channel id :return:
24,145
def words(ctx, input, output):
    # Log message literals were elided in the source; wording is assumed.
    log.info('Running words command')
    log.info('Reading %s' % input.name)
    doc = Document.from_file(input)
    for element in doc.elements:
        if isinstance(element, Text):
            for sentence in element.sentences:
                output.write(u' '.join(sentence.raw_tokens))
                output.write(u'\n')
Read input document, and output words.
24,146
def before_create(self, context, resource):
    # The key and value literals below are assumptions; they were elided
    # in the source.  A linked resource is loaded from its URL, an
    # uploaded one from its file object.
    if resource.get('url_type', '') == 'link':
        self.data.load(resource['url'])
    else:
        self.data.load(resource['upload'].file)
    self.generate_budget_data_package(resource)
When triggered, the resource, which can either be uploaded or linked to, will be parsed and analysed to see if it possibly is a budget data package resource (checking whether all required headers and any of the recommended headers exist in the CSV). The budget data package specific fields are then appended to the resource, which makes it useful for exporting the dataset as a budget data package.
24,147
def remap(x, oMin, oMax, nMin, nMax):
    if oMin == oMax:
        log.warning("Zero input range, unable to rescale")
        return x
    if nMin == nMax:
        log.warning("Zero output range, unable to rescale")
        return x

    reverseInput = False
    oldMin = min(oMin, oMax)
    oldMax = max(oMin, oMax)
    if not oldMin == oMin:
        reverseInput = True

    reverseOutput = False
    newMin = min(nMin, nMax)
    newMax = max(nMin, nMax)
    if not newMin == nMin:
        reverseOutput = True

    portion = (x - oldMin) * (newMax - newMin) / (oldMax - oldMin)
    if reverseInput:
        portion = (oldMax - x) * (newMax - newMin) / (oldMax - oldMin)

    result = portion + newMin
    if reverseOutput:
        result = newMax - portion
    return result
Map a value from one numeric range to another, maintaining ratio. http://stackoverflow.com/questions/929103/convert-a-number-range-to-another-range-maintaining-ratio
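A few spot checks (plain Python, no dependencies beyond the function above):

print(remap(5.0, 0, 10, 0, 100))    # 50.0  -- straightforward rescale
print(remap(0.25, 0, 1, 100, 200))  # 125.0
print(remap(3.0, 10, 0, 0, 100))    # 70.0  -- input range given in reverse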
24,148
def greenlet_logs(self):
    while True:
        try:
            self.flush_logs()
        except Exception as e:
            self.log.error("When flushing logs: %s" % e)
        finally:
            time.sleep(self.config["report_interval"])
This greenlet always runs in background to update current logs in MongoDB every 10 seconds. Caution: it might get delayed when doing long blocking operations. Should we do this in a thread instead?
24,149
def lu_companion(top_row, value):
    degree, = top_row.shape
    lu_mat = np.zeros((degree, degree), order="F")
    if degree == 1:
        lu_mat[0, 0] = top_row[0] - value
        return lu_mat, abs(lu_mat[0, 0])

    # Column 0: no ``-t`` above the diagonal.
    horner_curr = top_row[0] - value
    one_norm = 1.0 + abs(horner_curr)
    lu_mat[0, 0] = 1.0
    lu_mat[degree - 1, 0] = horner_curr
    # Columns 1 through (degree - 2): interior Horner steps.  This loop was
    # missing in the source and is reconstructed from the docstring below;
    # it reproduces the documented example exactly.
    last_row = degree - 1
    one_plus_abs = 1.0 + abs(value)
    for col in range(1, degree - 1):
        curr_coeff = top_row[col]
        horner_curr = value * horner_curr + curr_coeff
        one_norm = max(one_norm, one_plus_abs + abs(curr_coeff))
        lu_mat[col - 1, col] = -value
        lu_mat[col, col] = 1.0
        lu_mat[last_row, col] = horner_curr
    # Last column: no ``1`` on the diagonal.
    curr_coeff = top_row[last_row]
    horner_curr = value * horner_curr + curr_coeff
    one_norm = max(one_norm, abs(value) + abs(curr_coeff))
    lu_mat[last_row - 1, last_row] = -value
    lu_mat[last_row, last_row] = horner_curr
    return lu_mat, one_norm
r"""Compute an LU-factored :math:`C - t I` and its 1-norm. .. _dgecon: http://www.netlib.org/lapack/explore-html/dd/d9a/group__double_g_ecomputational_ga188b8d30443d14b1a3f7f8331d87ae60.html#ga188b8d30443d14b1a3f7f8331d87ae60 .. _dgetrf: http://www.netlib.org/lapack/explore-html/dd/d9a/group__double_g_ecomputational_ga0019443faea08275ca60a734d0593e60.html#ga0019443faea08275ca60a734d0593e60 .. note:: The output of this function is intended to be used with `dgecon`_ from LAPACK. ``dgecon`` expects both the 1-norm of the matrix and expects the matrix to be passed in an already LU-factored form (via `dgetrf`_). The companion matrix :math:`C` is given by the ``top_row``, for example, the polynomial :math:`t^3 + 3 t^2 - t + 2` has a top row of ``-3, 1, -2`` and the corresponding companion matrix is: .. math:: \left[\begin{array}{c c c} -3 & 1 & -2 \\ 1 & 0 & 0 \\ 0 & 1 & 0 \end{array}\right] After doing a full cycle of the rows (shifting the first to the last and moving all other rows up), row reduction of :math:`C - t I` yields .. math:: \left[\begin{array}{c c c} 1 & -t & 0 \\ 0 & 1 & -t \\ -3 - t & 1 & -2 \end{array}\right] = \left[\begin{array}{c c c} 1 & 0 & 0 \\ 0 & 1 & 0 \\ -3 - t & 1 + t(-3 - t) & 1 \end{array}\right] \left[\begin{array}{c c c} 1 & -t & 0 \\ 0 & 1 & -t \\ 0 & 0 & -2 + t(1 + t(-3 - t)) \end{array}\right] and in general, the terms in the bottom row correspond to the intermediate values involved in evaluating the polynomial via `Horner's method`_. .. _Horner's method: https://en.wikipedia.org/wiki/Horner%27s_method .. testsetup:: lu-companion import numpy as np import numpy.linalg from bezier._algebraic_intersection import lu_companion .. doctest:: lu-companion >>> top_row = np.asfortranarray([-3.0, 1.0, -2.0]) >>> t_val = 0.5 >>> lu_mat, one_norm = lu_companion(top_row, t_val) >>> lu_mat array([[ 1. , -0.5 , 0. ], [ 0. , 1. , -0.5 ], [-3.5 , -0.75 , -2.375]]) >>> one_norm 4.5 >>> l_mat = np.tril(lu_mat, k=-1) + np.eye(3) >>> u_mat = np.triu(lu_mat) >>> a_mat = l_mat.dot(u_mat) >>> a_mat array([[ 1. , -0.5, 0. ], [ 0. , 1. , -0.5], [-3.5, 1. , -2. ]]) >>> np.linalg.norm(a_mat, ord=1) 4.5 Args: top_row (numpy.ndarray): 1D array, top row of companion matrix. value (float): The :math:`t` value used to form :math:`C - t I`. Returns: Tuple[numpy.ndarray, float]: Pair of * 2D array of LU-factored form of :math:`C - t I`, with the non-diagonal part of :math:`L` stored in the strictly lower triangle and :math:`U` stored in the upper triangle (we skip the permutation matrix, as it won't impact the 1-norm) * the 1-norm the matrix :math:`C - t I` As mentioned above, these two values are meant to be used with `dgecon`_.
24,150
def _get_environ_vars(self):
    for key, val in os.environ.items():
        should_be_yielded = (
            key.startswith("PIP_") and
            key[4:].lower() not in self._ignore_env_names
        )
        if should_be_yielded:
            yield key[4:].lower(), val
Returns a generator with all environmental vars with prefix PIP_
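A usage sketch, where `config` stands for a hypothetical object exposing the method above with an empty `_ignore_env_names`:

import os

os.environ["PIP_TIMEOUT"] = "60"
os.environ["PIP_INDEX_URL"] = "https://pypi.example.org/simple"

for name, value in config._get_environ_vars():
    print(name, value)
# timeout 60
# index_url https://pypi.example.org/simple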
24,151
def subscriber_choice_control(self):
    # Task-data and form key names below are assumptions; the literals
    # were elided in the source.
    self.current.task_data['option'] = None
    self.current.task_data['chosen_subscribers'], names = \
        self.return_selected_form_items(
            self.input['form']['SubscriberList'])
    self.current.task_data['msg'] = (
        "You should choose at least one subscriber for migration operation.")
    if self.current.task_data['chosen_subscribers']:
        self.current.task_data['option'] = self.input['cmd']
        del self.current.task_data['msg']
It controls the subscriber choice and generates an error message if no subscriber is chosen.
24,152
def compare_ecp_pots(potential1, potential2, compare_meta=False, rel_tol=0.0):
    # Dictionary keys reconstructed from the standard ECP potential
    # layout; 'ecp_type' in the metadata branch is an assumption.
    if potential1['angular_momentum'] != potential2['angular_momentum']:
        return False

    rexponents1 = potential1['r_exponents']
    rexponents2 = potential2['r_exponents']
    gexponents1 = potential1['gaussian_exponents']
    gexponents2 = potential2['gaussian_exponents']
    coefficients1 = potential1['coefficients']
    coefficients2 = potential2['coefficients']

    # R exponents are integers and must match exactly.
    if rexponents1 != rexponents2:
        return False
    if not _compare_vector(gexponents1, gexponents2, rel_tol):
        return False
    if not _compare_matrix(coefficients1, coefficients2, rel_tol):
        return False
    if compare_meta:
        if potential1['ecp_type'] != potential2['ecp_type']:
            return False
        return True
    else:
        return True
Compare two ecp potentials for approximate equality (exponents/coefficients are within a tolerance) If compare_meta is True, the metadata is also compared for exact equality.
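A toy comparison, using the dictionary layout assumed in the reconstruction above:

pot_a = {"angular_momentum": [0],
         "r_exponents": [2],
         "gaussian_exponents": [10.0],
         "coefficients": [[50.0]]}
pot_b = dict(pot_a, gaussian_exponents=[10.0000001])

print(compare_ecp_pots(pot_a, pot_b))                # False at zero tolerance
print(compare_ecp_pots(pot_a, pot_b, rel_tol=1e-6))  # True within tolerance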
24,153
def mtFeatureExtractionToFile(fileName, midTermSize, midTermStep,
                              shortTermSize, shortTermStep, outPutFile,
                              storeStFeatures=False, storeToCSV=False,
                              PLOT=False):
    [fs, x] = audioBasicIO.readAudioFile(fileName)
    x = audioBasicIO.stereo2mono(x)
    if storeStFeatures:
        [mtF, stF, _] = mtFeatureExtraction(
            x, fs, round(fs * midTermSize), round(fs * midTermStep),
            round(fs * shortTermSize), round(fs * shortTermStep))
    else:
        [mtF, _, _] = mtFeatureExtraction(
            x, fs, round(fs * midTermSize), round(fs * midTermStep),
            round(fs * shortTermSize), round(fs * shortTermStep))
    numpy.save(outPutFile, mtF)
    if PLOT:
        print("Mid-term numpy file: " + outPutFile + ".npy saved")
    if storeToCSV:
        numpy.savetxt(outPutFile + ".csv", mtF.T, delimiter=",")
        if PLOT:
            print("Mid-term CSV file: " + outPutFile + ".csv saved")
    if storeStFeatures:
        numpy.save(outPutFile + "_st", stF)
        if PLOT:
            print("Short-term numpy file: " + outPutFile + "_st.npy saved")
        if storeToCSV:
            numpy.savetxt(outPutFile + "_st.csv", stF.T, delimiter=",")
            if PLOT:
                print("Short-term CSV file: " + outPutFile + "_st.csv saved")
This function is used as a wrapper to: a) read the content of a WAV file b) perform mid-term feature extraction on that signal c) write the mid-term feature sequences to a numpy file
24,154
def query_organism_host():
    # Argument names recovered from the parameter list in the docstring.
    args = get_args(
        request_args=request.args,
        allowed_str_args=['entry_name'],
        allowed_int_args=['limit', 'taxid']
    )
    return jsonify(query.organism_host(**args))
Returns list of host organism by query parameters --- tags: - Query functions parameters: - name: taxid in: query type: integer required: false description: NCBI taxonomy identifier default: 9606 - name: entry_name in: query type: string required: false description: UniProt entry name default: A4_HUMAN - name: limit in: query type: integer required: false description: limit of results numbers default: 10
24,155
def use_plenary_agent_view(self):
    self._object_views['agent'] = PLENARY
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_agent_view()
        except AttributeError:
            pass
Pass through to provider ResourceAgentSession.use_plenary_agent_view
24,156
def start_watching(self, cluster, callback):
    logger.debug("starting to watch cluster %s", cluster.name)
    wait_on_any(self.connected, self.shutdown)
    logger.debug("done waiting on (connected, shutdown)")

    znode_path = "/".join([self.base_path, cluster.name])
    self.stop_events[znode_path] = threading.Event()

    def should_stop():
        return (
            znode_path not in self.stop_events or
            self.stop_events[znode_path].is_set() or
            self.shutdown.is_set()
        )

    while not should_stop():
        try:
            if self.client.exists(znode_path):
                break
        except exceptions.ConnectionClosedError:
            break
        wait_on_any(
            self.stop_events[znode_path], self.shutdown,
            timeout=NO_NODE_INTERVAL
        )

    logger.debug("setting up ChildrenWatch for %s", znode_path)

    @self.client.ChildrenWatch(znode_path)
    def watch(children):
        if should_stop():
            return False
        logger.debug("znode children changed! (%s)", znode_path)
        new_nodes = []
        for child in children:
            child_path = "/".join([znode_path, child])
            try:
                new_nodes.append(
                    Node.deserialize(self.client.get(child_path)[0])
                )
            except ValueError:
                logger.exception("Invalid node at path %s", child)
                continue
        cluster.nodes = new_nodes
        callback()
Initiates the "watching" of a cluster's associated znode. This is done via kazoo's ChildrenWatch object. When a cluster's znode's child nodes are updated, a callback is fired and we update the cluster's `nodes` attribute based on the existing child znodes and fire a passed-in callback with no arguments once done. If the cluster's znode does not exist we wait for `NO_NODE_INTERVAL` seconds before trying again as long as no ChildrenWatch exists for the given cluster yet and we are not in the process of shutting down.
24,157
def copy_config_file(self, config_file, path=None, overwrite=False):
    dst = os.path.join(self.location, config_file)
    if os.path.isfile(dst) and not overwrite:
        return False
    if path is None:
        # Path components assumed; the three literals were elided in the
        # source.
        path = os.path.join(get_ipython_package_dir(),
                            u'config', u'profile', u'default')
    src = os.path.join(path, config_file)
    shutil.copy(src, dst)
    return True
Copy a default config file into the active profile directory. Default configuration files are kept in :mod:`IPython.config.default`. This function moves these from that location to the working profile directory.
24,158
def deactivate_workflow_transitions(cr, model, transitions=None):
    transition_ids = []
    if transitions:
        data_obj = RegistryManager.get(cr.dbname)['ir.model.data']
        for module, name in transitions:
            try:
                transition_ids.append(
                    data_obj.get_object_reference(
                        cr, SUPERUSER_ID, module, name)[1])
            except ValueError:
                continue
    else:
        # SQL literals reconstructed from the workflow schema.
        cr.execute(
            '''SELECT DISTINCT t.id
               FROM wkf w
               JOIN wkf_activity a ON a.wkf_id = w.id
               JOIN wkf_transition t
                   ON t.act_from = a.id OR t.act_to = a.id
               WHERE w.osv = %s''',
            (model,))
        transition_ids = [i for i, in cr.fetchall()]
    cr.execute(
        'SELECT id, condition FROM wkf_transition WHERE id IN %s',
        (tuple(transition_ids),))
    transition_conditions = dict(cr.fetchall())
    cr.execute(
        "UPDATE wkf_transition SET condition = 'False' WHERE id IN %s",
        (tuple(transition_ids),))
    return transition_conditions
Disable workflow transitions for workflows on a given model. This can be necessary for automatic workflow transitions when writing to an object via the ORM in the post migration step. Returns a dictionary to be used on reactivate_workflow_transitions :param model: the model for which workflow transitions should be \ deactivated :param transitions: a list of ('module', 'name') xmlid tuples of \ transitions to be deactivated. Don't pass this if there's no specific \ reason to do so, the default is to deactivate all transitions .. versionadded:: 7.0
24,159
def subject_sequence_retriever(fasta_handle, b6_handle, e_value,
                               *args, **kwargs):
    filtered_b6 = defaultdict(list)
    for entry in b6_evalue_filter(b6_handle, e_value, *args, **kwargs):
        filtered_b6[entry.subject].append(
            (entry.subject_start, entry.subject_end, entry._evalue_str))
    for fastaEntry in fasta_iter(fasta_handle):
        if fastaEntry.id in filtered_b6:
            for alignment in filtered_b6[fastaEntry.id]:
                start = alignment[0] - 1
                end = alignment[1] - 1
                if start < end:
                    subject_sequence = fastaEntry.sequence[start:end]
                elif start > end:
                    subject_sequence = fastaEntry.sequence[end:start][::-1]
                else:
                    subject_sequence = fastaEntry.sequence[start]
                fastaEntry.sequence = subject_sequence
                # Description literals assumed; they were elided in the
                # source, which appends the alignment's E-value string.
                if fastaEntry.description == '':
                    fastaEntry.description = 'E-value: '
                else:
                    fastaEntry.description += ' E-value: '
                fastaEntry.description += alignment[2]
                yield fastaEntry
Returns FASTA entries for subject sequences from BLAST hits.

Stores B6/M8 entries with E-values below the e_value cutoff. Then iterates through the FASTA file and if an entry matches the subject of a B6/M8 entry, its sequence is extracted and returned as a FASTA entry plus the E-value.

Args:
    fasta_handle (file): FASTA file handle, can technically be any iterable that returns FASTA "lines"
    b6_handle (file): B6/M8 file handle, can technically be any iterable that returns B6/M8 "lines"
    e_value (float): Max E-value of entry to return
    *args: Variable length argument list for b6_iter
    **kwargs: Arbitrary keyword arguments for b6_iter

Yields:
    FastaEntry: class containing all FASTA data

Example:
    Note: These doctests will not pass, examples are only in doctest format as per convention. bio_utils uses pytests for testing.

    >>> fasta_handle = open('test.fasta')
    >>> b6_handle = open('test.b6')
    >>> for entry in subject_sequence_retriever(fasta_handle,
    ...                                         b6_handle, 1e5):
    ...     print(entry.sequence)  # Print aligned subject sequence
24,160
def _check_iou_licence(self):
    try:
        license_check = self._config().getboolean("license_check", True)
    except ValueError:
        raise IOUError("Invalid licence check setting")
    if license_check is False:
        return

    config = configparser.ConfigParser()
    try:
        with open(self.iourc_path, encoding="utf-8") as f:
            config.read_file(f)
    except OSError as e:
        raise IOUError("Could not open iourc file {}: {}".format(
            self.iourc_path, e))
    except configparser.Error as e:
        raise IOUError("Could not parse iourc file {}: {}".format(
            self.iourc_path, e))
    except UnicodeDecodeError as e:
        raise IOUError("Non ascii characters in iourc file {}, "
                       "please remove them: {}".format(self.iourc_path, e))
    if "license" not in config:
        raise IOUError("License section not found in iourc file {}".format(
            self.iourc_path))
    hostname = socket.gethostname()
    if len(hostname) > 15:
        log.warning("Older IOU images may not boot because hostname "
                    "{} length is above 15 characters".format(hostname))
    if hostname not in config["license"]:
        raise IOUError("Hostname \"{}\" not found in iourc file {}".format(
            hostname, self.iourc_path))
    user_ioukey = config["license"][hostname]
    if user_ioukey[-1:] != ';':
        raise IOUError("IOU key not ending with ; in iourc file {}".format(
            self.iourc_path))
    if len(user_ioukey) != 17:
        raise IOUError("IOU key length is not 16 characters in iourc "
                       "file {}".format(self.iourc_path))
    user_ioukey = user_ioukey[:16]
    if not hasattr(sys, "_called_from_test"):
        try:
            hostid = (yield from gns3server.utils.asyncio
                      .subprocess_check_output("hostid")).strip()
        except FileNotFoundError as e:
            raise IOUError("Could not find hostid: {}".format(e))
        except subprocess.SubprocessError as e:
            raise IOUError("Could not execute hostid: {}".format(e))
        try:
            ioukey = int(hostid, 16)
        except ValueError:
            raise IOUError("Invalid hostid detected: {}".format(hostid))
        for x in hostname:
            ioukey += ord(x)
        # The pad byte strings and the struct format below are assumptions;
        # the literals were elided in the source.  These values follow the
        # widely documented IOU licence-key derivation.
        pad1 = b'\x4B\x58\x21\x81\x56\x7B\x0D\xF3\x21\x43\x9B\x7E\xAC\x1D\xE6\x8A'
        pad2 = b'\x80' + 39 * b'\0'
        ioukey = hashlib.md5(pad1 + pad2 + struct.pack('!i', ioukey)
                             + pad1).hexdigest()[:16]
        if ioukey != user_ioukey:
            raise IOUError("Invalid IOU license key {} detected in iourc "
                           "file {} for host {}".format(
                               user_ioukey, self.iourc_path, hostname))
Checks for a valid IOU key in the iourc file (paranoid mode).
24,161
def determine_node(self):
    if self.node_type == "simultaneous":
        if self.nat_type != "unknown":
            return "simultaneous"

    unspecific_bind = ["0.0.0.0", "127.0.0.1", "localhost"]
    if self.passive_bind in unspecific_bind:
        lan_ip = get_lan_ip(self.interface)
    else:
        lan_ip = self.passive_bind

    if lan_ip is not None \
            and self.passive_port is not None and self.enable_forwarding:
        self.debug_print("Checking if port is forwarded.")
        if is_port_forwarded(lan_ip, self.passive_port, "TCP",
                             self.forwarding_servers):
            msg = "Port already forwarded. Skipping NAT traversal."
            self.debug_print(msg)
            self.forwarding_type = "forwarded"
            return "passive"
        else:
            self.debug_print("Port is not already forwarded.")

        try:
            self.debug_print("Trying UPnP")
            UPnP(self.interface).forward_port("TCP", self.passive_port,
                                              lan_ip)
            if is_port_forwarded(lan_ip, self.passive_port, "TCP",
                                 self.forwarding_servers):
                self.forwarding_type = "UPnP"
                self.debug_print("Forwarded port with UPnP.")
            else:
                self.debug_print("UPnP failed to forward port.")
        except Exception as e:
            error = parse_exception(e)
            log_exception(self.error_log_path, error)
            self.debug_print("UPnP failed to forward port.")

        try:
            self.debug_print("Trying NATPMP.")
            NatPMP(self.interface).forward_port("TCP", self.passive_port,
                                                lan_ip)
            if is_port_forwarded(lan_ip, self.passive_port, "TCP",
                                 self.forwarding_servers):
                self.forwarding_type = "NATPMP"
                self.debug_print("Port forwarded with NATPMP.")
            else:
                self.debug_print("Failed to forward port with NATPMP.")
                self.debug_print("Falling back on TCP hole punching or"
                                 " proxying.")
        except Exception as e:
            error = parse_exception(e)
            log_exception(self.error_log_path, error)
            self.debug_print("Failed to forward port with NATPMP.")

        if self.forwarding_type != "manual":
            return "passive"

    if self.nat_type != "unknown":
        return "simultaneous"
    else:
        return "active"
Determines the type of node based on a combination of forwarding reachability and NAT type.
24,162
def parse_node_response(self, response): for key, value in response.items(): if key == "console": self._console = value elif key == "node_directory": self._node_directory = value elif key == "command_line": self._command_line = value elif key == "status": self._status = value elif key == "console_type": self._console_type = value elif key == "name": self.name = value elif key in ["node_id", "project_id", "console_host", "startup_config_content", "private_config_content", "startup_script"]: if key in self._properties: del self._properties[key] else: self._properties[key] = value self._list_ports() for link in self._links: yield from link.node_updated(self)
Update the object with the remote node object
24,163
def delete_collection_namespaced_replication_controller(self, namespace, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_collection_namespaced_replication_controller_with_http_info(namespace, **kwargs)
    else:
        (data) = self.delete_collection_namespaced_replication_controller_with_http_info(namespace, **kwargs)
        return data
delete_collection_namespaced_replication_controller # noqa: E501
delete collection of ReplicationController # noqa: E501
This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_collection_namespaced_replication_controller(namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param bool include_uninitialized: If true, partially initialized resources are included in the response.
:param str pretty: If 'true', then the output is pretty printed.
:param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.
:param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.
:param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.
:param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.
:param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread.
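For context, a hedged sketch of the sync/async calling convention this generated wrapper implements, using the standard kubernetes Python client (a reachable cluster and kubeconfig are assumed)::

    from kubernetes import client, config

    config.load_kube_config()  # or config.load_incluster_config()
    v1 = client.CoreV1Api()

    # Synchronous call: returns a V1Status directly.
    status = v1.delete_collection_namespaced_replication_controller(
        namespace="default", label_selector="app=demo")

    # Asynchronous call: returns a thread-like object; .get() blocks.
    thread = v1.delete_collection_namespaced_replication_controller(
        namespace="default", async_req=True)
    status = thread.get()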
24,164
def _apply_dvportgroup_out_shaping(pg_name, out_shaping, out_shaping_conf):
    # String literals were stripped from the source dump; the log message
    # and config keys below are reconstructed from the docstring and the
    # residue of the original literals.
    log.trace("Applying portgroup '%s' out shaping policy", pg_name)
    if 'average_bandwidth' in out_shaping_conf:
        out_shaping.averageBandwidth = out_shaping_conf['average_bandwidth']
    if 'burst_size' in out_shaping_conf:
        out_shaping.burstSize = out_shaping_conf['burst_size']
    if 'enabled' in out_shaping_conf:
        out_shaping.enabled = out_shaping_conf['enabled']
    if 'peak_bandwidth' in out_shaping_conf:
        out_shaping.peakBandwidth = out_shaping_conf['peak_bandwidth']
Applies the values in out_shaping_conf to an out_shaping object pg_name The name of the portgroup out_shaping The vim.DVSTrafficShapingPolicy to apply the config to out_shaping_conf The out shaping config
24,165
def mean(self): return np.dot(np.array(self.norm_scores), self.weights)
Compute a total score for each model over all the tests. Uses the `norm_score` attribute, since otherwise direct comparison across different kinds of scores would not be possible.
24,166
def perform_permissions_check(self, user, obj, perms): return self.request.forum_permission_handler.can_access_moderation_queue(user)
Performs the permissions check.
24,167
def dorun(method, platonics=None, nsnrs=20, noise_samples=30, sweeps=30, burn=15):
    sigmas = np.logspace(np.log10(1.0/2048), 0, nsnrs)
    crbs, vals, errs, poss = [], [], [], []

    for sigma in sigmas:
        print "sigma = %f" % sigma  # message text stripped in the source dump; reconstructed
        for i, (image, pos) in enumerate(platonics):
            print 'image', i, ':'  # message text stripped in the source dump; reconstructed
            s, im = create_comparison_state(image, pos, method=method)
            set_image(s, im, sigma)

            crbs.append(crb(s))

            val, err = sample(s, im, sigma, N=noise_samples, sweeps=sweeps, burn=burn)
            poss.append(pos)
            vals.append(val)
            errs.append(err)

    shape0 = (nsnrs, len(platonics), -1)
    shape1 = (nsnrs, len(platonics), noise_samples, -1)

    crbs = np.array(crbs).reshape(shape0)
    vals = np.array(vals).reshape(shape1)
    errs = np.array(errs).reshape(shape1)
    poss = np.array(poss).reshape(shape0)
    return [crbs, vals, errs, poss, sigmas]
platonics = create_many_platonics(N=50) dorun(platonics)
24,168
def save_dict_to_file(filename, dictionary):
    with open(filename, 'w') as f:  # file mode stripped in the source dump; 'w' assumed
        writer = csv.writer(f)
        for k, v in iteritems(dictionary):
            writer.writerow([str(k), str(v)])
Saves dictionary as CSV file.
24,169
def create_border(video, color="blue", border_percent=2): if video.shape[-1] != 3: return video color_to_axis = {"blue": 2, "red": 0, "green": 1} axis = color_to_axis[color] _, _, height, width, _ = video.shape border_height = np.ceil(border_percent * height / 100.0).astype(np.int) border_width = np.ceil(border_percent * width / 100.0).astype(np.int) video[:, :, :border_height, :, axis] = 255 video[:, :, -border_height:, :, axis] = 255 video[:, :, :, :border_width, axis] = 255 video[:, :, :, -border_width:, axis] = 255 return video
Creates a border around each frame to differentiate input and target.

Args:
  video: 5-D NumPy array.
  color: string, "blue", "red" or "green".
  border_percent: Percentage of the frame covered by the border.
Returns:
  video: 5-D NumPy array.
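A quick usage sketch, assuming the function and NumPy are importable as above::

    import numpy as np

    # 5-D video batch: (batch, time, height, width, channels).
    video = np.zeros((2, 4, 64, 64, 3), dtype=np.uint8)
    bordered = create_border(video.copy(), color="red", border_percent=2)

    # ceil(2% of 64) = 2 pixels, so the outer two-pixel frame has the
    # red channel saturated on every frame.
    assert (bordered[:, :, :2, :, 0] == 255).all()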
24,170
def strip_water(self, os=None, o=None, on=None, compact=False,
                resn="SOL", groupname="notwater", **kwargs):
    # Several string literals were stripped from the source dump; the infix
    # strings, keyword names and '@' selection prefix below are assumptions
    # reconstructed from context and the docstring.
    force = kwargs.pop('force', self.force)
    newtpr = self.outfile(self.infix_filename(os, self.tpr, '_nowater'))
    newxtc = self.outfile(self.infix_filename(o, self.xtc, '_nowater'))
    newndx = self.outfile(self.infix_filename(on, self.tpr, '_nowater', 'ndx'))
    nowater_ndx = self._join_dirname(newtpr, "nowater.ndx")
    if compact:
        TRJCONV = trj_compact
        if kwargs.get('centergroup') is not None and 'input' in kwargs:
            logger.warn("centergroup = %r will be superceded by input[0] = %r",
                        kwargs['centergroup'], kwargs['input'][0])
        _input = kwargs.get('input', [kwargs.get('centergroup', 'Protein')])
        kwargs['input'] = [_input[0], groupname]
        del _input
        logger.info("Creating a compact trajectory centered on group %r", kwargs['input'][0])
        logger.info("Writing %r to the output trajectory", kwargs['input'][1])
    else:
        TRJCONV = gromacs.trjconv
        kwargs['input'] = [groupname]
        logger.info("Writing %r to the output trajectory (no centering)", kwargs['input'][0])
    kwargs.pop("centergroup", None)

    NOTwater = "! r {resn!s}".format(**vars())
    with utilities.in_dir(self.dirname):
        if not self.check_file_exists(newxtc, resolve="indicate", force=force):
            B = IndexBuilder(struct=self.tpr, selections=['@' + NOTwater],
                             ndx=self.ndx, out_ndx=nowater_ndx)
            B.combine(name_all=groupname, operation="|", defaultgroups=True)
            logger.debug("Index file for water removal: %r", nowater_ndx)

            logger.info("TPR file without water {newtpr!r}".format(**vars()))
            gromacs.tpbconv(s=self.tpr, o=newtpr, n=nowater_ndx, input=[groupname])

            logger.info("NDX of the new system %r", newndx)
            gromacs.make_ndx(f=newtpr, o=newndx, input=[], stderr=False, stdout=False)

            logger.info("Trajectory without water {newxtc!r}".format(**vars()))
            kwargs['s'] = self.tpr
            kwargs['f'] = self.xtc
            kwargs['n'] = nowater_ndx
            kwargs['o'] = newxtc
            TRJCONV(**kwargs)

            logger.info("pdb and gro for visualization")
            for ext in 'pdb', 'gro':
                try:
Write xtc and tpr with water (by resname) removed.

:Keywords:
  *os*
     Name of the output tpr file; by default use the original but
     insert "nowater" before suffix.
  *o*
     Name of the output trajectory; by default use the original name
     but insert "nowater" before suffix.
  *on*
     Name of a new index file (without water).
  *compact*
     ``True``: write a compact and centered trajectory
     ``False``: use trajectory as it is [``False``]
  *centergroup*
     Index group used for centering ["Protein"]

     .. Note:: If *input* is provided (see below under *kwargs*)
        then *centergroup* is ignored and the group for
        centering is taken as the first entry in *input*.
  *resn*
     Residue name of the water molecules; all these residues are excluded.
  *groupname*
     Name of the group that is generated by subtracting all waters
     from the system.
  *force* : Boolean
    - ``True``: overwrite existing trajectories
    - ``False``: throw an IOError exception
    - ``None``: skip existing and log a warning [default]
  *kwargs*
     are passed on to :func:`gromacs.cbook.trj_compact` (unless the
     values have to be set to certain values such as s, f, n, o
     keywords). The *input* keyword is always mangled: Only the first
     entry (the group to centre the trajectory on) is kept, and as a
     second group (the output group) *groupname* is used.

:Returns:
     dictionary with keys *tpr*, *xtc*, *ndx* which are the names of
     the new files

.. warning:: The input tpr file should *not* have *any position restraints*;
             otherwise Gromacs will throw a hissy-fit and say
             *Software inconsistency error: Position restraint coordinates are missing*
             (This appears to be a bug in Gromacs 4.x.)
24,171
def transform_case(self, description, case_type): if case_type == CasingTypeEnum.Sentence: description = "{}{}".format( description[0].upper(), description[1:]) elif case_type == CasingTypeEnum.Title: description = description.title() else: description = description.lower() return description
Transforms the case of the expression description, based on options

Args:
    description: The description to transform
    case_type: The casing type that controls the output casing
Returns:
    The transformed description with proper casing
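The branch behaviour is easy to exercise with a free-function mirror of the same logic; plain strings stand in for the CasingTypeEnum members, whose names here are assumptions::

    def transform_case_demo(description, case_type):
        # Mirrors the method above with string-valued case types.
        if case_type == "sentence":
            return description[0].upper() + description[1:]
        elif case_type == "title":
            return description.title()
        return description.lower()

    assert transform_case_demo("every 5 minutes", "sentence") == "Every 5 minutes"
    assert transform_case_demo("every 5 minutes", "title") == "Every 5 Minutes"
    assert transform_case_demo("Every 5 Minutes", "other") == "every 5 minutes"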
24,172
def saved_xids(self):
    if self._saved_xids is None:
        self._saved_xids = []
        if self.debug:
            # filename stripped in the source dump; 'xids-saved' assumed
            fpfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved')
            if os.path.isfile(fpfn) and os.access(fpfn, os.R_OK):
                with open(fpfn) as fh:
                    self._saved_xids = fh.read().splitlines()
    return self._saved_xids
Return previously saved xids.
24,173
def cloudInCells(x, y, bins, weights=None): x_bins = np.array(bins[0]) delta_x = x_bins[1] - x_bins[0] x_bins = np.insert(x_bins, 0, x_bins[0] - delta_x) x_bins = np.append(x_bins, x_bins[-1] + delta_x) y_bins = np.array(bins[1]) delta_y = y_bins[1] - y_bins[0] y_bins = np.insert(y_bins, 0, y_bins[0] - delta_y) y_bins = np.append(y_bins, y_bins[-1] + delta_y) x_bound_cut = np.logical_and(x >= x_bins[0], x <= x_bins[-1]) y_bound_cut = np.logical_and(y >= y_bins[0], y <= y_bins[-1]) bound_cut = np.logical_and(x_bound_cut, y_bound_cut) if not np.any(weights): bound_weights = np.ones(len(x))[bound_cut] else: bound_weights = np.array(weights)[bound_cut] x_vals = np.array(x)[bound_cut] y_vals = np.array(y)[bound_cut] x_width = x_bins[1] - x_bins[0] y_width = y_bins[1] - y_bins[0] x_centers = x_bins[0: -1] + (0.5 * x_width) y_centers = y_bins[0: -1] + (0.5 * y_width) dx = x_vals - x_centers[np.digitize(x_vals, x_bins) - 1] dy = y_vals - y_centers[np.digitize(y_vals, y_bins) - 1] ux = ((dx / x_width) * (dx >= 0)) +\ ((1. + (dx / x_width)) * (dx < 0)) lx = 1. - ux uy = ((dy / y_width) * (dy >= 0)) +\ ((1. + (dy / y_width)) * (dy < 0)) ly = 1. - uy new_x_vals = [] new_y_vals = [] cell_weights = [] new_x_vals.append(x_vals + (0.5 * x_width)) new_y_vals.append(y_vals + (0.5 * y_width)) cell_weights.append(bound_weights * ux * uy) new_x_vals.append(x_vals + (0.5 * x_width)) new_y_vals.append(y_vals - (0.5 * y_width)) cell_weights.append(bound_weights * ux * ly) new_x_vals.append(x_vals - (0.5 * x_width)) new_y_vals.append(y_vals + (0.5 * y_width)) cell_weights.append(bound_weights * lx * uy) new_x_vals.append(x_vals - (0.5 * x_width)) new_y_vals.append(y_vals - (0.5 * y_width)) cell_weights.append(bound_weights * lx * ly) new_x_vals = np.concatenate(new_x_vals) new_y_vals = np.concatenate(new_y_vals) cell_weights = np.concatenate(cell_weights) result = np.histogram2d(new_x_vals, new_y_vals, bins = [x_bins, y_bins], weights = cell_weights)[0] result = np.transpose(result[1: result.shape[0] - 1])[1: result.shape[1] - 1] return result, x_bins, y_bins
Use cloud-in-cells binning algorithm. Only valid for equal-spaced linear bins. http://ta.twi.tudelft.nl/dv/users/Lemmens/MThesis.TTH/chapter4.html#tth_sEc2 http://www.gnu.org/software/archimedes/manual/html/node29.html INPUTS: x: array of x-values y: array or y-values bins: [bins_x, bins_y] format, where bins_x corresponds to the bin edges along x-axis weights[None]: optionally assign a weight to each entry OUTPUTS: histogram: bins_x: bins_y:
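A small sanity check of the weight splitting for interior points (a sketch assuming the function is importable; the bins must be equally spaced, as noted above)::

    import numpy as np

    # Two interior points on a 4x4 grid of unit bins; each point's unit
    # weight is split among the four nearest cell centers, so the total
    # weight is conserved.
    x = np.array([1.2, 2.7])
    y = np.array([1.2, 2.7])
    bins = [np.linspace(0., 4., 5), np.linspace(0., 4., 5)]
    hist, x_bins, y_bins = cloudInCells(x, y, bins)
    assert np.isclose(hist.sum(), 2.0)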
24,174
def rewind_body(prepared_request):
    body_seek = getattr(prepared_request.body, 'seek', None)
    if body_seek is not None and isinstance(prepared_request._body_position, integer_types):
        try:
            body_seek(prepared_request._body_position)
        except (IOError, OSError):
            raise UnrewindableBodyError("An error occurred when rewinding request "
                                        "body for redirect.")
    else:
        raise UnrewindableBodyError("Unable to rewind request body for redirect.")
Move file pointer back to its recorded starting position so it can be read again on redirect.
24,175
def _request_reports(self, resource_param_name, resources, endpoint_name):
    params = [{resource_param_name: resource, 'apikey': self._api_key} for resource in resources]
    return self._requests.multi_get(self.BASE_DOMAIN + endpoint_name, query_params=params)
Sends multiples requests for the resources to a particular endpoint. Args: resource_param_name: a string name of the resource parameter. resources: list of of the resources. endpoint_name: VirusTotal endpoint URL suffix. Returns: A list of the responses.
24,176
def contact_addresses(self):
    # relation name stripped in the source dump; 'contact_addresses' assumed
    return MultiContactAddress(
        href=self.get_relation('contact_addresses'),
        type=self.typeof,
        name=self.name)
Provides a reference to contact addresses used by this server. Obtain a reference to manipulate or iterate existing contact addresses:: >>> from smc.elements.servers import ManagementServer >>> mgt_server = ManagementServer.objects.first() >>> for contact_address in mgt_server.contact_addresses: ... contact_address ... ContactAddress(location=Default,addresses=[u'1.1.1.1']) ContactAddress(location=foolocation,addresses=[u'12.12.12.12']) :rtype: MultiContactAddress
24,177
def reindex(self): for i in range(self.rally_count()): self.rally_points[i].count = self.rally_count() self.rally_points[i].idx = i self.last_change = time.time()
reset counters and indexes
24,178
def sanitize(self):
    super(EncapsulatedControlMessage, self).sanitize()
    # Error message literals were stripped from the source dump; the
    # messages below are reconstructed from the checks themselves.
    if not isinstance(self.security, bool):
        raise ValueError('security flag must be a boolean')
    if self.security:
        raise NotImplementedError('handling of security data '
                                  'is not implemented yet')
    if not isinstance(self.ddt_originated, bool):
        raise ValueError('ddt_originated flag must be a boolean')
    if not isinstance(self.for_rtr, bool):
        raise ValueError('for_rtr flag must be a boolean')
    if not isinstance(self.relayed_by_rtr, bool):
        raise ValueError('relayed_by_rtr flag must be a boolean')
Check if the current settings conform to the LISP specifications and fix them where possible.
24,179
def _validate_list(self, input_list, schema_list, path_to_root, object_title=''):
    # NOTE: the string literals below were stripped from the source dump;
    # key names, datatype names and error codes are reconstructed from
    # context and are assumptions.
    rules_path_to_root = re.sub(r'\[\d+\]', '[0]', path_to_root)
    list_rules = self.keyMap[rules_path_to_root]
    initial_key = rules_path_to_root + '[0]'
    item_rules = self.keyMap[initial_key]
    list_error = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': list_rules,
        'failed_test': 'value_datatype',
        'input_path': path_to_root,
        'error_value': 0,
        'error_code': 4001
    }
    if 'min_size' in list_rules.keys():
        if len(input_list) < list_rules['min_size']:
            list_error['failed_test'] = 'min_size'
            list_error['error_value'] = len(input_list)
            list_error['error_code'] = 4031
            raise InputValidationError(list_error)
    if 'max_size' in list_rules.keys():
        if len(input_list) > list_rules['max_size']:
            list_error['failed_test'] = 'max_size'
            list_error['error_value'] = len(input_list)
            list_error['error_code'] = 4032
            raise InputValidationError(list_error)
    item_error = {
        'object_title': object_title,
        'model_schema': self.schema,
        'input_criteria': item_rules,
        'failed_test': 'value_datatype',
        'input_path': initial_key,
        'error_value': None,
        'error_code': 4001
    }
    for i in range(len(input_list)):
        input_path = path_to_root + '[%s]' % i
        item = input_list[i]
        item_error['input_path'] = input_path
        try:
            item_index = self._datatype_classes.index(item.__class__)
        except ValueError:
            item_error['error_value'] = item.__class__.__name__
            raise InputValidationError(item_error)
        item_type = self._datatype_names[item_index]
        item_error['error_value'] = item
        if item_rules['value_datatype'] == 'null':
            pass
        else:
            if item_type != item_rules['value_datatype']:
                raise InputValidationError(item_error)
        if item_type == 'boolean':
            input_list[i] = self._validate_boolean(item, input_path, object_title)
        elif item_type == 'number':
            input_list[i] = self._validate_number(item, input_path, object_title)
        elif item_type == 'string':
            input_list[i] = self._validate_string(item, input_path, object_title)
        elif item_type == 'map':
            input_list[i] = self._validate_dict(item, schema_list[0], input_path, object_title)
        elif item_type == 'list':
            input_list[i] = self._validate_list(item, schema_list[0], input_path, object_title)
    if 'unique_values' in list_rules.keys():
        if len(set(input_list)) < len(input_list):
            list_error['failed_test'] = 'unique_values'
            list_error['error_value'] = input_list
            list_error['error_code'] = 4033
            raise InputValidationError(list_error)
    return input_list
a helper method for recursively validating items in a list :return: input_list
24,180
def stop(self):
    # Exception message literals were stripped from the source dump;
    # reconstructed from the docstring below.
    if not self.is_running():
        raise RuntimeError('the instance has not been started')
    running, self._running = self._running, None
    try:
        next(running)
    except StopIteration:
        pass
    else:
        raise TypeError('run() is not canonical')
Stops the instance. :raises RuntimeError: has not been started. :raises TypeError: :meth:`run` is not canonical.
24,181
def double_hash_encode_ngrams(ngrams, keys, ks, l, encoding):
    key_sha1, key_md5 = keys
    bf = bitarray(l)
    bf.setall(False)
    for m, k in zip(ngrams, ks):
        sha1hm = int(
            hmac.new(key_sha1, m.encode(encoding=encoding), sha1).hexdigest(), 16) % l
        md5hm = int(
            hmac.new(key_md5, m.encode(encoding=encoding), md5).hexdigest(), 16) % l
        for i in range(k):
            gi = (sha1hm + i * md5hm) % l
            bf[gi] = 1
    return bf
Computes the double hash encoding of the ngrams with the given keys. Using the method from: Schnell, R., Bachteler, T., & Reiher, J. (2011). A Novel Error-Tolerant Anonymous Linking Code. http://grlc.german-microsimulation.de/wp-content/uploads/2017/05/downloadwp-grlc-2011-02.pdf :param ngrams: list of n-grams to be encoded :param keys: hmac secret keys for md5 and sha1 as bytes :param ks: ks[i] is k value to use for ngram[i] :param l: length of the output bitarray :param encoding: the encoding to use when turning the ngrams to bytes :return: bitarray of length l with the bits set which correspond to the encoding of the ngrams
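A hedged usage sketch with hypothetical keys, encoding the bigrams of a short word into a 1024-bit filter::

    ngrams = ["he", "el", "ll", "lo"]                   # bigrams of "hello"
    keys = (b"hmac-key-for-sha1", b"hmac-key-for-md5")  # hypothetical secrets
    ks = [20] * len(ngrams)                             # 20 slots per n-gram
    bf = double_hash_encode_ngrams(ngrams, keys, ks, l=1024, encoding="utf-8")
    print(bf.count())                                   # set bits, at most sum(ks) == 80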
24,182
def difference(self, other, joinBy=None, exact=False): if isinstance(other, GMQLDataset): other_idx = other.__index else: raise TypeError("other must be a GMQLDataset. " "{} was provided".format(type(other))) if isinstance(joinBy, list) and \ all([isinstance(x, str) for x in joinBy]): metaJoinCondition = Some(self.opmng.getMetaJoinCondition(joinBy)) elif joinBy is None: metaJoinCondition = none() else: raise TypeError("joinBy must be a list of strings. " "{} was provided".format(type(joinBy))) if not isinstance(exact, bool): raise TypeError("exact must be a boolean. " "{} was provided".format(type(exact))) new_index = self.opmng.difference(self.__index, other_idx, metaJoinCondition, exact) new_local_sources, new_remote_sources = self.__combine_sources(self, other) new_location = self.__combine_locations(self, other) return GMQLDataset(index=new_index, location=new_location, local_sources=new_local_sources, remote_sources=new_remote_sources, meta_profile=self.meta_profile)
*Wrapper of* ``DIFFERENCE``

DIFFERENCE is a binary, non-symmetric operator that produces one sample
in the result for each sample of the first operand, by keeping the same
metadata of the first operand sample and only those regions (with their
schema and values) of the first operand sample which do not intersect with
any region in the second operand sample (also known as negative regions)

:param other: GMQLDataset
:param joinBy: (optional) list of metadata attributes. It is used to extract subsets of samples on which
                to apply the operator: only those samples in the current and other dataset that have the
                same value for each specified attribute are considered when performing the operation
:param exact: boolean. If true, the regions are considered as intersecting only if their coordinates
              are exactly the same
:return: a new GMQLDataset

Example of usage. We compute the exact difference between Example_Dataset_1 and Example_Dataset_2,
considering only the samples with same `antibody`::

    import gmql as gl

    d1 = gl.get_example_dataset("Example_Dataset_1")
    d2 = gl.get_example_dataset("Example_Dataset_2")

    result = d1.difference(other=d2, exact=True, joinBy=['antibody'])
24,183
def _pack(formatstring, value):
    _checkString(formatstring, description='formatstring', minlength=1)
    try:
        result = struct.pack(formatstring, value)
    except Exception:
        # error message text stripped in the source dump; reconstructed
        errortext = 'The value to send is probably out of range, as the num-to-bytestring conversion failed.'
        errortext += ' Value: {0!r} Struct format code is: {1}'
        raise ValueError(errortext.format(value, formatstring))
    if sys.version_info[0] > 2:
        return str(result, encoding='latin1')  # encoding assumed
    return result
Pack a value into a bytestring. Uses the built-in :mod:`struct` Python module. Args: * formatstring (str): String for the packing. See the :mod:`struct` module for details. * value (depends on formatstring): The value to be packed Returns: A bytestring (str). Raises: ValueError Note that the :mod:`struct` module produces byte buffers for Python3, but bytestrings for Python2. This is compensated for automatically.
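Usage sketch; note the error text and latin1 decoding in the body above are reconstructions, so treat the exact message as an assumption::

    payload = _pack('>H', 770)   # big-endian 16-bit register, bytes 0x03 0x02
    assert len(payload) == 2

    try:
        _pack('>H', -1)          # out of range for an unsigned short
    except ValueError as err:
        print(err)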
24,184
def batch_run_many(player, positions, batch_size=100): prob_list = [] value_list = [] for idx in range(0, len(positions), batch_size): probs, values = player.network.run_many(positions[idx:idx + batch_size]) prob_list.append(probs) value_list.append(values) return np.concatenate(prob_list, axis=0), np.concatenate(value_list, axis=0)
Used to avoid a memory overflow issue when running the network
on too many positions. TODO: This should be a member function of
player.network?
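A self-contained check of the chunking with a stand-in network; the output width 362 (a 19x19 Go board plus pass) is illustrative only::

    import numpy as np

    class DummyNetwork:
        def run_many(self, positions):
            n = len(positions)
            return np.full((n, 362), 1.0 / 362), np.zeros(n)

    class DummyPlayer:
        network = DummyNetwork()

    positions = list(range(250))   # 250 positions -> chunks of 100, 100, 50
    probs, values = batch_run_many(DummyPlayer(), positions, batch_size=100)
    assert probs.shape == (250, 362) and values.shape == (250,)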
24,185
def on_channel_flow(self, method):
    # Log message literals were stripped from the source dump; reconstructed.
    if method.active:
        LOGGER.info('Channel flow is active, setting state to ready')
        self.state = self.STATE_READY
        if self.on_ready:
            self.on_ready(self)
    else:
        LOGGER.warning('Channel flow is inactive, setting state to blocked')
        self.state = self.STATE_BLOCKED
        if self.on_unavailable:
            self.on_unavailable(self)
When RabbitMQ indicates the connection is unblocked, set the state appropriately. :param pika.spec.Channel.Flow method: The Channel flow frame
24,186
def check_and_make_label(lbl, lineno):
    if isinstance(lbl, float):
        if lbl == int(lbl):
            id_ = str(int(lbl))
        else:
            syntax_error(lineno, 'Line numbers must be integers')  # message text assumed; stripped in the source dump
            return None
    else:
        id_ = lbl
    return global_.SYMBOL_TABLE.access_label(id_, lineno)
Checks if the given label (or line number) is valid and, if so, returns a label object. :param lbl: Line number of label (string) :param lineno: Line number in the basic source code for error reporting :return: Label object or None if error.
24,187
def sum(arrays, masks=None, dtype=None, out=None, zeros=None, scales=None): return generic_combine(intl_combine.sum_method(), arrays, masks=masks, dtype=dtype, out=out, zeros=zeros, scales=scales)
Combine arrays by addition, with masks and offsets. Arrays and masks are a list of array objects. All input arrays have the same shape. If present, the masks have the same shape also. The function returns an array with one more dimension than the inputs and with size (3, shape). out[0] contains the sum, out[1] the variance and out[2] the number of points used. :param arrays: a list of arrays :param masks: a list of mask arrays, True values are masked :param dtype: data type of the output :param out: optional output, with one more axis than the input arrays :return: sum, variance of the sum and number of points stored Example: >>> import numpy >>> image = numpy.array([[1., 3.], [1., -1.4]]) >>> inputs = [image, image + 1] >>> sum(inputs) array([[[ 1.5, 3.5], [ 1.5, -0.9]], <BLANKLINE> [[ 0.5, 0.5], [ 0.5, 0.5]], <BLANKLINE> [[ 2. , 2. ], [ 2. , 2. ]]])
24,188
def get_event_logs(self, request_filter=None, log_limit=20, iterator=True):
    if iterator:
        # Event_Log/getAllObjects is the SoftLayer API service/method pair
        return self.client.iter_call('Event_Log', 'getAllObjects', filter=request_filter, limit=log_limit)
    return self.client.call('Event_Log', 'getAllObjects', filter=request_filter, limit=log_limit)
Returns a list of event logs Example:: event_mgr = SoftLayer.EventLogManager(env.client) request_filter = event_mgr.build_filter(date_min="01/01/2019", date_max="02/01/2019") logs = event_mgr.get_event_logs(request_filter) for log in logs: print("Event Name: {}".format(log['eventName'])) :param dict request_filter: filter dict :param int log_limit: number of results to get in one API call :param bool iterator: False will only make one API call for log_limit results. True will keep making API calls until all logs have been retreived. There may be a lot of these. :returns: List of event logs. If iterator=True, will return a python generator object instead.
24,189
def _l2rgb(self, mode): self._check_modes(("L", "LA")) bands = ["L"] * 3 if mode[-1] == "A": bands.append("A") data = self.data.sel(bands=bands) data["bands"] = list(mode) return data
Convert from L (black and white) to RGB.
24,190
def filter_off(self, filt=None, analyte=None, samples=None, subset=None, show_status=False): if samples is not None: subset = self.make_subset(samples) samples = self._get_samples(subset) for s in samples: try: self.data[s].filt.off(analyte, filt) except: warnings.warn("filt.off failure in sample " + s) if show_status: self.filter_status(subset=subset) return
Turns data filters off for particular analytes and samples. Parameters ---------- filt : optional, str or array_like Name, partial name or list of names of filters. Supports partial matching. i.e. if 'cluster' is specified, all filters with 'cluster' in the name are activated. Defaults to all filters. analyte : optional, str or array_like Name or list of names of analytes. Defaults to all analytes. samples : optional, array_like or None Which samples to apply this filter to. If None, applies to all samples. Returns ------- None
24,191
def apply(self, func, ids=None, applyto='measurement', noneval=nan,
          setdata=False, output_format='dict', ID=None, **kwargs):
    # Default values and literals were stripped from the source dump;
    # 'measurement', 'dict' and 'collection' are reconstructed from the
    # docstring below.
    if ids is None:
        ids = self.keys()
    else:
        ids = to_list(ids)
    result = dict((i, self[i].apply(func, applyto, noneval, setdata)) for i in ids)
    if output_format == 'collection':
        can_keep_as_collection = all(
            [isinstance(r, self._measurement_class) for r in result.values()])
        if not can_keep_as_collection:
            raise TypeError(
                'output_format="collection" requires func to return {0} instances'.format(
                    self._measurement_class))
        new_collection = self.copy()
        ids_to_remove = [x for x in self.keys() if x not in ids]
        for ids in ids_to_remove:
            new_collection.pop(ids)
        for k, v in new_collection.items():
            new_collection[k] = result[k]
        if ID is not None:
            new_collection.ID = ID
        return new_collection
    else:
        return result
Apply func to each of the specified measurements. Parameters ---------- func : callable Accepts a Measurement object or a DataFrame. ids : hashable| iterable of hashables | None Keys of measurements to which func will be applied. If None is given apply to all measurements. applyto : 'measurement' | 'data' * 'measurement' : apply to measurements objects themselves. * 'data' : apply to measurement associated data noneval : obj Value returned if applyto is 'data' but no data is available. setdata : bool Whether to set the data in the Measurement object. Used only if data is not already set. output_format : ['dict' | 'collection'] * collection : keeps result as collection WARNING: For collection, func should return a copy of the measurement instance rather than the original measurement instance. Returns ------- Dictionary keyed by measurement keys containing the corresponding output of func or returns a collection (if output_format='collection').
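A hedged usage sketch against a hypothetical plate collection; every name other than apply and its keyword arguments is an assumption::

    # Count events in each well, returned as a plain dict keyed by well ID.
    counts = plate.apply(lambda df: df.shape[0], applyto='data', noneval=0)

    # Transform each measurement and keep the result as a collection; note
    # func must return a copy of the measurement for output_format='collection'.
    transformed = plate.apply(lambda m: m.transform('hlog'),
                              output_format='collection', ID='hlog_plate')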
24,192
def execute_task(f, args, kwargs, user_ns):
    fname = getattr(f, '__name__', 'f')
    prefix = "parsl_"
    fname = prefix + "f"
    argname = prefix + "args"
    kwargname = prefix + "kwargs"
    resultname = prefix + "result"
    user_ns.update({fname: f,
                    argname: args,
                    kwargname: kwargs,
                    resultname: resultname})
    code = "{0} = {1}(*{2}, **{3})".format(resultname, fname, argname, kwargname)
    try:
        exec(code, user_ns, user_ns)
    except Exception as e:
        logger.warning("Caught exception; will raise it: {}".format(e))
        raise e
    else:
        return user_ns.get(resultname)
Deserialize the buffer and execute the task.

Returns the result or exception.
24,193
def _get_graph_title(self):
    start_time = datetime.fromtimestamp(int(self.timestamp_list[0]))
    end_time = datetime.fromtimestamp(int(self.timestamp_list[-1]))
    # strftime format stripped in the source dump; a common format assumed
    end_time = end_time.strftime('%Y-%m-%d %H:%M:%S')
    title = "Timespan: %s —— %s" % (start_time, end_time)
    return title
Get the title of the graph.
24,194
def has_callback(obj, handle): callbacks = obj._callbacks if not callbacks: return False if isinstance(callbacks, Node): return handle is callbacks else: return handle in callbacks
Return whether a callback is currently registered for an object.
24,195
def save_keywords(filename, xml):
    tmp_dir = os.path.dirname(filename)
    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)
    with open(filename, "w") as file_desc:
        file_desc.write(xml)
Save keyword XML to filename.
24,196
def set_Name(self, Name, SaveName=None,
             include=None, ForceUpdate=False):
    # dict key stripped in the source dump; 'Name' assumed from context
    self._dall['Name'] = Name
    self.set_SaveName(SaveName=SaveName, include=include, ForceUpdate=ForceUpdate)
Set the Name of the instance, automatically updating the SaveName

The name should be a str without spaces or underscores (removed)
When the name is changed, if SaveName (i.e. the name used for saving)
was not user-defined, it is automatically updated

Parameters
----------
Name :      str
    Name of the instance, without ' ' or '_' (automatically removed)
SaveName :  None / str
    If provided, overrides the default name for saving (not recommended)
include:    list
    Controls how the default SaveName is generated
    Each element of the list is a key str indicating whether an element
    should be present in the SaveName
24,197
def complete_hit(self, text, line, begidx, endidx): return [i for i in PsiturkNetworkShell.hit_commands if \ i.startswith(text)]
Tab-complete hit command.
24,198
def fill_extents(self):
    extents = ffi.new('double[4]')
    cairo.cairo_fill_extents(
        self._pointer, extents + 0, extents + 1, extents + 2, extents + 3)
    self._check_status()
    return tuple(extents)
Computes a bounding box in user-space coordinates covering the area that would be affected, (the "inked" area), by a :meth:`fill` operation given the current path and fill parameters. If the current path is empty, returns an empty rectangle ``(0, 0, 0, 0)``. Surface dimensions and clipping are not taken into account. Contrast with :meth:`path_extents` which is similar, but returns non-zero extents for some paths with no inked area, (such as a simple line segment). Note that :meth:`fill_extents` must necessarily do more work to compute the precise inked areas in light of the fill rule, so :meth:`path_extents` may be more desirable for sake of performance if the non-inked path extents are desired. See :meth:`fill`, :meth:`set_fill_rule` and :meth:`fill_preserve`. :return: A ``(x1, y1, x2, y2)`` tuple of floats: the left, top, right and bottom of the resulting extents, respectively.
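Usage sketch; the ffi/cairo bindings above suggest this wrapper comes from cairocffi, so that import name is an assumption::

    import cairocffi as cairo

    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 100, 100)
    context = cairo.Context(surface)
    context.rectangle(10, 10, 30, 20)
    x1, y1, x2, y2 = context.fill_extents()
    assert (x1, y1, x2, y2) == (10.0, 10.0, 40.0, 30.0)
    context.fill()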
24,199
def setCentralWidget(self, widget): self.setEnabled(widget is not None) self._popupWidget.setCentralWidget(widget)
Sets the central widget for this button. :param widget | <QWidget>