Dataset preview (code/docstring pairs). Columns: an unnamed int64 row index (0 to 389k), code (strings of 26 to 79.6k characters), and docstring (strings of 1 to 46.9k characters).
16,100
def gumbel_softmax_discrete_bottleneck(x, bottleneck_bits, beta=0.25, decay=0.999, epsilon=1e-5, temperature_warmup_steps=150000, hard=False, summary=True): bottleneck_size = 2**bottleneck_bits x_shape = common_layers.shape_list(x) hidden_size = x_shape[-1] means, ema_means, ema_count = get_vq_codebook(bottleneck_size, hidden_size) x = tf.reshape(x, [-1, hidden_size]) bottleneck_size = common_layers.shape_list(means)[0] x_norm_sq = tf.reduce_sum(tf.square(x), axis=-1, keepdims=True) means_norm_sq = tf.reduce_sum(tf.square(means), axis=-1, keepdims=True) scalar_prod = tf.matmul(x, means, transpose_b=True) dist = x_norm_sq + tf.transpose(means_norm_sq) - 2 * scalar_prod class_probs = tf.nn.softmax(dist) log_class_probs = tf.nn.log_softmax(dist) gumbel_samples = gumbel_sample(common_layers.shape_list(dist)) steps = temperature_warmup_steps gumbel_samples *= common_layers.inverse_exp_decay(steps // 5) * 0.5 temperature = 1.2 - common_layers.inverse_lin_decay(steps) temperature = tf.cond( tf.less(tf.random_uniform([]), 0.9), lambda: temperature, lambda: tf.random_uniform([], minval=0.5, maxval=1.0)) gumbel_softmax_samples = tf.nn.softmax( (log_class_probs + gumbel_samples) / temperature) kl = tf.reduce_sum( class_probs * (log_class_probs - tf.log(1.0 / bottleneck_size)), -1) if summary: tf.summary.histogram("KL", tf.reshape(kl, [-1])) if hard: x_means_idx = tf.reshape(tf.argmax(gumbel_softmax_samples, axis=-1), [-1]) x_means_hot = tf.one_hot(x_means_idx, bottleneck_size) x_means_assignments = gumbel_softmax_samples + tf.stop_gradient( x_means_hot - gumbel_softmax_samples) else: x_means_assignments = gumbel_softmax_samples x_means_assignments_flat = tf.reshape(x_means_assignments, [-1, bottleneck_size]) x_means = tf.matmul(x_means_assignments_flat, means) commitment_loss = tf.reduce_mean( tf.squared_difference(x, tf.stop_gradient(x_means))) updated_ema_count = moving_averages.assign_moving_average( ema_count, tf.reduce_sum( tf.reshape(x_means_assignments, shape=[-1, bottleneck_size]), axis=0), decay, zero_debias=False) dw = tf.matmul(x_means_assignments, x, transpose_a=True) updated_ema_means = tf.identity( moving_averages.assign_moving_average( ema_means, dw, decay, zero_debias=False)) n = tf.reduce_sum(updated_ema_count, axis=-1, keepdims=True) updated_ema_count = ( (updated_ema_count + epsilon) / (n + bottleneck_size * epsilon) * n) updated_ema_means /= tf.expand_dims(updated_ema_count, axis=-1) with tf.control_dependencies([commitment_loss]): update_means = means.assign(updated_ema_means) with tf.control_dependencies([update_means]): loss = beta * commitment_loss loss += tf.reduce_mean(kl) x_means_assignments = tf.reshape(x_means_assignments, x_shape[:-1] + [bottleneck_size]) return x_means_assignments, loss
VQ-VAE using Gumbel-Softmax. Different from `gumbel_softmax()` function as this function calculates the KL by using the discrete entropy instead of taking the argmax, and it also uses an exponential moving average to update the codebook while the `gumbel_softmax()` function includes no codebook update. Args: x: A `float`-like `Tensor` containing the latent vectors to be compared to the codebook, whose squared difference is used as the Gumbel-Softmax logits. bottleneck_bits: An `int` that sets the size of the bottleneck in `log_2`. beta: Beta factor for commitment loss (Default: 0.25). decay: Decay factor for exponential moving average (Default: 0.999). epsilon: Small value to avoid dividing by zero in EMA update (Default: 1e-5). temperature_warmup_steps: Number of steps it takes to decay temperature to 0 (Default: 150000). hard: When `True`, we use hard Gumbel-Softmax samples and force discrete latents by taking the argmax. When `False`, we use soft samples, which we treat as codebook weights (Default: False). summary: When `True`, we save histogram summaries of the KL term (Default: True). Returns: x_means_assignments: A `float`-like `Tensor` containing the codebook assignments. When `hard == True`, this is one-hot, containing the arg-max of the Gumbel-Softmax samples (and we use the straight-through gradient). Otherwise, it contains the Gumbel-Softmax samples exactly, which are values from the `(K-1)`-simplex where `K` is the bottleneck size. loss: The loss, which is the sum of the KL between the Gumbel-Softmax and the uniform prior and the commitment loss multiplied by the beta factor. We approximate the KL by using the entropy of a categorical distribution instead of the Gumbel-Softmax.
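The heart of the function above is Gumbel-Softmax sampling with an optional straight-through estimator. A minimal standalone NumPy sketch of that sampling step (illustrative only, not the tensor2tensor API; the logits below are made up):

import numpy as np

def gumbel_softmax(logits, temperature=0.5, hard=True, rng=np.random):
    # Add Gumbel(0, 1) noise to the logits.
    u = rng.uniform(low=1e-9, high=1.0, size=np.shape(logits))
    g = -np.log(-np.log(u))
    y = np.exp((np.asarray(logits) + g) / temperature)
    y /= y.sum(axis=-1, keepdims=True)  # soft, differentiable sample
    if hard:
        # Straight-through: the forward pass uses the one-hot argmax; an
        # autodiff framework would route gradients through the soft sample y.
        return np.eye(y.shape[-1])[y.argmax(axis=-1)]
    return y

print(gumbel_softmax(np.log([0.1, 0.2, 0.7]), temperature=0.3))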
16,101
def plot_vxx(self, colorbar=True, cb_orientation='vertical', cb_label=None, ax=None, show=True, fname=None, **kwargs): if cb_label is None: cb_label = self._vxx_label if ax is None: fig, axes = self.vxx.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, show=False, **kwargs) if show: fig.show() if fname is not None: fig.savefig(fname) return fig, axes else: self.vxx.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, ax=ax, **kwargs)
Plot the Vxx component of the tensor. Usage ----- x.plot_vxx([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation, cb_label, show, fname]) Parameters ---------- tick_interval : list or tuple, optional, default = [30, 30] Intervals to use when plotting the x and y ticks. If set to None, ticks will not be plotted. xlabel : str, optional, default = 'longitude' Label for the longitude axis. ylabel : str, optional, default = 'latitude' Label for the latitude axis. ax : matplotlib axes object, optional, default = None A single matplotlib axes object where the plot will appear. colorbar : bool, optional, default = False If True, plot a colorbar. cb_orientation : str, optional, default = 'vertical' Orientation of the colorbar: either 'vertical' or 'horizontal'. cb_label : str, optional, default = '$V_{xx}$' Text label for the colorbar. show : bool, optional, default = True If True, plot the image to the screen. fname : str, optional, default = None If present, and if axes is not specified, save the image to the specified file. kwargs : optional Keyword arguments that will be sent to the SHGrid.plot() and plt.imshow() methods.
16,102
def generate_cloudformation_args(stack_name, parameters, tags, template, capabilities=DEFAULT_CAPABILITIES, change_set_type=None, service_role=None, stack_policy=None, change_set_name=None): args = { "StackName": stack_name, "Parameters": parameters, "Tags": tags, "Capabilities": capabilities, } if service_role: args["RoleARN"] = service_role if change_set_name: args["ChangeSetName"] = change_set_name if change_set_type: args["ChangeSetType"] = change_set_type if template.url: args["TemplateURL"] = template.url else: args["TemplateBody"] = template.body if not change_set_name: args.update(generate_stack_policy_args(stack_policy)) return args
Used to generate the args for common cloudformation API interactions. This is used for create_stack/update_stack/create_change_set calls in cloudformation. Args: stack_name (str): The fully qualified stack name in Cloudformation. parameters (list): A list of dictionaries that defines the parameter list to be applied to the Cloudformation stack. tags (list): A list of dictionaries that defines the tags that should be applied to the Cloudformation stack. template (:class:`stacker.provider.base.Template`): The template object. capabilities (list, optional): A list of capabilities to use when updating Cloudformation. change_set_type (str, optional): An optional change set type to use with create_change_set. service_role (str, optional): An optional service role to use when interacting with Cloudformation. stack_policy (:class:`stacker.providers.base.Template`): A template object representing a stack policy. change_set_name (str, optional): An optional change set name to use with create_change_set. Returns: dict: A dictionary of arguments to be used in the Cloudformation API call.
16,103
def equal_distribution_folds(y, folds=2): n, classes = y.shape dist = y.sum(axis=0).astype(float) dist /= dist.sum() index_list = [] fold_dist = np.zeros((folds, classes), dtype=float) for _ in range(folds): index_list.append([]) for i in range(n): if i < folds: target_fold = i else: normed_folds = fold_dist.T / fold_dist.sum(axis=1) how_off = normed_folds.T - dist target_fold = np.argmin( np.dot((y[i] - .5).reshape(1, -1), how_off.T)) fold_dist[target_fold] += y[i] index_list[target_fold].append(i) logger.debug("Fold distributions:") logger.debug(fold_dist) return index_list
Creates `folds` number of indices that has roughly balanced multi-label distribution. Args: y: The multi-label outputs. folds: The number of folds to create. Returns: `folds` number of indices that have roughly equal multi-label distributions.
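A quick sanity check of the balancing behaviour (the label matrix below is synthetic, and the function assumes a module-level logger is configured):

import numpy as np

y = (np.random.rand(100, 3) < [0.7, 0.2, 0.1]).astype(int)  # imbalanced multi-label data
folds = equal_distribution_folds(y, folds=4)
for idx in folds:
    print(len(idx), y[idx].mean(axis=0))  # per-fold sizes and label frequencies stay close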
16,104
def disconnect(self): if self.sascfg.mode != 'IOM': res = "This method is only available with the IOM access method" else: res = self._io.disconnect() return res
This method disconnects an IOM session to allow for reconnecting when switching networks. See the Advanced Topics section of the documentation for details.
16,105
def chown(hdfs_path, user=None, group=None, hdfs_user=None): user = user or '' group = group or '' host, port, path_ = path.split(hdfs_path, hdfs_user) with hdfs(host, port, hdfs_user) as fs: return fs.chown(path_, user=user, group=group)
See :meth:`fs.hdfs.chown`.
16,106
def delete(self, cls, rid, user=''): self.validate_record_type(cls) deletedcount = self.db.delete(cls, {ID: rid}) if deletedcount < 1: raise KeyError('No record {0}/{1}'.format(cls, rid))
Delete a record by id. `user` currently unused. Would be used with soft deletes. >>> s = teststore() >>> s.create('tstoretest', {'id': '1', 'name': 'Toto'}) >>> len(s.list('tstoretest')) 1 >>> s.delete('tstoretest', '1') >>> len(s.list('tstoretest')) 0 >>> s.delete('tstoretest', '1') Traceback (most recent call last): ... KeyError: 'No record tstoretest/1'
16,107
def get_matching_multiplex_port(self,name): matching_multiplex_ports = [self.__getattribute__(p) for p in self._portnames if name.startswith(p) and name != p and hasattr(self, p) and self.__getattribute__(p).is_multiplex ] for port in matching_multiplex_ports: return port return None
Given a name, figure out if a multiplex port prefixes this name and return it. Otherwise return None.
16,108
def exists(self, value=None): try: if not value: value = self.get() except (AttributeError, DoesNotExist): return False else: return self.connection.sismember(self.collection_key, value)
Return True if the given pk value exists for the given class. If no value is given, we use the value of the current field, which is the value of the "_pk" attribute of its instance.
16,109
def _osquery_cmd(table, attrs=None, where=None, format='json'):  # default format is an assumption
    ret = {'result': True}
    if attrs:
        if isinstance(attrs, list):
            valid_attrs = _table_attrs(table)
            if valid_attrs:
                for a in attrs:
                    if a not in valid_attrs:
                        ret['result'] = False
                        # message wording approximate; the original literal was lost
                        ret['comment'] = '{0} is not a valid attribute for table {1}'.format(a, table)
                        return ret
                _attrs = ','.join(attrs)
            else:
                ret['result'] = False
                ret['comment'] = 'Invalid table {0}'.format(table)
                return ret
        else:
            ret['comment'] = 'attrs must be specified as a list'
            ret['result'] = False
            return ret
    else:
        _attrs = '*'
    sql = 'select {0} from {1}'.format(_attrs, table)
    if where:
        sql = '{0} where {1}'.format(sql, where)
    sql = '{0};'.format(sql)
    res = _osquery(sql)
    if res['result']:
        ret['data'] = res['data']
    else:
        ret['comment'] = res['error']
    return ret
Helper function to run osquery queries
16,110
def toProtocolElement(self): gaContinuousSet = protocol.ContinuousSet() gaContinuousSet.id = self.getId() gaContinuousSet.dataset_id = self.getParentContainer().getId() gaContinuousSet.reference_set_id = pb.string( self._referenceSet.getId()) gaContinuousSet.name = self._name gaContinuousSet.source_uri = self._sourceUri attributes = self.getAttributes() for key in attributes: gaContinuousSet.attributes.attr[key] \ .values.extend(protocol.encodeValue(attributes[key])) return gaContinuousSet
Returns the representation of this ContinuousSet as the corresponding ProtocolElement.
16,111
def entropy(s): return -sum( p*np.log(p) for i in range(len(s)) for p in [prop(s[i], s)] )
Calculate the Entropy Impurity for a list of samples.
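`prop` is not shown above; it presumably returns the proportion of samples in `s` that equal a given value. Under that assumption, a self-contained equivalent that iterates distinct labels (note the function above iterates samples, which weights each label by its count):

import numpy as np

def entropy_impurity(samples):
    # -sum over distinct labels of p * log(p)
    _, counts = np.unique(samples, return_counts=True)
    p = counts / counts.sum()
    return float(-(p * np.log(p)).sum())

print(entropy_impurity(['a', 'a', 'b', 'c']))  # ~1.04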
16,112
def _plot_data_to_ax( data_all, ax1, e_unit=None, sed=True, ylabel=None, ulim_opts={}, errorbar_opts={}, ): if e_unit is None: e_unit = data_all["energy"].unit f_unit, sedf = sed_conversion( data_all["energy"], data_all["flux"].unit, sed ) if "group" not in data_all.keys(): data_all["group"] = np.zeros(len(data_all)) groups = np.unique(data_all["group"]) for g in groups: data = data_all[np.where(data_all["group"] == g)] _, sedfg = sed_conversion(data["energy"], data["flux"].unit, sed) color = color_cycle[int(g) % len(color_cycle)] marker = marker_cycle[int(g) % len(marker_cycle)] ul = data["ul"] notul = ~ul yerr_lo = data["flux_error_lo"][notul] y = data["flux"][notul].to(yerr_lo.unit) bad_err = np.where((y - yerr_lo) <= 0.0) yerr_lo[bad_err] = y[bad_err] * (1.0 - 1e-7) yerr = u.Quantity((yerr_lo, data["flux_error_hi"][notul])) xerr = u.Quantity((data["energy_error_lo"], data["energy_error_hi"])) opts = dict( zorder=100, marker=marker, ls="", elinewidth=2, capsize=0, mec=color, mew=0.1, ms=5, color=color, ) opts.update(**errorbar_opts) ax1.errorbar( data["energy"][notul].to(e_unit).value, (data["flux"][notul] * sedfg[notul]).to(f_unit).value, yerr=(yerr * sedfg[notul]).to(f_unit).value, xerr=xerr[:, notul].to(e_unit).value, **opts ) if np.any(ul): if "elinewidth" in errorbar_opts: ulim_opts["elinewidth"] = errorbar_opts["elinewidth"] _plot_ulims( ax1, data["energy"][ul].to(e_unit).value, (data["flux"][ul] * sedfg[ul]).to(f_unit).value, (xerr[:, ul]).to(e_unit).value, color, **ulim_opts ) ax1.set_xscale("log") ax1.set_yscale("log") xmin = 10 ** np.floor( np.log10( np.min(data["energy"] - data["energy_error_lo"]).to(e_unit).value ) ) xmax = 10 ** np.ceil( np.log10( np.max(data["energy"] + data["energy_error_hi"]).to(e_unit).value ) ) ax1.set_xlim(xmin, xmax) notul = ~data_all["ul"] if np.any(data_all["flux_error_lo"][notul] >= data_all["flux"][notul]): elo = (data_all["flux"][notul] * sedf[notul]).to(f_unit).value - ( data_all["flux_error_lo"][notul] * sedf[notul] ).to(f_unit).value gooderr = np.where( data_all["flux_error_lo"][notul] < data_all["flux"][notul] ) ymin = 10 ** np.floor(np.log10(np.min(elo[gooderr]))) ax1.set_ylim(bottom=ymin) if ylabel is None: if sed: ax1.set_ylabel( r"$E^2\mathrm{{d}}N/\mathrm{{d}}E$" " [{0}]".format(u.Unit(f_unit).to_string("latex_inline")) ) else: ax1.set_ylabel( r"$\mathrm{{d}}N/\mathrm{{d}}E$" " [{0}]".format(u.Unit(f_unit).to_string("latex_inline")) ) else: ax1.set_ylabel(ylabel)
Plots data errorbars and upper limits onto ax. X label is left to plot_data and plot_fit because they depend on whether residuals are plotted.
16,113
def to_python(self, value): if value is not None: try: value = dbsafe_decode(value, self.compress) except: if isinstance(value, PickledObject): raise return value
B64decode and unpickle the object, optionally decompressing it. If an error is raised in de-pickling and we're sure the value is a definite pickle, the error is allowed to propagate. If we aren't sure if the value is a pickle or not, then we catch the error and return the original value instead.
16,114
def obj2unicode(obj): if isinstance(obj, unicode_type): return obj elif isinstance(obj, bytes_type): try: return unicode_type(obj, 'utf-8') except UnicodeDecodeError as strerror: sys.stderr.write("UnicodeDecodeError exception for string '%s': %s\n" % (obj, strerror)) return unicode_type(obj, 'utf-8', 'replace') else: return unicode_type(obj)
Return a unicode representation of a python object
16,115
def adjoint(self): return Laplacian(self.range, self.domain, pad_mode=self.pad_mode, pad_const=0)
Return the adjoint operator. The Laplacian is self-adjoint, so this returns an operator equal to ``self`` (defined on the swapped domain and range).
16,116
def update_handler(Model, name=None, **kwds):
    async def action_handler(service, action_type, payload, props, notify=True, **kwds):
        if action_type == get_crud_action('update', name or Model):
            try:
                message_props = {}
                # 'correlation_id' key is an assumption; the original literal was lost
                if 'correlation_id' in props:
                    message_props['correlation_id'] = props['correlation_id']
                pk_field = Model.primary_key()
                if not pk_field.name in payload:
                    raise ValueError("Must specify the pk of the model when updating")
                model = Model.select().where(pk_field == payload[pk_field.name]).get()
                payload.pop(pk_field.name, None)
                for key, value in payload.items():
                    setattr(model, key, value)
                model.save()
                if notify:
                    await service.event_broker.send(
                        payload=ModelSerializer().serialize(model),
                        action_type=change_action_status(action_type, success_status()),
                        **message_props
                    )
            except Exception as err:
                if notify:
                    await service.event_broker.send(
                        payload=str(err),
                        action_type=change_action_status(action_type, error_status()),
                        **message_props
                    )
                else:
                    raise err
    return action_handler
This factory returns an action handler that updates an instance of the specified model when an update action is received, assuming the action follows nautilus conventions. Args: Model (nautilus.BaseModel): The model to update when the action is received. Returns: function(type, payload): The action handler for this model
16,117
def check(self, dsm, **kwargs):
    layered_architecture = True
    messages = []
    categories = dsm.categories
    dsm_size = dsm.size[0]
    if not categories:
        categories = [''] * dsm_size
    for i in range(0, dsm_size - 1):
        for j in range(i + 1, dsm_size):
            # the exempted category literal was lost in extraction;
            # 'broker' and the '.'-split are assumptions
            if (categories[i] != 'broker'
                    and categories[j] != 'broker'
                    and dsm.entities[i].split('.')[0] != dsm.entities[j].split('.')[0]):
                if dsm.data[i][j] > 0:
                    layered_architecture = False
                    # message wording approximate
                    messages.append('Dependency from %s to %s breaks the layered architecture' % (
                        dsm.entities[i], dsm.entities[j]))
    return layered_architecture, '\n'.join(messages)
Check layered architecture. Args: dsm (:class:`DesignStructureMatrix`): the DSM to check. Returns: bool, str: True if layered architecture else False, messages
16,118
def _call_command(self, name, *args, **kwargs):
    if self.dynamic_version_of is None:
        # message taken from the docstring below
        raise ImplementationError('Commands can only be applied on dynamic versions')
    try:
        result = super(DynamicFieldMixin, self)._call_command(name, *args, **kwargs)
    except:
        raise
    else:
        # the two excluded command names were lost in extraction;
        # 'delete'/'hdel' are assumptions
        if name in self.available_modifiers and name not in ('delete', 'hdel'):
            self._inventory.sadd(self.dynamic_part)
        return result
If a command is called for the main field, without dynamic part, an ImplementationError is raised: commands can only be applied on dynamic versions. On dynamic versions, if the command is a modifier, we add the version in the inventory.
16,119
def render(gpg_data, saltenv='base', sls='', argline='', **kwargs): if not _get_gpg_exec(): raise SaltRenderError('GPG unavailable') log.debug('Reading GPG keys from: %s', _get_key_dir()) translate_newlines = kwargs.get('translate_newlines', False) return _decrypt_object(gpg_data, translate_newlines=translate_newlines)
Create a gpg object given a gpg_keydir, and then use it to try to decrypt the data to be rendered.
16,120
def flush(self):
    # debug message text was lost in extraction; placeholders used
    self.logger.debug('flushing queue')
    self.queue.join()
    self.logger.debug('queue flushed')
This only needs to be called manually from unit tests
16,121
def doc_inherit(parent, style="parent"): merge_func = store[style] decorator = _DocInheritDecorator decorator.doc_merger = staticmethod(merge_func) return decorator(parent)
Returns a function/method decorator that, given `parent`, updates the docstring of the decorated function/method based on the specified style and the corresponding attribute of `parent`. Parameters ---------- parent : Union[str, Any] The docstring, or object of which the docstring is utilized as the parent docstring during the docstring merge. style : Union[Any, Callable[[str, str], str]], optional (default: "parent") A valid inheritance-scheme style ID or function that merges two docstrings. Returns ------- custom_inherit.DocInheritDecorator Notes ----- `doc_inherit` should always be used as the inner-most decorator when being used in conjunction with other decorators, such as `@property`, `@staticmethod`, etc.
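Typical use, merging a parent method's docstring into an override (a sketch; assumes the numpy merge style is registered in `store`):

class Parent:
    def meth(self, x):
        """Do a thing.

        Parameters
        ----------
        x : int
        """

class Child(Parent):
    @doc_inherit(Parent.meth, style="numpy")
    def meth(self, x):
        """Notes
        -----
        Child-specific notes only; the rest is inherited.
        """

print(Child.meth.__doc__)  # parent sections merged with the child's Notes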
16,122
def read_plain_double(file_obj, count): return struct.unpack("<{}d".format(count).encode("utf-8"), file_obj.read(8 * count))
Read `count` 64-bit float (double) using the plain encoding.
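A round-trip check with an in-memory buffer (the function only needs an object with a read() method):

import io
import struct

buf = io.BytesIO(struct.pack("<3d", 1.0, 2.5, -3.75))
print(read_plain_double(buf, 3))  # (1.0, 2.5, -3.75)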
16,123
def gen_signature(priv, pub, signature_path, auto_create=False, keysize=None): skey = get_key(__opts__) return skey.gen_keys_signature(priv, pub, signature_path, auto_create, keysize)
Generate master public-key-signature
16,124
def common_bootsrap_payload(self): messages = get_flashed_messages(with_categories=True) locale = str(get_locale()) return { 'flash_messages': messages, 'conf': {k: conf.get(k) for k in FRONTEND_CONF_KEYS}, 'locale': locale, 'language_pack': get_language_pack(locale), 'feature_flags': get_feature_flags(), }
Common data always sent to the client
16,125
def get_by_id(self, symbol: str) -> SymbolMap: return self.query.filter(SymbolMap.in_symbol == symbol).first()
Finds the map by in-symbol
16,126
def setup_log(name):
    # The original body was garbled in extraction; this is a minimal
    # equivalent. In XBMC mode the original instead routed records to
    # XBMC's log, prefixing messages with '[%s] ' % name.
    import logging
    _log = logging.getLogger(name)
    _log.setLevel(logging.DEBUG)
    handler = logging.StreamHandler()  # stderr when running in the CLI
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - [%(name)s] %(message)s'))
    _log.addHandler(handler)
    return _log
Returns a logging instance for the provided name. The returned object is an instance of logging.Logger. Logged messages will be printed to stderr when running in the CLI, or forwarded to XBMC's log when running in XBMC mode.
16,127
def linear_set_layer(layer_size, inputs, context=None, activation_fn=tf.nn.relu, dropout=0.0, name=None): with tf.variable_scope( name, default_name="linear_set_layer", values=[inputs]): outputs = conv1d(inputs, layer_size, 1, activation=None, name="set_conv") if context is not None: if len(context.get_shape().as_list()) == 2: context = tf.expand_dims(context, axis=1) cont_tfm = conv1d( context, layer_size, 1, activation=None, name="cont_conv") outputs += cont_tfm if activation_fn is not None: outputs = activation_fn(outputs) if dropout != 0.0: outputs = tf.nn.dropout(outputs, 1.0 - dropout) return outputs
Basic layer type for doing funky things with sets. Applies a linear transformation to each element in the input set. If a context is supplied, a linear transformation of the context is added to the transformed inputs. e.g. One can use global_pool_1d to get a representation of the set which can then be used as the context for the next layer. TODO: Add bias add (or control the biases used). Args: layer_size: Dimension to transform the input vectors to. inputs: A tensor of shape [batch_size, sequence_length, input_dims] containing the sequences of input vectors. context: A tensor of shape [batch_size, context_dims] containing a global statistic about the set. activation_fn: The activation function to use. dropout: Dropout probability. name: name. Returns: Tensor of shape [batch_size, sequence_length, output_dims] containing the sequences of transformed vectors.
16,128
def _create_dock(self):
    from safe.gui.widgets.dock import Dock
    self.dock_widget = Dock(self.iface)
    self.dock_widget.setObjectName('InaSAFE-dock')  # object name is an assumption
    self.iface.addDockWidget(Qt.RightDockWidgetArea, self.dock_widget)
    legend_tab = self.iface.mainWindow().findChild(QApplication, 'Legend')
    if legend_tab:
        self.iface.mainWindow().tabifyDockWidget(
            legend_tab, self.dock_widget)
        self.dock_widget.raise_()
Create dockwidget and tabify it with the legend.
16,129
def insert(self, **fields): if self.conflict_target or self.conflict_action: compiler = self._build_insert_compiler([fields]) rows = compiler.execute_sql(return_id=True) pk_field_name = self.model._meta.pk.name return rows[0][pk_field_name] return super().create(**fields).pk
Creates a new record in the database. This allows specifying custom conflict behavior using .on_conflict(). If no special behavior was specified, this uses the normal Django create(..) Arguments: fields: The fields of the row to create. Returns: The primary key of the record that was created.
16,130
async def serialize_properties(inputs: 'Inputs', property_deps: Dict[str, List['Resource']], input_transformer: Optional[Callable[[str], str]] = None) -> struct_pb2.Struct: struct = struct_pb2.Struct() for k, v in inputs.items(): deps = [] result = await serialize_property(v, deps, input_transformer) if result is not None: translated_name = k if input_transformer is not None: translated_name = input_transformer(k) log.debug(f"top-level input property translated: {k} -> {translated_name}") struct[translated_name] = result property_deps[translated_name] = deps return struct
Serializes an arbitrary Input bag into a Protobuf structure, keeping track of the list of dependent resources in the `deps` list. Serializing properties is inherently async because it awaits any futures that are contained transitively within the input bag.
16,131
@property
def atime(self):
    # @property assumed: returning self.atime after refreshing the
    # cached stat only terminates if this is a property
    try:
        return self._stat.st_atime
    except AttributeError:
        self._stat = self.stat()
        return self.atime
Get the most recent access time as a timestamp.
16,132
def read_xml(cls, url, markup, game): return Players._read_objects(MlbamUtil.find_xml("".join([url, cls.FILENAME]), markup), game)
read xml object :param url: contents url :param markup: markup provider :param game: MLBAM Game object :return: pitchpx.game.players.Players object
16,133
def IsTemplateParameterList(clean_lines, linenum, column):
    (_, startline, startpos) = ReverseCloseExpression(
        clean_lines, linenum, column)
    # regex restored from context: the text before the matching '<' must
    # end with the 'template' keyword
    if (startpos > -1 and
            Search(r'\btemplate\s*$', clean_lines.elided[startline][0:startpos])):
        return True
    return False
Check if the token ending on (linenum, column) is the end of template<>. Args: clean_lines: A CleansedLines instance containing the file. linenum: the number of the line to check. column: end column of the token to check. Returns: True if this token is end of a template parameter list, False otherwise.
16,134
def plan(self): for invoiceitem in self.invoiceitems.all(): if invoiceitem.plan: return invoiceitem.plan if self.subscription: return self.subscription.plan
Gets the associated plan for this invoice. In order to provide a consistent view of invoices, the plan object should be taken from the first invoice item that has one, rather than using the plan associated with the subscription. Subscriptions (and their associated plan) are updated by the customer and represent what is current, but invoice items are immutable within the invoice and stay static/unchanged. In other words, a plan retrieved from an invoice item will represent the plan as it was at the time an invoice was issued. The plan retrieved from the subscription will be the currently active plan. :returns: The associated plan for the invoice. :rtype: ``djstripe.Plan``
16,135
def find_aliases(self, seq_id=None, namespace=None, alias=None, current_only=True, translate_ncbi_namespace=None): clauses = [] params = [] def eq_or_like(s): return "like" if "%" in s else "=" if translate_ncbi_namespace is None: translate_ncbi_namespace = self.translate_ncbi_namespace if alias is not None: clauses += ["alias {} ?".format(eq_or_like(alias))] params += [alias] if namespace is not None: if namespace == "RefSeq": namespace = "NCBI" clauses += ["namespace {} ?".format(eq_or_like(namespace))] params += [namespace] if seq_id is not None: clauses += ["seq_id {} ?".format(eq_or_like(seq_id))] params += [seq_id] if current_only: clauses += ["is_current = 1"] cols = ["seqalias_id", "seq_id", "alias", "added", "is_current"] if translate_ncbi_namespace: cols += ["case namespace when 'NCBI' then 'RefSeq' else namespace end as namespace"] else: cols += ["namespace"] sql = "select {cols} from seqalias".format(cols=", ".join(cols)) if clauses: sql += " where " + " and ".join("(" + c + ")" for c in clauses) sql += " order by seq_id, namespace, alias" _logger.debug("Executing: " + sql) return self._db.execute(sql, params)
returns iterator over alias annotation records that match criteria The arguments, all optional, restrict the records that are returned. Without arguments, all aliases are returned. If arguments contain %, the `like` comparison operator is used. Otherwise arguments must match exactly.
16,136
def get_tokens(self, node, include_extra=False): return self.token_range(node.first_token, node.last_token, include_extra=include_extra)
Yields all tokens making up the given node. If include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT.
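End-to-end, this is the asttokens workflow (standard asttokens usage, shown as a sketch):

import ast
import asttokens

source = "x = foo(1 + 2)"
atok = asttokens.ASTTokens(source, parse=True)
call = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Call))
print([t.string for t in atok.get_tokens(call)])  # ['foo', '(', '1', '+', '2', ')']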
16,137
def update_image(self, name): api = self.doapi_manager return api._image(api.request(self.url, method='PUT', data={"name": name})["image"])
Update (i.e., rename) the image :param str name: the new name for the image :return: an updated `Image` object :rtype: Image :raises DOAPIError: if the API endpoint replies with an error
16,138
def WriteSignedBinaryReferences(self, binary_id, references, cursor=None):
    args = {
        "binary_type": binary_id.binary_type.SerializeToDataStore(),
        "binary_path": binary_id.path,
        "binary_path_hash": mysql_utils.Hash(binary_id.path),
        "blob_references": references.SerializeToString()
    }
    # SQL restored from context; table name and upsert clause are assumptions
    query = ("INSERT INTO signed_binary_references {cols} VALUES {vals} "
             "ON DUPLICATE KEY UPDATE blob_references = VALUES(blob_references)").format(
        cols=mysql_utils.Columns(args),
        vals=mysql_utils.NamedPlaceholders(args))
    cursor.execute(query, args)
Writes blob references for a signed binary to the DB.
16,139
def _spa_python_import(how):
    from pvlib import spa
    using_numba = spa.USE_NUMBA
    # warning text and environment-variable name are assumptions
    if how == 'numpy' and using_numba:
        warnings.warn('Reloading spa to use numpy')
        os.environ['PVLIB_USE_NUMBA'] = '0'
        spa = reload(spa)
        del os.environ['PVLIB_USE_NUMBA']
    elif how == 'numba' and not using_numba:
        warnings.warn('Reloading spa to use numba')
        os.environ['PVLIB_USE_NUMBA'] = '1'
        spa = reload(spa)
        del os.environ['PVLIB_USE_NUMBA']
    elif how != 'numba' and how != 'numpy':
        raise ValueError("how must be either 'numba' or 'numpy'")
    return spa
Compile spa.py appropriately
16,140
def execute_loaders(self, env=None, silent=None, key=None, filename=None): if key is None: default_loader(self, self._defaults) env = (env or self.current_env).upper() silent = silent or self.SILENT_ERRORS_FOR_DYNACONF settings_loader( self, env=env, silent=silent, key=key, filename=filename ) self.load_extra_yaml(env, silent, key) enable_external_loaders(self) for loader in self.loaders: self.logger.debug("Dynaconf executing: %s", loader.__name__) loader.load(self, env, silent=silent, key=key) self.load_includes(env, silent=silent, key=key) self.logger.debug("Loaded Files: %s", deduplicate(self._loaded_files))
Execute all internal and registered loaders :param env: The environment to load :param silent: If loading errors are silenced :param key: if provided load a single key :param filename: optional custom filename to load
16,141
def _easy_facetgrid(data, plotfunc, kind, x=None, y=None, row=None, col=None,
                    col_wrap=None, sharex=True, sharey=True, aspect=None,
                    size=None, subplot_kws=None, **kwargs):
    ax = kwargs.pop('ax', None)
    figsize = kwargs.pop('figsize', None)
    if ax is not None:
        raise ValueError("Can't use axes when making faceted plots.")
    if figsize is not None and size is not None:
        raise ValueError('cannot provide both `figsize` and `size` arguments')
    # FacetGrid construction restored from context
    g = FacetGrid(data=data, col=col, row=row, col_wrap=col_wrap,
                  sharex=sharex, sharey=sharey, figsize=figsize,
                  aspect=aspect, size=size, subplot_kws=subplot_kws)
    if kind == 'line':
        return g.map_dataarray_line(plotfunc, x, y, **kwargs)
    if kind == 'dataarray':
        return g.map_dataarray(plotfunc, x, y, **kwargs)
Convenience method to call xarray.plot.FacetGrid from 2d plotting methods. kwargs are the arguments to the 2d plotting method.
16,142
def create_module(clear_target, target):
    if os.path.exists(target):
        if clear_target:
            shutil.rmtree(target)
        else:
            log("Target exists! Use --clear to delete it first.",
                emitter='MANAGE')  # emitter name is an assumption
            sys.exit(2)
    done = False
    info = None
    while not done:
        info = _ask_questionnaire()
        pprint(info)
        # prompt text and defaults are assumptions; the literals were lost
        done = _ask('Is the above correct?', default='y', data_type='bool')
    augmented_info = _augment_info(info)
    log("Constructing module %(plugin_name)s" % info)
    _construct_module(augmented_info, target)
Creates a new template HFOS plugin module
16,143
def calculate_signatures(self): if not self.signing_algorithm: return [] algo_id = {'sha1': 1, 'sha384': 2}[self.signing_algorithm] hashers = [(algo_id, make_hasher(algo_id))] for block in get_signature_data(self.fileobj, self.filesize): [h.update(block) for (_, h) in hashers] signatures = [(algo_id, sign_hash(self.signing_key, h.finalize(), h.algorithm.name)) for (algo_id, h) in hashers] return signatures
Calculate the signatures for this MAR file. Returns: A list of signature tuples: [(algorithm_id, signature_data), ...]
16,144
def parse_parameter_group(self, global_params, region, parameter_group):
    # Redshift key names restored from the boto3 response format
    pg_name = parameter_group.pop('ParameterGroupName')
    pg_id = self.get_non_aws_id(pg_name)
    parameter_group['name'] = pg_name
    parameter_group['parameters'] = {}
    api_client = api_clients[region]
    parameters = handle_truncated_response(
        api_client.describe_cluster_parameters,
        {'ParameterGroupName': pg_name}, ['Parameters'])['Parameters']
    for parameter in parameters:
        param = {}
        param['value'] = parameter['ParameterValue']
        param['source'] = parameter['Source']
        parameter_group['parameters'][parameter['ParameterName']] = param
    (self).parameter_groups[pg_id] = parameter_group
Parse a single Redshift parameter group and fetch all of its parameters :param global_params: Parameters shared for all regions :param region: Name of the AWS region :param parameter_group: Parameter group
16,145
def set_status(self, status, msg): if len(msg) > 2000: msg = msg[:2000] msg += "\n... snip ...\n" if self.status == self.S_LOCKED or status == self.S_LOCKED: err_msg = ( "Locked files must be explicitly unlocked before calling set_status but\n" "task.status = %s, input status = %s" % (self.status, status)) raise RuntimeError(err_msg) status = Status.as_status(status) changed = True if hasattr(self, "_status"): changed = (status != self._status) self._status = status if status == self.S_RUN: if self.datetimes.start is None: self.datetimes.start = datetime.datetime.now() if changed: if status == self.S_SUB: self.datetimes.submission = datetime.datetime.now() self.history.info("Submitted with MPI=%s, Omp=%s, Memproc=%.1f [Gb] %s " % ( self.mpi_procs, self.omp_threads, self.mem_per_proc.to("Gb"), msg)) elif status == self.S_OK: self.history.info("Task completed %s", msg) elif status == self.S_ABICRITICAL: self.history.info("Status set to S_ABI_CRITICAL due to: %s", msg) else: self.history.info("Status changed to %s. msg: %s", status, msg) if status == self.S_DONE: self._on_done() if status == self.S_OK: if not self.finalized: self._on_ok() if self.gc is not None and self.gc.policy == "task": self.clean_output_files() if self.status == self.S_OK: self.send_signal(self.S_OK) return status
Set and return the status of the task. Args: status: Status object or string representation of the status msg: string with human-readable message used in the case of errors.
16,146
def load_heartrate(as_series=False): rslt = np.array([84.2697, 84.2697, 84.0619, 85.6542, 87.2093, 87.1246, 86.8726, 86.7052, 87.5899, 89.1475, 89.8204, 89.8204, 90.4375, 91.7605, 93.1081, 94.3291, 95.8003, 97.5119, 98.7457, 98.904, 98.3437, 98.3075, 98.8313, 99.0789, 98.8157, 98.2998, 97.7311, 97.6471, 97.7922, 97.2974, 96.2042, 95.2318, 94.9367, 95.0867, 95.389, 95.5414, 95.2439, 94.9415, 95.3557, 96.3423, 97.1563, 97.4026, 96.7028, 96.5516, 97.9837, 98.9879, 97.6312, 95.4064, 93.8603, 93.0552, 94.6012, 95.8476, 95.7692, 95.9236, 95.7692, 95.9211, 95.8501, 94.6703, 93.0993, 91.972, 91.7821, 91.7911, 90.807, 89.3196, 88.1511, 88.7762, 90.2265, 90.8066, 91.2284, 92.4238, 93.243, 92.8472, 92.5926, 91.7778, 91.2974, 91.6364, 91.2952, 91.771, 93.2285, 93.3199, 91.8799, 91.2239, 92.4055, 93.8716, 94.5825, 94.5594, 94.9453, 96.2412, 96.6879, 95.8295, 94.7819, 93.4731, 92.7997, 92.963, 92.6996, 91.9648, 91.2417, 91.9312, 93.9548, 95.3044, 95.2511, 94.5358, 93.8093, 93.2287, 92.2065, 92.1588, 93.6376, 94.899, 95.1592, 95.2415, 95.5414, 95.0971, 94.528, 95.5887, 96.4715, 96.6158, 97.0769, 96.8531, 96.3947, 97.4291, 98.1767, 97.0148, 96.044, 95.9581, 96.4814, 96.5211, 95.3629, 93.5741, 92.077, 90.4094, 90.1751, 91.3312, 91.2883, 89.0592, 87.052, 86.6226, 85.7889, 85.6348, 85.3911, 83.8064, 82.8729, 82.6266, 82.645, 82.645, 82.645, 82.645, 82.645, 82.645, 82.645, 82.645]) if as_series: return pd.Series(rslt) return rslt
Uniform heart-rate data. A sample of heartrate data borrowed from an `MIT database <http://ecg.mit.edu/time-series/>`_. The sample consists of 150 evenly spaced (0.5 seconds) heartrate measurements. Parameters ---------- as_series : bool, optional (default=False) Whether to return a Pandas series. If False, will return a 1d numpy array. Returns ------- rslt : array-like, shape=(n_samples,) The heartrate vector. Examples -------- >>> from pmdarima.datasets import load_heartrate >>> load_heartrate() array([84.2697, 84.2697, 84.0619, 85.6542, 87.2093, 87.1246, 86.8726, 86.7052, 87.5899, 89.1475, 89.8204, 89.8204, 90.4375, 91.7605, 93.1081, 94.3291, 95.8003, 97.5119, 98.7457, 98.904 , 98.3437, 98.3075, 98.8313, 99.0789, 98.8157, 98.2998, 97.7311, 97.6471, 97.7922, 97.2974, 96.2042, 95.2318, 94.9367, 95.0867, 95.389 , 95.5414, 95.2439, 94.9415, 95.3557, 96.3423, 97.1563, 97.4026, 96.7028, 96.5516, 97.9837, 98.9879, 97.6312, 95.4064, 93.8603, 93.0552, 94.6012, 95.8476, 95.7692, 95.9236, 95.7692, 95.9211, 95.8501, 94.6703, 93.0993, 91.972 , 91.7821, 91.7911, 90.807 , 89.3196, 88.1511, 88.7762, 90.2265, 90.8066, 91.2284, 92.4238, 93.243 , 92.8472, 92.5926, 91.7778, 91.2974, 91.6364, 91.2952, 91.771 , 93.2285, 93.3199, 91.8799, 91.2239, 92.4055, 93.8716, 94.5825, 94.5594, 94.9453, 96.2412, 96.6879, 95.8295, 94.7819, 93.4731, 92.7997, 92.963 , 92.6996, 91.9648, 91.2417, 91.9312, 93.9548, 95.3044, 95.2511, 94.5358, 93.8093, 93.2287, 92.2065, 92.1588, 93.6376, 94.899 , 95.1592, 95.2415, 95.5414, 95.0971, 94.528 , 95.5887, 96.4715, 96.6158, 97.0769, 96.8531, 96.3947, 97.4291, 98.1767, 97.0148, 96.044 , 95.9581, 96.4814, 96.5211, 95.3629, 93.5741, 92.077 , 90.4094, 90.1751, 91.3312, 91.2883, 89.0592, 87.052 , 86.6226, 85.7889, 85.6348, 85.3911, 83.8064, 82.8729, 82.6266, 82.645 , 82.645 , 82.645 , 82.645 , 82.645 , 82.645 , 82.645 , 82.645 ]) >>> load_heartrate(True).head() 0 84.2697 1 84.2697 2 84.0619 3 85.6542 4 87.2093 dtype: float64 References ---------- .. [1] Goldberger AL, Rigney DR. Nonlinear dynamics at the bedside. In: Glass L, Hunter P, McCulloch A, eds. Theory of Heart: Biomechanics, Biophysics, and Nonlinear Dynamics of Cardiac Function. New York: Springer-Verlag, 1991, pp. 583-605.
16,147
def hardware_connector_name(self, **kwargs): config = ET.Element("config") hardware = ET.SubElement(config, "hardware", xmlns="urn:brocade.com:mgmt:brocade-hardware") connector = ET.SubElement(hardware, "connector") name = ET.SubElement(connector, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
16,148
def listdir_matches(match):
    import os
    last_slash = match.rfind('/')
    if last_slash == -1:
        dirname = '.'
        match_prefix = match
        result_prefix = ''
    else:
        match_prefix = match[last_slash + 1:]
        if last_slash == 0:
            dirname = '/'
            result_prefix = '/'
        else:
            dirname = match[0:last_slash]
            result_prefix = dirname + '/'
    def add_suffix_if_dir(filename):
        try:
            # 0x4000 is the directory bit in the st_mode returned by os.stat
            if (os.stat(filename)[0] & 0x4000) != 0:
                return filename + '/'
        except FileNotFoundError:
            pass
        return filename
    matches = [add_suffix_if_dir(result_prefix + filename)
               for filename in os.listdir(dirname)
               if filename.startswith(match_prefix)]
    return matches
Returns a list of filenames contained in the named directory. Only filenames which start with `match` will be returned. Directories will have a trailing slash.
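Intended for tab-completion; for example, with a hypothetical filesystem containing /flash and /flash2:

print(listdir_matches('/fl'))    # ['/flash/', '/flash2/']
print(listdir_matches('lib/u'))  # e.g. ['lib/urequests.py', 'lib/umqtt/']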
16,149
def totext(self) -> str:
    sreturn = ''
    if self.properties.content_settings.content_encoding is None:
        # exception message approximate; the original literal was lost
        raise AzureStorageWrapException(
            self, 'Blob {} has no content_encoding set'.format(self.name))
    else:
        # 'replace' error handler is an assumption
        sreturn = self.content.decode(
            self.properties.content_settings.content_encoding, 'replace')
    return sreturn
Return blob content from a StorageBlobModel instance as a string.
16,150
def patch_namespaced_pod_preset(self, name, namespace, body, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_pod_preset_with_http_info(name, namespace, body, **kwargs) else: (data) = self.patch_namespaced_pod_preset_with_http_info(name, namespace, body, **kwargs) return data
patch_namespaced_pod_preset # noqa: E501 partially update the specified PodPreset # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_pod_preset(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the PodPreset (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1alpha1PodPreset If the method is called asynchronously, returns the request thread.
16,151
def _set_property(xml_root, name, value, properties=None): if properties is None: properties = xml_root.find("properties") for prop in properties: if prop.get("name") == name: prop.set("value", utils.get_unicode_str(value)) break else: etree.SubElement( properties, "property", {"name": name, "value": utils.get_unicode_str(value)} )
Sets property to specified value.
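Example against a minimal properties tree (using lxml.etree; the property names here are hypothetical, and the module's utils.get_unicode_str helper is assumed importable):

from lxml import etree

xml_root = etree.fromstring(
    "<testsuite><properties>"
    "<property name='project-id' value='OLD'/>"
    "</properties></testsuite>")
_set_property(xml_root, "project-id", "NEW")     # updates the existing property
_set_property(xml_root, "testrun-id", "run-42")  # appends a new one
print(etree.tostring(xml_root, pretty_print=True).decode())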
16,152
def stream(self, actor_sid=values.unset, event_type=values.unset, resource_sid=values.unset, source_ip_address=values.unset, start_date=values.unset, end_date=values.unset, limit=None, page_size=None): limits = self._version.read_limits(limit, page_size) page = self.page( actor_sid=actor_sid, event_type=event_type, resource_sid=resource_sid, source_ip_address=source_ip_address, start_date=start_date, end_date=end_date, page_size=limits['page_size'], ) return self._version.stream(page, limits['limit'], limits['page_limit'])
Streams EventInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient. :param unicode actor_sid: Only include Events initiated by this Actor :param unicode event_type: Only include Events of this EventType :param unicode resource_sid: Only include Events referring to this resource :param unicode source_ip_address: Only include Events that originated from this IP address :param datetime start_date: Only show events on or after this date :param datetime end_date: Only show events on or before this date :param int limit: Upper limit for the number of records to return. stream() guarantees to never return more than limit. Default is no limit :param int page_size: Number of records to fetch per request, when not set will use the default value of 50 records. If no page_size is defined but a limit is defined, stream() will attempt to read the limit with the most efficient page size, i.e. min(limit, 1000) :returns: Generator that will yield up to limit results :rtype: list[twilio.rest.monitor.v1.event.EventInstance]
16,153
def fit(self, bbox, max_zoom=MAX_ZOOM, force_zoom=None): BUFFER_FACTOR = 1.1 if force_zoom is not None: self.zoom = force_zoom else: for zoom in range(max_zoom, MIN_ZOOM-1, -1): self.zoom = zoom left, top = self.lonlat_to_screen([bbox.west], [bbox.north]) right, bottom = self.lonlat_to_screen([bbox.east], [bbox.south]) if (top - bottom < SCREEN_H*BUFFER_FACTOR) and (right - left < SCREEN_W*BUFFER_FACTOR): break west_tile, north_tile = self.deg2num(bbox.north, bbox.west, self.zoom) east_tile, south_tile = self.deg2num(bbox.south, bbox.east, self.zoom) self.xtile = west_tile - self.tiles_horizontally/2. + (east_tile - west_tile)/2 self.ytile = north_tile - self.tiles_vertically/2. + (south_tile - north_tile)/2 self.calculate_viewport_size()
Fits the projector to a BoundingBox :param bbox: BoundingBox :param max_zoom: max zoom allowed :param force_zoom: force this specific zoom value even if the whole bbox does not completely fit
16,154
def iter_links_link_element(self, element):
    rel = element.attrib.get('rel', '')
    stylesheet = 'stylesheet' in rel
    icon = 'icon' in rel
    inline = stylesheet or icon
    if stylesheet:
        link_type = LinkType.css
    elif icon:
        link_type = LinkType.media
    else:
        link_type = None
    for attrib_name, link in self.iter_links_by_attrib(element):
        yield LinkInfo(
            element=element, tag=element.tag, attrib=attrib_name,
            link=link, inline=inline, linked=not inline,
            base_link=None, value_type='plain',  # value_type literal is an assumption
            link_type=link_type
        )
Iterate a ``link`` for URLs. This function handles stylesheets and icons in addition to standard scraping rules.
16,155
def _build_request_url(self, secure, api_method, version): if secure: proto = ANDROID.PROTOCOL_SECURE else: proto = ANDROID.PROTOCOL_INSECURE req_url = ANDROID.API_URL.format( protocol=proto, api_method=api_method, version=version ) return req_url
Build a URL for an API method request
16,156
def page_factory(request):
    # 'traverse' is Pyramid's standard matchdict key; the fallback config
    # keys and the root slug '/' are assumptions
    prefix = request.matchdict['prefix']
    settings = request.registry.settings
    dbsession = settings[CONFIG_DBSESSION]
    config = settings[CONFIG_MODELS]
    if prefix not in config:
        request.matchdict['traverse'] =\
            tuple([prefix] + list(request.matchdict['traverse']))
        prefix = None
    resources = config.get(
        prefix, config.get(
            '', config.get(
                '/', None)))
    if not hasattr(resources, '__iter__'):
        resources = (resources, )
    tree = {}
    if not resources:
        return tree
    for resource in resources:
        table = None
        if not hasattr(resource, '__table__')\
                and hasattr(resource, 'model'):
            table = resource.model
        else:
            table = resource
        if not hasattr(table, 'slug'):
            continue
        nodes = dbsession.query(table)
        if hasattr(table, 'parent_id'):
            nodes = nodes.filter(or_(
                table.parent_id == None,
                table.parent.has(table.slug == '/')
            ))
        for node in nodes:
            if not node.slug:
                continue
            resource = resource_of_node(resources, node)
            tree[node.slug] = resource(node, prefix=prefix)
    return tree
Page factory. Config models example: .. code-block:: python models = { '': [WebPage, CatalogResource], 'catalogue': CatalogResource, 'news': NewsResource, }
16,157
def get_balance(self):
    if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
        # message approximate; the original literal was lost
        raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY is not set')
    # parameter names and response separators are assumptions about the
    # SMSGlobal HTTP API
    params = {
        'user': self.get_username(),
        'password': self.get_password(),
        'country': SMSGLOBAL_CHECK_BALANCE_COUNTRY,
    }
    req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
    response = urllib2.urlopen(req).read()
    if response.startswith('ERROR'):
        raise Exception('%s' % response.replace('ERROR: ', ''))
    return dict([(p.split(':')[0].lower(), p.split(':')[1])
                 for p in response.split(';') if len(p) > 0])
Get balance with provider.
16,158
def add_orbit(self, component=None, **kwargs): kwargs.setdefault('component', component) return self.add_component('orbit', **kwargs)
Shortcut to :meth:`add_component` but with kind='orbit'
16,159
def _build_word(syl, vowels):
    # pattern restored from the docstring below; the exact original regex
    # was lost in extraction
    return ("(?:{syl}(?:-(?={syl})|'(?=[{vowels}]))?)+"
            .format(syl=syl, vowels=vowels))
Builds a Pinyin word re pattern from a Pinyin syllable re pattern. A word is defined as a series of consecutive valid Pinyin syllables with optional hyphens and apostrophes interspersed. Hyphens must be followed immediately by another valid Pinyin syllable. Apostrophes must be followed by another valid Pinyin syllable that starts with an 'a', 'e', or 'o'.
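With a toy syllable pattern (real Pinyin syllable patterns are far larger), the word pattern built above behaves like this:

import re

syl = "(?:ni|hao|xi|an)"  # toy syllable pattern, an assumption for illustration
word = _build_word(syl, "aeo")
print(re.fullmatch(word, "ni-hao") is not None)  # True
print(re.fullmatch(word, "xi'an") is not None)   # True
print(re.fullmatch(word, "ni-") is not None)     # False: a hyphen must be followed by a syllable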
16,160
def _datalog(self, parameter, run, maxrun, det_id):
    "Extract data from database"
    # parameter names and URL path are assumptions about the KM3NeT DB API
    values = {
        'parameter_name': parameter,
        'minrun': run,
        'maxrun': maxrun,
        'detid': det_id,
    }
    data = urlencode(values)
    content = self._get_content('streamds/datalognumbers.txt?' + data)
    if content.startswith('ERROR'):
        log.error(content)
        return None
    try:
        dataframe = read_csv(content)
    except ValueError:
        log.warning("Empty dataset")
        return make_empty_dataset()
    else:
        add_datetime(dataframe)
        try:
            self._add_converted_units(dataframe, parameter)
        except KeyError:
            log.warning("Could not add converted units for {0}".format(parameter))
        return dataframe
Extract data from database
16,161
def is_contradictory(self, other): if not isinstance(other, DictCell): raise Exception("Incomparable") for key, val in self: if key in other.__dict__[] \ and val.is_contradictory(other.__dict__[][key]): return True return False
Returns True if the two DictCells are unmergeable.
16,162
def warn_if_detached(func):
    @wraps(func)
    def wrapped(this, *args, **kwargs):
        if '_detached' in this.__dict__ and this._detached:
            warnings.warn('Accessing a detached record')  # message approximate
        return func(this, *args, **kwargs)
    return wrapped
Warn if self / cls is detached.
16,163
def check_for_eni_source():
    with open('/etc/network/interfaces', 'r') as eni:
        for line in eni:
            if line == 'source /etc/network/interfaces.d/*\n':
                return
    with open('/etc/network/interfaces', 'a') as eni:
        eni.write('\nsource /etc/network/interfaces.d/*\n')
Juju removes the source line when setting up interfaces, replace if missing
16,164
def export_task_info(node_params, output_element): if consts.Consts.default in node_params and node_params[consts.Consts.default] is not None: output_element.set(consts.Consts.default, node_params[consts.Consts.default])
Adds Task node attributes to exported XML element :param node_params: dictionary with given task parameters, :param output_element: object representing BPMN XML 'task' element.
16,165
def quit(self): for c in self.channels: c.users.remove(self.nick) self.channels = []
Remove this user from all channels and reinitialize the user's list of joined channels.
16,166
def backup(schema, uuid, export_filter, export_format, filename, pretty, export_all, omit):
    export_format = export_format.upper()
    if pretty:
        indent = 4
    else:
        indent = 0
    f = None
    if filename:
        try:
            f = open(filename, 'w')
        except (IOError, PermissionError) as e:
            # log message approximate; the original literal was lost
            backup_log('Could not open output file for writing:', exc=True, lvl=error)
            return

    def output(what, convert=False):
        if convert:
            if export_format == 'JSON':
                data = json.dumps(what, indent=indent)
            else:
                data = ""
        else:
            data = what
        if not filename:
            print(data)
        else:
            f.write(data)

    if schema is None:
        if export_all is False:
            backup_log('No schema given. Read the help.', lvl=warn)  # message approximate
            return
        else:
            schemata = objectmodels.keys()
    else:
        schemata = [schema]
    all_items = {}
    for schema_item in schemata:
        model = objectmodels[schema_item]
        if uuid:
            obj = model.find({'uuid': uuid})
        elif export_filter:
            obj = model.find(literal_eval(export_filter))
        else:
            obj = model.find()
        items = []
        for item in obj:
            fields = item.serializablefields()
            for field in omit:
                try:
                    fields.pop(field)
                except KeyError:
                    pass
            items.append(fields)
        all_items[schema_item] = items
    output(all_items, convert=True)
    if f is not None:
        f.flush()
        f.close()
Exports all collections to (JSON-) files.
16,167
def grantxml2json(self, grant_xml): tree = etree.fromstring(grant_xml) if tree.prefix == : ptree = self.get_subtree( tree, )[0] header = self.get_subtree(tree, )[0] oai_id = self.get_text_node(header, ) modified = self.get_text_node(header, ) else: ptree = self.get_subtree( tree, )[0] header = self.get_subtree(tree, )[0] oai_id = self.get_text_node(header, ) modified = self.get_text_node(header, ) url = self.get_text_node(ptree, ) code = self.get_text_node(ptree, ) title = self.get_text_node(ptree, ) acronym = self.get_text_node(ptree, ) startdate = self.get_text_node(ptree, ) enddate = self.get_text_node(ptree, ) funder = self.fundertree2json(ptree, oai_id) internal_id = "{0}::{1}".format(funder[], code) eurepo_id = \ "info:eu-repo/grantAgreement/{funder}/{program}/{code}/".format( funder=quote_plus(funder[].encode()), program=quote_plus(funder[].encode()), code=quote_plus(code.encode()), ) ret_json = { : self.schema_formatter.schema_url, : internal_id, : { : oai_id, : eurepo_id, : url if url.startswith("http://purl.org/") else None, }, : code, : title, : acronym, : startdate, : enddate, : {: funder[]}, : funder[], : url, : modified, } return ret_json
Convert OpenAIRE grant XML into JSON.
16,168
def r_bergomi(H,T,eta,xi,rho,S0,r,N,M,dW=None,dW_orth=None,cholesky = False,return_v=False): times = np.linspace(0, T, N) dt = T/(N-1) times = np.reshape(times,(-1,1)) if dW is None: dW = np.sqrt(dt)*np.random.normal(size=(N-1,M)) if dW_orth is None: dW_orth = np.sqrt(dt)*np.random.normal(size=(N-1,M)) dZ = rho*dW+np.sqrt(1-rho**2)*dW_orth Y = eta*np.sqrt(2*H)*fBrown(H,T,N,M,dW =dW,cholesky = cholesky) v = xi*np.exp(Y-0.5*(eta**2)*times**(2*H)) S = S0*np.exp(integral(np.sqrt(v),dF = dZ,axis=0,cumulative = True)+integral(r - 0.5*v,F = times,axis=0,trapez=False,cumulative = True)) if return_v: return np.array([S,v]).T else: return np.array([S]).T
Return M Euler-Maruyama sample paths with N time steps of (S_t,v_t), where (S_t,v_t) follows the rBergomi model of mathematical finance :rtype: M x N x d array
16,169
def transform_coords(self, width, height): x = self._libinput.libinput_event_tablet_tool_get_x_transformed( self._handle, width) y = self._libinput.libinput_event_tablet_tool_get_y_transformed( self._handle, height) x_changed = self._libinput.libinput_event_tablet_tool_x_has_changed( self._handle) y_changed = self._libinput.libinput_event_tablet_tool_y_has_changed( self._handle) return (x, y), x_changed or y_changed
Return the current absolute (x, y) coordinates of the tablet tool event, transformed to screen coordinates and whether they have changed in this event. Note: On some devices, returned value may be negative or larger than the width of the device. See `Out-of-bounds motion events`_ for more details. Args: width (int): The current output screen width. height (int): The current output screen height. Returns: ((float, float), bool): The current absolute (x, y) coordinates transformed to screen coordinates and whether they have changed.
16,170
def get_draft_url(url):
    if verify_draft_url(url):
        return url
    url = urlparse.urlparse(url)
    salt = get_random_string(5)
    query = QueryDict(force_bytes(url.query), mutable=True)
    # query parameter name is an assumption; the original literal was lost
    query['edit'] = '%s:%s' % (salt, get_draft_hmac(salt, url.path))
    parts = list(url)
    parts[4] = query.urlencode(safe=':')
    return urlparse.urlunparse(parts)
Return the given URL with a draft mode HMAC in its querystring.
16,171
def run_file(name, database, query_file=None, output=None, grain=None,
             key=None, overwrite=True, saltenv=None, check_db_exists=True,
             **connection_args):
    # dictionary keys and message strings restored per standard Salt state
    # conventions; exact message wording is approximate
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': 'Database {0} is already present'.format(database)}
    if any([query_file.startswith(proto)
            for proto in ['salt://', 'http://', 'https://', 'ftp://', 's3://']]):
        query_file = __salt__['cp.cache_file'](query_file, saltenv=saltenv or __env__)
    if not os.path.exists(query_file):
        ret['comment'] = 'File {0} does not exist'.format(query_file)
        ret['result'] = False
        return ret
    if check_db_exists and not __salt__['mysql.db_exists'](database, **connection_args):
        err = _get_mysql_error()
        if err is not None:
            ret['comment'] = err
            ret['result'] = False
            return ret
        ret['result'] = None
        ret['comment'] = ('Database {0} is not present'
                          ).format(database)
        return ret
    if output == 'grain':
        if grain is not None and key is None:
            if not overwrite and grain in __salt__['grains.ls']():
                ret['comment'] = 'Grain ' + grain + ' is already set'
                return ret
            elif __opts__['test']:
                ret['result'] = None
                ret['comment'] = 'Query would execute, storing result in ' + 'grain: ' + grain
                return ret
        elif grain is not None:
            if grain in __salt__['grains.ls']():
                grain_value = __salt__['grains.get'](grain)
            else:
                grain_value = {}
            if not overwrite and key in grain_value:
                ret['comment'] = 'Grain ' + grain + ':' + key + ' is already set'
                return ret
            elif __opts__['test']:
                ret['result'] = None
                ret['comment'] = 'Query would execute, storing result in ' + 'grain: ' + grain + ':' + key
                return ret
        else:
            ret['result'] = False
            ret['comment'] = "Error: output type needs the grain "\
                + "parameter\n"
            return ret
    elif output is not None:
        if not overwrite and os.path.isfile(output):
            ret['comment'] = 'File ' + output + ' already exists'
            return ret
        elif __opts__['test']:
            ret['result'] = None
            ret['comment'] = 'Query would execute, storing result in ' + 'file: ' + output
            return ret
    elif __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Query would execute'
        return ret
    query_result = __salt__['mysql.file_query'](database, query_file, **connection_args)
    if query_result is False:
        ret['result'] = False
        return ret
    mapped_results = []
    if 'results' in query_result:
        for res in query_result['results']:
            mapped_line = {}
            for idx, col in enumerate(query_result['columns']):
                mapped_line[col] = res[idx]
            mapped_results.append(mapped_line)
        query_result['results'] = mapped_results
    ret['comment'] = six.text_type(query_result)
    if output == 'grain':
        if grain is not None and key is None:
            __salt__['grains.setval'](grain, query_result)
            ret['changes']['query'] = "Executed. Output into grain: " + grain
        elif grain is not None:
            if grain in __salt__['grains.ls']():
                grain_value = __salt__['grains.get'](grain)
            else:
                grain_value = {}
            grain_value[key] = query_result
            __salt__['grains.setval'](grain, grain_value)
            ret['changes']['query'] = "Executed. Output into grain: " + grain + ":" + key
    elif output is not None:
        ret['changes']['query'] = "Executed. Output into " + output
        with salt.utils.files.fopen(output, 'w') as output_file:
            if 'results' in query_result:
                for res in query_result['results']:
                    for col, val in six.iteritems(res):
                        output_file.write(salt.utils.stringutils.to_str(col + ':' + val + '\n'))
            else:
                output_file.write(salt.utils.stringutils.to_str(query_result))
    else:
        ret['changes']['query'] = "Executed"
    return ret
Execute an arbitrary query on the specified database .. versionadded:: 2017.7.0 name Used only as an ID database The name of the database to execute the query_file on query_file The file of mysql commands to run output grain: output in a grain other: the file to store results None: output to the result comment (default) grain: grain to store the output (need output=grain) key: the specified grain will be treated as a dictionary, the result of this state will be stored under the specified key. overwrite: The file or grain will be overwritten if it already exists (default) saltenv: The saltenv to pull the query_file from check_db_exists: The state run will check that the specified database exists (default=True) before running any queries
16,172
def run(self, resources):
    hwman = resources['connection']  # resource key is an assumption
    con = hwman.hwman.controller()
    test_interface = con.test_interface()
    try:
        test_interface.synchronize_clock()
        print('Time currently set at %s' % test_interface.current_time_str())
    except:
        raise ArgumentError('Could not synchronize RTC clock')  # message approximate
Sets the RTC timestamp to UTC. Args: resources (dict): A dictionary containing the required resources that we needed access to in order to perform this step.
16,173
def get_column_metadata(gctx_file_path, convert_neg_666=True): full_path = os.path.expanduser(gctx_file_path) gctx_file = h5py.File(full_path, "r") col_dset = gctx_file[col_meta_group_node] col_meta = parse_metadata_df("col", col_dset, convert_neg_666) gctx_file.close() return col_meta
Opens .gctx file and returns only column metadata Input: Mandatory: - gctx_file_path (str): full path to gctx file you want to parse. Optional: - convert_neg_666 (bool): whether to convert -666 values to numpy.nan Output: - col_meta (pandas DataFrame): a DataFrame of all column metadata values.
16,174
def users_feature(app):
    # config key and login view are assumptions; the literals were lost
    if not app.config.get('USER_JWT_SECRET', None):
        raise x.JwtSecretMissing()
    app.session_interface = BoilerSessionInterface()
    user_service.init(app)
    login_manager.init_app(app)
    login_manager.login_view = 'user.login'
    login_manager.login_message = None

    @login_manager.user_loader
    def load_user(id):
        return user_service.get(id)

    oauth.init_app(app)
    registry = OauthProviders(app)
    providers = registry.get_providers()
    with app.app_context():
        for provider in providers:
            if provider not in oauth.remote_apps:
                oauth.remote_app(provider, **providers[provider])
            registry.register_token_getter(provider)

    principal.init_app(app)

    @principal.identity_loader
    def load_identity():
        if current_user.is_authenticated:
            return Identity(current_user.id)
        # standard flask-principal session keys
        session.pop('identity.name', None)
        session.pop('identity.auth_type', None)
        return AnonymousIdentity()

    @identity_loaded.connect_via(app)
    def on_identity_loaded(sender, identity):
        identity.user = current_user
        if not current_user.is_authenticated:
            return
        identity.provides.add(UserNeed(current_user.id))
        for role in current_user.roles:
            identity.provides.add(RoleNeed(role.handle))
Add users feature

Allows registering users and assigning them groups; wires up Flask-Login,
Flask-Principal, and OAuth integration
16,175
def covariance_matrix(self): a = N.dot(self.U,self.sigma) cv = N.dot(a,a.T) return cv
Constructs the covariance matrix of input data from the singular value
decomposition. Note that this is different than a covariance matrix of
residuals, which is what we want for calculating fit errors.

Using SVD output to compute the covariance matrix of input data:

    X = U Σ Vᵀ
    X Xᵀ = (U Σ Vᵀ)(U Σ Vᵀ)ᵀ = (U Σ Vᵀ)(V Σ Uᵀ)

Since V is an orthogonal matrix (Vᵀ V = I), the covariance matrix of
input data is:

    X Xᵀ = U Σ² Uᵀ

Because the axes represent identity in the PCA coordinate system, the PCA
major axes themselves represent an affine transformation matrix from PCA
to Cartesian space
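A short numerical check of the identity above (a sketch; full_matrices=False
keeps the shapes compatible):

    import numpy as np

    X = np.random.rand(4, 10)
    U, s, Vt = np.linalg.svd(X, full_matrices=False)
    cov_direct = X @ X.T
    cov_svd = U @ np.diag(s**2) @ U.T   # U Σ² Uᵀ
    assert np.allclose(cov_direct, cov_svd)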
16,176
def dispatch_request(self, *args, **kwargs):
    if self.validation:
        specs = {}
        attrs = flasgger.constants.OPTIONAL_FIELDS + [
            'parameters', 'definitions', 'responses',
            'summary', 'description'
        ]
        for attr in attrs:
            specs[attr] = getattr(self, attr)
        definitions = {}
        specs.update(convert_schemas(specs, definitions))
        specs['definitions'] = definitions
        flasgger.utils.validate(
            specs=specs, validation_function=self.validation_function)
    return super(SwaggerView, self).dispatch_request(*args, **kwargs)
If validation=True perform validation
16,177
def get_urls(self):
    urls = super(CompetitionEntryAdmin, self).get_urls()
    csv_urls = patterns(
        '',
        url(
            r'^csvexport/$',  # hypothetical pattern; original literal was stripped
            self.admin_site.admin_view(self.csv_export),
            name='competitionentry-csv-export'  # hypothetical name as well
        )
    )
    return csv_urls + urls
Extend the admin urls for the CompetitionEntryAdmin model to be able to invoke a CSV export view on the admin model
16,178
def getPage(url, contextFactory=None, *args, **kwargs):
    scheme, host, port, path = client._parse(url)
    factory = client.HTTPClientFactory(url, *args, **kwargs)
    if scheme == 'https':
        if contextFactory is None:
            raise RuntimeError('getPage requires a contextFactory for https URLs')
        conn = reactor.connectSSL(host, port, factory, contextFactory)
    else:
        conn = reactor.connectTCP(host, port, factory)
    return factory
Download a web page as a string.

Return a deferred, which will callback with a page (as a string) or
errback with a description of the error.

See HTTPClientFactory to see what extra args can be passed.
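A usage sketch. Note that as written this variant returns the factory
rather than the deferred itself, so callbacks attach to factory.deferred;
the URL is a placeholder:

    from twisted.internet import reactor

    factory = getPage("http://example.com/")
    factory.deferred.addCallback(lambda page: reactor.stop())
    factory.deferred.addErrback(lambda failure: reactor.stop())
    reactor.run()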
16,179
def describe_instances(self, xml_bytes): root = XML(xml_bytes) results = [] for reservation_data in root.find("reservationSet"): reservation = model.Reservation( reservation_id=reservation_data.findtext("reservationId"), owner_id=reservation_data.findtext("ownerId")) instances = self.instances_set( reservation_data, reservation) results.extend(instances) return results
Parse the reservations XML payload that is returned from an AWS
describeInstances API call.

Instead of returning the reservations as the "top-most" object, we
return the object that most developers and their code will be
interested in: the instances. The reservation is available on each
instance object.

The following instance attributes are optional:
    * ami_launch_index
    * key_name
    * kernel_id
    * product_codes
    * ramdisk_id
    * reason

@param xml_bytes: raw XML payload from AWS.
16,180
def remove_hook(self, name, func): if name in self._hooks and func in self._hooks[name]: self._hooks[name].remove(func) return True
Remove a callback from a hook.
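A minimal usage sketch; `add_hook` is assumed to be the registration
counterpart on the same object:

    def on_save(doc):
        print("saving", doc)

    bus.add_hook("save", on_save)      # hypothetical registration call
    bus.remove_hook("save", on_save)   # -> True; silently no-ops if unknown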
16,181
def perform_experiment(self, engine_list):
    result = []
    for engine_idx, engine in enumerate(engine_list):
        print('Engine %d / %d' % (engine_idx, len(engine_list)))
        engine.clean_all_buckets()
        avg_recall = 0.0
        avg_precision = 0.0
        avg_search_time = 0.0

        # Index all vectors
        for index, v in enumerate(self.vectors):
            engine.store_vector(v, 'data_%d' % index)

        for index in self.query_indices:
            real_nearest = set(self.closest[index])
            search_time_start = time.time()
            nearest = engine.neighbours(self.vectors[index])
            search_time = time.time() - search_time_start
            nearest = set([self.__index_of_vector(x[0]) for x in nearest])
            # The query vector itself does not count as a neighbour
            nearest.remove(index)
            if len(nearest) == 0:
                recall = 0.0
                precision = 0.0
            else:
                inter_count = float(len(real_nearest & nearest))
                recall = inter_count / float(len(real_nearest))
                precision = inter_count / float(len(nearest))
            avg_recall += recall
            avg_precision += precision
            avg_search_time += search_time

        avg_recall /= float(len(self.query_indices))
        avg_precision /= float(len(self.query_indices))
        avg_search_time = avg_search_time / float(len(self.query_indices))
        avg_search_time /= self.exact_search_time_per_vector
        print('Recall = %f, precision = %f, search time = %f * exact search'
              % (avg_recall, avg_precision, avg_search_time))
        result.append((avg_recall, avg_precision, avg_search_time))
    return result
Performs nearest neighbour recall experiments with custom vector data
for all engines in the specified list.

Returns a list of (recall, precision, search_time) tuples, also stored
in self.result. All values are averaged over all query vectors.
search_time is the average retrieval/search time relative to the
average exact search time.
16,182
def add_cmd_method(self, name, method, argc=None, complete=None):
    if ' ' in name:
        raise ValueError("' ' cannot be in command name {0}".format(name))
    self._cmd_methods[name] = method
    self._cmd_argc[name] = argc
    self._cmd_complete[name] = complete
Adds a command to the command line interface loop.

Parameters
----------
name : string
    The command.

method : function(args)
    The function to execute when this command is issued. The argument
    of the function is a list of space separated arguments to the
    command.

argc : int, optional (default=None)
    The number of expected further arguments. If None, the number of
    arguments is not restricted.

complete : function(args, text), optional (default=None)
    A function that is called to complete further arguments. If None,
    no suggestions are made. The function gets the arguments up to the
    incomplete argument (args). text contains the argument to be
    completed. The function must return a list of suggestions, or None
    if text is already valid and there are no further suggestions.
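A usage sketch; `cli` and the completion vocabulary are made up for
illustration:

    def do_echo(args):
        print(" ".join(args))

    def complete_echo(args, text):
        # Suggest from a fixed, hypothetical vocabulary.
        return [w for w in ("hello", "help", "halt") if w.startswith(text)]

    cli.add_cmd_method("echo", do_echo, argc=None, complete=complete_echo)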
16,183
def _filter_by_zoom(element=None, conf_string=None, zoom=None): for op_str, op_func in [ ("=", operator.eq), ("<=", operator.le), (">=", operator.ge), ("<", operator.lt), (">", operator.gt), ]: if conf_string.startswith(op_str): return element if op_func(zoom, _strip_zoom(conf_string, op_str)) else None
Return element only if zoom condition matches with config string.
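A few hypothetical config strings, assuming _strip_zoom() parses the
numeric part after the operator:

    _filter_by_zoom(element="roads", conf_string="<=12", zoom=10)  # -> "roads"
    _filter_by_zoom(element="roads", conf_string="<=12", zoom=14)  # -> None
    _filter_by_zoom(element="roads", conf_string="=10", zoom=10)   # -> "roads"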
16,184
def _dump(self, tag, x, lo, hi):
    for i in xrange(lo, hi):
        yield '%s %s' % (tag, x[i])
Generate comparison results for a same-tagged range.
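A quick sketch of the output, assuming the reconstructed '%s %s' format:

    list(differ._dump('-', ['one', 'two'], 0, 2))  # -> ['- one', '- two']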
16,185
def _get_offset(cmd): dict_offset = cmd.dictionary_page_offset data_offset = cmd.data_page_offset if dict_offset is None or data_offset < dict_offset: return data_offset return dict_offset
Return the offset into the cmd based on whether it's a dictionary page
or a data page.
16,186
def retrieveAcknowledge(): a = TpPd(pd=0x3) b = MessageType(mesType=0x1d) packet = a / b return packet
RETRIEVE ACKNOWLEDGE Section 9.3.21
16,187
def downloadFile(self, filename, ispickle=False, athome=False):
    print("Downloading file {} from Redunda.".format(filename))

    _, tail = os.path.split(filename)

    url = "https://redunda.sobotics.org/bots/data/{}?key={}".format(tail, self.key)

    requestToMake = request.Request(url)
    response = request.urlopen(requestToMake)

    if response.code != 200:
        print("Error occurred while downloading file {} with error code {}.".format(filename, response.code))

    if athome:
        filename = str(os.path.expanduser("~")) + filename

    filedata = response.read().decode("utf-8")

    try:
        if filename.endswith(".pickle") or ispickle:
            data = json.loads(filedata)
            try:
                with open(filename, "wb") as fileToWrite:
                    pickle.dump(data, fileToWrite)
            except pickle.PickleError as perr:
                print("Pickling error occurred: {}".format(perr))
                return
        else:
            with open(filename, "w") as fileToWrite:
                fileToWrite.write(filedata)
    except IOError as ioerr:
        print("IOError occurred: {}".format(ioerr))
        return
Downloads a single file from Redunda.

:param str filename: The name of the file you want to download
:param bool ispickle: Optional variable which tells if the file to be downloaded is a pickle; default is False.
:param bool athome: Optional; when True the file is written relative to the user's home directory. Default is False.
:returns: returns nothing
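A usage sketch; the owning class name and key are placeholders:

    bot = RedundaSync(key="YOUR_REDUNDA_KEY")     # hypothetical owner class
    bot.downloadFile("/data/state.pickle", ispickle=True)
    bot.downloadFile("/notes.txt", athome=True)   # written under the home directory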
16,188
def get_vnetwork_hosts_output_vnetwork_hosts_name(self, **kwargs):
    config = ET.Element("config")
    get_vnetwork_hosts = ET.Element("get_vnetwork_hosts")
    config = get_vnetwork_hosts
    output = ET.SubElement(get_vnetwork_hosts, "output")
    vnetwork_hosts = ET.SubElement(output, "vnetwork-hosts")
    name = ET.SubElement(vnetwork_hosts, "name")
    name.text = kwargs.pop('name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
16,189
def occurrences(coll, value=None, **options): count = {} for element in coll: count[element] = count.get(element, 0) + 1 if options: count = _filter_occurrences(count, options) if value: count = count.get(value, 0) return count
Return the occurrences of the elements in the collection :param coll: a collection :param value: a value in the collection :param options: an optional keyword used as a criterion to filter the values in the collection :returns: the frequency of the values in the collection as a dictionary >>> occurrences((1, 1, 2, 3)) {1: 2, 2: 1, 3: 1} >>> occurrences((1, 1, 2, 3), 1) 2 Filter the values of the occurrences that are <, <=, >, >=, == or != than a given number:: >>> occurrences((1, 1, 2, 3), lt=3) {1: 2, 2: 1, 3: 1} >>> occurrences((1, 1, 2, 3), gt=1) {1: 2} >>> occurrences((1, 1, 2, 3), ne=1) {1: 2}
16,190
def url_for(**options):
    url_parts = get_url_parts(**options)
    image_hash = hashlib.md5(b(options['image_url'])).hexdigest()
    url_parts.append(image_hash)
    return "/".join(url_parts)
Returns the url for the specified options
16,191
def find_declared_encoding(cls, markup, is_html=False, search_entire_document=False):
    if search_entire_document:
        xml_endpos = html_endpos = len(markup)
    else:
        xml_endpos = 1024
        html_endpos = max(2048, int(len(markup) * 0.05))

    declared_encoding = None
    declared_encoding_match = xml_encoding_re.search(markup, endpos=xml_endpos)
    if not declared_encoding_match and is_html:
        declared_encoding_match = html_meta_re.search(markup, endpos=html_endpos)
    if declared_encoding_match is not None:
        declared_encoding = declared_encoding_match.groups()[0].decode(
            'ascii', 'replace')
    if declared_encoding:
        return declared_encoding.lower()
    return None
Given a document, tries to find its declared encoding. An XML encoding is declared at the beginning of the document. An HTML encoding is declared in a <meta> tag, hopefully near the beginning of the document.
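A standalone sketch of the XML-declaration probe; the regex below is an
approximation of the library's xml_encoding_re, which may differ slightly:

    import re

    xml_encoding_re = re.compile(br'^<\?.*encoding=[\'"](.*?)[\'"].*\?>')

    markup = b'<?xml version="1.0" encoding="ISO-8859-1"?><doc/>'
    m = xml_encoding_re.search(markup, endpos=1024)
    print(m.groups()[0].decode('ascii', 'replace').lower())  # iso-8859-1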
16,192
def process_match(match, fixed_text, cur, cur_end):
    # Default: replace unless a scope check below fails
    replace = True

    # Set the check cursor depending on the match type
    if match['type'] == 'prefix':
        chk = cur - 1
    else:
        # suffix
        chk = cur_end

    # A leading '!' negates the scope
    if match['scope'].startswith('!'):
        scope = match['scope'][1:]
        negative = True
    else:
        scope = match['scope']
        negative = False

    if scope == 'punctuation':
        if (not ((chk < 0 and match['type'] == 'prefix') or
                 (chk >= len(fixed_text) and match['type'] == 'suffix') or
                 validate.is_punctuation(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'vowel':
        if (not (((chk >= 0 and match['type'] == 'prefix') or
                  (chk < len(fixed_text) and match['type'] == 'suffix')) and
                 validate.is_vowel(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'consonant':
        if (not (((chk >= 0 and match['type'] == 'prefix') or
                  (chk < len(fixed_text) and match['type'] == 'suffix')) and
                 validate.is_consonant(fixed_text[chk])) ^ negative):
            replace = False
    elif scope == 'exact':
        if match['type'] == 'prefix':
            exact_start = cur - len(match['value'])
            exact_end = cur
        else:
            # suffix
            exact_start = cur_end
            exact_end = cur_end + len(match['value'])
        if not validate.is_exact(match['value'], fixed_text, exact_start,
                                 exact_end, negative):
            replace = False
    return replace
Processes a single match in rules
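A hypothetical rule match to illustrate the field names used above:

    match = {'type': 'suffix', 'scope': 'vowel', 'value': ''}
    # With fixed_text = "ka", cur = 0, cur_end = 1 the character after the
    # match is 'a'; assuming validate.is_vowel('a') is truthy, this holds:
    process_match(match, "ka", 0, 1)  # -> True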
16,193
def get_forces(self, a): f = np.zeros( [ len(a), 3 ], dtype=float ) for c in self.calcs: f += c.get_forces(a) return f
Calculate atomic forces.
16,194
def check_pypi_exists(dependencies):
    for dependency in dependencies.get('pypi', []):
        logger.debug("Checking if %r exists in PyPI", dependency)
        try:
            exists = _pypi_head_package(dependency)
        except Exception as error:
            logger.error("Error checking %s in PyPI: %r", dependency, error)
            raise FadesError("Could not check if dependency exists in PyPI")
        else:
            if not exists:
                logger.error("%s doesn't exists in PyPI.", dependency)
                return False
    return True
Check whether the indicated dependencies actually exist in PyPI.
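A sketch of what the _pypi_head_package helper could look like; the URL
template is an assumption and the dependency is treated as a plain name
string:

    import logging
    from urllib import request
    from urllib.error import HTTPError

    logger = logging.getLogger(__name__)
    BASE_PYPI_URL = "https://pypi.org/pypi/{name}/"  # assumed URL template

    def _pypi_head_package(dependency):
        # Issue a HEAD request; a 404 means the project does not exist.
        url = BASE_PYPI_URL.format(name=dependency)
        req = request.Request(url, method='HEAD')
        try:
            response = request.urlopen(req)
        except HTTPError as error:
            if error.code == 404:
                return False
            raise
        return response.getcode() == 200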
16,195
def _non_blocking_wrapper(self, method, *args, **kwargs):
    exceptions = []

    def task_run(task):
        try:
            getattr(task, method)(*args, **kwargs)
        except Exception as e:
            exceptions.append(e)

    threads = [threading.Thread(name=f'{method}_{i}',  # thread name reconstructed
                                target=task_run, args=[t])
               for i, t in enumerate(self.tasks)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    if exceptions:
        raise exceptions[0]
Runs given method on every task in the job. Blocks until all tasks finish. Propagates exception from first failed task.
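A hypothetical calling pattern, assuming tasks expose an `upload` method:

    job._non_blocking_wrapper("upload", "/local/data", remote="/remote/data")
    # Equivalent to task.upload("/local/data", remote="/remote/data") on
    # every task concurrently, re-raising the first failure after joining.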
16,196
def work(options):
    record = get_record(options)
    # The TXT record is colon-separated; separator reconstructed.
    _, mainv, dailyv, _, _, _, safebrowsingv, bytecodev = record.split(':')
    versions = {'main': mainv,
                'daily': dailyv,
                'safebrowsing': safebrowsingv,
                'bytecode': bytecodev}
    dqueue = Queue(maxsize=0)
    dqueue_workers = 3
    info("[+] \033[92mStarting workers\033[0m")
    for index in range(dqueue_workers):
        info("=> Starting diff download worker: %d" % (index + 1))
        worker = Thread(target=download_diffs, args=(dqueue,))
        worker.setDaemon(True)
        worker.start()
    mqueue = Queue(maxsize=0)
    mqueue_workers = 4
    for index in range(mqueue_workers):
        info("=> Starting signature download worker: %d" % (index + 1))
        worker = Thread(target=update_sig, args=(mqueue,))
        worker.setDaemon(True)
        worker.start()
    for signature_type in ['main', 'daily', 'safebrowsing', 'bytecode']:
        if signature_type in ['main', 'daily', 'bytecode']:
            localver = get_local_version(options.mirrordir, signature_type)
            remotever = versions[signature_type]
            if localver is not None:
                dqueue.put(
                    (
                        options,
                        signature_type,
                        localver,
                        remotever
                    )
                )
        mqueue.put((options, signature_type, versions))
    info("=> Waiting on workers to complete tasks")
    dqueue.join()
    mqueue.join()
    info("=> Workers done processing queues")
    create_dns_file(options, record)
    sys.exit(0)
The work function.
16,197
def toggle_deriv(self, evt=None, value=None):
    "toggle derivative of data"
    if value is None:
        self.conf.data_deriv = not self.conf.data_deriv

    expr = self.conf.data_expr or ''
    if self.conf.data_deriv:
        expr = "deriv(%s)" % expr
    self.write_message("plotting %s" % expr, panel=0)
    self.conf.process_data()
toggle derivative of data
16,198
def _set_tunnel(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Extension metadata below is a plausible reconstruction of the
        # tailf-common annotations; the original literals were stripped.
        t = YANGDynClass(
            v,
            base=YANGListType(
                "identifier", tunnel.tunnel,
                yang_name="tunnel", rest_name="tunnel", parent=self,
                is_container='list', user_ordered=False,
                path_helper=self._path_helper,
                yang_keys='identifier',
                extensions={u'tailf-common': {u'info': u'Tunnel interface',
                                              u'cli-no-key-completion': None,
                                              u'callpoint': u'tunnel_conf',
                                              u'cli-suppress-list-no': None,
                                              u'cli-suppress-mode': None}}),
            is_container='list', yang_name="tunnel", rest_name="tunnel",
            parent=self, path_helper=self._path_helper,
            extmethods=self._extmethods, register_paths=True,
            extensions={u'tailf-common': {u'info': u'Tunnel interface',
                                          u'cli-no-key-completion': None,
                                          u'callpoint': u'tunnel_conf',
                                          u'cli-suppress-list-no': None,
                                          u'cli-suppress-mode': None}},
            namespace='urn:brocade.com:mgmt:brocade-interface',
            defining_module='brocade-interface',
            yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """tunnel must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("identifier", tunnel.tunnel), is_container='list', yang_name="tunnel", yang_type='list')""",
        })

    self.__tunnel = t
    if hasattr(self, '_set'):
        self._set()
Setter method for tunnel, mapped from YANG variable /interface/tunnel (list) If this variable is read-only (config: false) in the source YANG file, then _set_tunnel is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_tunnel() directly.
16,199
def build(self, root, schema): if schema.get("subcommands") and schema["subcommands"]: for subcmd, childSchema in schema["subcommands"].items(): child = CommandTree(node=subcmd) child = self.build(child, childSchema) root.children.append(child) root.help = schema.get("help") for name, desc in schema.get("options").items(): if root.node == "kubectl": self.globalFlags.append(Option(name, desc["help"])) root.localFlags.append(Option(name, desc["help"])) for arg in schema.get("args"): node = CommandTree(node=arg) root.children.append(node) return root
Build the syntax tree for kubectl command line
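A minimal schema sketch to show the expected shape; the builder instance
name and schema contents are made up:

    schema = {
        "help": "kubectl controls the Kubernetes cluster manager",
        "options": {"--namespace": {"help": "namespace scope"}},
        "args": ["get", "describe"],
        "subcommands": {},
    }
    root = parser.build(CommandTree(node="kubectl"), schema)
    # root.help is set, "--namespace" lands in both global and local flags
    # (since the node is "kubectl"), and "get"/"describe" become children.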