Dataset schema (column name: type and observed range):
_id: string, lengths 2 to 7
title: string, lengths 1 to 88
partition: string, 3 distinct values
text: string, lengths 31 to 13.1k
language: string, 1 distinct value
meta_information: dict
q275300
Autoencoder._find_output
test
def _find_output(self, layer): '''Find a layer output name for the given layer specifier. Parameters ---------- layer : None, int, str, or :class:`theanets.layers.Layer` A layer specification. If this is None, the "middle" layer in the network will be used (i.e., the layer at the middle index in the list of network layers). If this is an integer, the corresponding layer in the network's layer list will be used. If this
python
{ "resource": "" }
q275301
Autoencoder.score
test
def score(self, x, w=None, **kwargs): '''Compute R^2 coefficient of determination for a given input. Parameters ---------- x : ndarray (num-examples, num-inputs) An array containing data to be fed into the network. Multiple examples are arranged as rows in this array, with columns containing the variables for each example. Returns ------- r2 : float The R^2 correlation between
python
{ "resource": "" }
q275302
Classifier.predict
test
def predict(self, x, **kwargs): '''Compute a greedy classification for the given set of data. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing examples to classify. Examples are
python
{ "resource": "" }
q275303
Classifier.predict_proba
test
def predict_proba(self, x, **kwargs): '''Compute class posterior probabilities for the given set of data. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing examples to predict. Examples
python
{ "resource": "" }
q275304
Classifier.predict_logit
test
def predict_logit(self, x, **kwargs): '''Compute the logit values that underlie the softmax output. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing examples to classify. Examples are
python
{ "resource": "" }
q275305
Classifier.score
test
def score(self, x, y, w=None, **kwargs): '''Compute the mean accuracy on a set of labeled data. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing examples to classify. Examples are given as the rows in this array. y : ndarray (num-examples, ) A vector of integer class labels, one for each row of input data. w : ndarray (num-examples, ) A vector of weights, one for each row of input data. Returns
python
{ "resource": "" }
q275306
batch_at
test
def batch_at(features, labels, seq_begins, seq_lengths): '''Extract a single batch of data to pass to the model being trained. Parameters ---------- features, labels : ndarray Arrays of the input features and target labels. seq_begins : ndarray Array of the start offsets of the speech segments to include. seq_lengths : ndarray Array of the lengths of the speech segments to include in the batch. Returns ------- features, labels, mask : ndarrays A triple of arrays for training a network. The first element contains input features, the second contains target labels, and the third contains a "mask" consisting of ones where there is valid data and zeros everywhere else.
python
{ "resource": "" }
q275307
batches
test
def batches(dataset): '''Returns a callable that chooses sequences from netcdf data.''' seq_lengths = dataset.variables['seqLengths'].data seq_begins = np.concatenate(([0], np.cumsum(seq_lengths)[:-1])) def sample(): chosen = np.random.choice( list(range(len(seq_lengths))), BATCH_SIZE, replace=False) return batch_at(dataset.variables['inputs'].data,
python
{ "resource": "" }
q275308
Experiment.load
test
def load(self, path): '''Load a saved network from a pickle file on disk. This method sets the ``network`` attribute of the experiment to the loaded network model. Parameters ---------- path : str Load the keyword arguments and parameters of a network from a pickle file at the named path. If this name ends in ".gz" then the input will automatically be gunzipped; otherwise the input will be treated as a "raw" pickle.
python
{ "resource": "" }
q275309
random_matrix
test
def random_matrix(rows, cols, mean=0, std=1, sparsity=0, radius=0, diagonal=0, rng=None): '''Create a matrix of randomly-initialized weights. Parameters ---------- rows : int Number of rows of the weight matrix -- equivalently, the number of "input" units that the weight matrix connects. cols : int Number of columns of the weight matrix -- equivalently, the number of "output" units that the weight matrix connects. mean : float, optional Draw initial weight values from a normal with this mean. Defaults to 0. std : float, optional Draw initial weight values from a normal with this standard deviation. Defaults to 1. sparsity : float in (0, 1), optional If given, ensure that the given fraction of the weight matrix is set to zero. Defaults to 0, meaning all weights are nonzero. radius : float, optional If given, rescale the initial weights to have this spectral radius. No scaling is performed by default. diagonal : float, optional If nonzero, create a matrix containing all zeros except for this value along the diagonal. If nonzero, other arguments (except for rows and cols) will be ignored. rng : :class:`numpy.random.RandomState` or int, optional A random number generator, or an integer seed for a random number generator. If not provided, the random number generator will be created with an automatically chosen seed. Returns
python
{ "resource": "" }
q275310
random_vector
test
def random_vector(size, mean=0, std=1, rng=None): '''Create a vector of randomly-initialized values. Parameters ---------- size : int Length of vector to create. mean : float, optional Mean value for initial vector values. Defaults to 0. std : float, optional Standard deviation for initial vector values. Defaults to 1. rng : :class:`numpy.random.RandomState` or int,
python
{ "resource": "" }
q275311
outputs_matching
test
def outputs_matching(outputs, patterns): '''Get the outputs from a network that match a pattern. Parameters ---------- outputs : dict or sequence of (str, theano expression) Output expressions to filter for matches. If this is a dictionary, its ``items()`` will be processed for matches. patterns : sequence of str A sequence of glob-style patterns to match against. Any parameter matching any pattern in this sequence will be included in the match. Yields ------ matches : pair of str, theano expression Generates a sequence of (name, expression) pairs. The name is the name of the output that matched, and
python
{ "resource": "" }
q275312
params_matching
test
def params_matching(layers, patterns): '''Get the parameters from a network that match a pattern. Parameters ---------- layers : list of :class:`theanets.layers.Layer` A list of network layers to retrieve parameters from. patterns : sequence of str A sequence of glob-style patterns to match against. Any parameter matching any pattern in this sequence will be included in the match. Yields ------ matches : pair of str, theano expression Generates a sequence of (name, expression) pairs. The name is the name of the parameter that matched, and the expression represents the
python
{ "resource": "" }
q275313
from_kwargs
test
def from_kwargs(graph, **kwargs): '''Construct common regularizers from a set of keyword arguments. Keyword arguments not listed below will be passed to :func:`Regularizer.build` if they specify the name of a registered :class:`Regularizer`. Parameters ---------- graph : :class:`theanets.graph.Network` A network graph to regularize. regularizers : dict or tuple/list of :class:`Regularizer`, optional If this is a list or a tuple, the contents of the list will be returned as the regularizers. This is to permit custom lists of regularizers to be passed easily. If this is a dict, its contents will be added to the other keyword arguments passed in. rng : int or theano RandomStreams, optional If an integer is provided, it will be used to seed the random number generators for the dropout or noise regularizers. If a theano RandomStreams object is provided, it will be used directly. Defaults to 13. input_dropout : float, optional Apply dropout to input layers in the network graph, with this dropout rate. Defaults to 0 (no dropout). hidden_dropout : float, optional Apply dropout to hidden layers in the network graph, with this dropout rate. Defaults to 0 (no dropout). output_dropout : float, optional Apply dropout to the output layer in the network graph, with this dropout rate. Defaults to 0 (no dropout). input_noise : float, optional Apply noise to input layers in the network graph, with this standard deviation. Defaults to 0 (no noise). hidden_noise : float, optional Apply noise to hidden layers in the network graph, with this standard deviation. Defaults to 0 (no noise). output_noise : float, optional Apply noise to the output layer in the network graph, with this standard deviation. Defaults to 0 (no noise). Returns ------- regs : list of :class:`Regularizer` A list of regularizers to apply to the given network graph. ''' if 'regularizers' in kwargs: regs = kwargs['regularizers'] if isinstance(regs, (tuple, list)): return regs if isinstance(regs, dict): kwargs.update(regs) regs = [] rng = kwargs.get('rng', 13) def pattern(ls): return tuple(l.output_name for l in ls) inputs =
python
{ "resource": "" }
q275314
Loss.variables
test
def variables(self): '''A list of Theano variables used in this loss.''' result = [self._target] if self._weights is not
python
{ "resource": "" }
q275315
CrossEntropy.accuracy
test
def accuracy(self, outputs): '''Build a Theano expression for computing the accuracy of graph output. Parameters ---------- outputs : dict of Theano expressions A dictionary mapping network output names to Theano expressions representing the outputs of a computation graph. Returns ------- acc : Theano expression A Theano expression representing the accuracy of the output compared to the target data.
python
{ "resource": "" }
q275316
Recurrent._scan
test
def _scan(self, inputs, outputs, name='scan', step=None, constants=None): '''Helper method for defining a basic loop in theano. Parameters ---------- inputs : sequence of theano expressions Inputs to the scan operation. outputs : sequence of output specifiers Specifiers for the outputs of the scan operation. This should be a sequence containing: - None for values that are output by the scan but not tapped as inputs, - an integer or theano scalar (``ndim == 0``) indicating the batch size for initial zero state, - a theano tensor variable (``ndim > 0``) containing initial state data, or - a dictionary containing a full output specifier. See ``outputs_info`` in the Theano documentation for ``scan``. name : str, optional Name of the scan variable to create. Defaults to ``'scan'``. step : callable, optional The callable to apply in the loop. Defaults to :func:`self._step`. constants : sequence of tensor, optional A sequence of parameters, if any, needed by the step function. Returns ------- output(s) : theano expression(s) Theano expression(s) representing output(s) from the scan. updates : sequence of update tuples
python
{ "resource": "" }
q275317
build
test
def build(name, layer, **kwargs): '''Construct an activation function by name. Parameters ---------- name : str or :class:`Activation` The name of the type of activation function to build, or an already-created instance of an activation function. layer : :class:`theanets.layers.Layer` The layer to which this activation will be applied. kwargs : dict Additional named arguments to pass to the activation constructor. Returns ------- activation : :class:`Activation` A neural network activation function instance. ''' if isinstance(name, Activation): return name if '+' in name: return
python
{ "resource": "" }
q275318
SampleTrainer.reservoir
test
def reservoir(xs, n, rng): '''Select a random sample of n items from xs.''' pool = [] for i, x in enumerate(xs): if len(pool) < n: pool.append(x / np.linalg.norm(x)) continue j = rng.randint(i + 1) if j < n: pool[j] = x / np.linalg.norm(x) # if the pool still has fewer than n items, pad with distorted random # duplicates from the source data.
python
{ "resource": "" }
q275319
Network.set_loss
test
def set_loss(self, *args, **kwargs): '''Clear the current loss functions from the network and add a new one. All parameters and keyword arguments are passed to :func:`add_loss`
python
{ "resource": "" }
q275320
Network.itertrain
test
def itertrain(self, train, valid=None, algo='rmsprop', subalgo='rmsprop', save_every=0, save_progress=None, **kwargs): '''Train our network, one batch at a time. This method yields a series of ``(train, valid)`` monitor pairs. The ``train`` value is a dictionary mapping names to monitor values evaluated on the training dataset. The ``valid`` value is also a dictionary mapping names to values, but these values are evaluated on the validation dataset. Because validation might not occur every training iteration, the validation monitors might be repeated for multiple training iterations. It is probably most helpful to think of the validation monitors as being the "most recent" values that have been computed. After training completes, the network attribute of this class will contain the trained network parameters. Parameters ---------- train : :class:`Dataset <downhill.dataset.Dataset>` or list A dataset to use when training the network. If this is a ``downhill.Dataset`` instance, it will be used directly as the training dataset. If it is a list of numpy arrays or a list of callables, it will be converted to a ``downhill.Dataset`` and then used as the training set. valid : :class:`Dataset <downhill.dataset.Dataset>` or list, optional If this is provided, it will be used as a validation dataset. If not provided, the training set will be used for validation. (This is not recommended!) algo : str, optional An optimization algorithm to use for training our network. If not provided, :class:`RMSProp <downhill.adaptive.RMSProp>` will be used. subalgo : str, optional An optimization algorithm to use for a trainer that requires a "sub-algorithm," such as an unsupervised pretrainer. Defaults to :class:`RMSProp <downhill.adaptive.RMSProp>`. save_every : int or float, optional If this is nonzero and ``save_progress`` is not None, then the model being trained will be saved periodically. If this is a float, it is treated as a number of minutes to wait between savings. If it is an int, it is treated as the number of training epochs to wait between savings. Defaults to 0. save_progress : str or file handle, optional If this is not None, and ``save_every`` is nonzero, then save the model periodically during training. This parameter gives either (a) the full path of a file to save the model, or (b) a file-like object where the model should be saved. If it is a string and the given name contains a "{}" format specifier, it will be filled with the integer Unix timestamp at the time the model is saved. Defaults to None, which does not save models. Yields ------ training : dict A dictionary of monitor values computed using the training dataset, at the conclusion of training. This dictionary will at least contain a 'loss' key that indicates the value of the loss function. Other keys may be available depending on the trainer being used. validation : dict A dictionary of monitor values computed using the validation dataset, at the conclusion of training. ''' if 'rng' not in kwargs: kwargs['rng'] = self._rng def create_dataset(data, **kwargs): name = kwargs.get('name', 'dataset') s = '{}_batches'.format(name) return downhill.Dataset( data, name=name, batch_size=kwargs.get('batch_size', 32), iteration_size=kwargs.get('iteration_size', kwargs.get(s)), axis=kwargs.get('axis', 0), rng=kwargs['rng']) # set up datasets ... if valid
python
{ "resource": "" }
q275321
Network.train
test
def train(self, *args, **kwargs): '''Train the network until the trainer converges. All arguments are passed to :func:`itertrain`. Returns ------- training : dict A dictionary of monitor values computed using the training dataset, at the conclusion of training. This dictionary will at least contain a 'loss' key that indicates the value of the loss function. Other keys may be available depending on the trainer being used. validation : dict
python
{ "resource": "" }
q275322
Network._hash
test
def _hash(self, regularizers=()): '''Construct a string key for representing a computation graph. This key will be unique for a given (a) network topology, (b) set of losses, and (c) set of regularizers. Returns ------- key : str A hash representing the computation graph for the current network. ''' def add(s): h.update(str(s).encode('utf-8')) h = hashlib.md5() for l in self.layers: add('{}{}{}'.format(l.__class__.__name__,
python
{ "resource": "" }
q275323
Network.build_graph
test
def build_graph(self, regularizers=()): '''Connect the layers in this network to form a computation graph. Parameters ---------- regularizers : list of :class:`theanets.regularizers.Regularizer` A list of the regularizers to apply while building the computation graph. Returns ------- outputs : list of Theano variables A list of expressions giving the output of each layer in the graph. updates : list of update tuples A list of updates that should be performed by a Theano function that computes something using this graph. ''' key = self._hash(regularizers) if key not in self._graphs: util.log('building computation graph') for loss in self.losses: loss.log() for reg in regularizers:
python
{ "resource": "" }
q275324
Network.inputs
test
def inputs(self): '''A list of Theano variables for feedforward computations.'''
python
{ "resource": "" }
q275325
Network.variables
test
def variables(self): '''A list of Theano variables for loss computations.''' result = self.inputs seen = set(i.name for i in result) for loss in self.losses: for v in loss.variables:
python
{ "resource": "" }
q275326
Network.find
test
def find(self, which, param): '''Get a parameter from a layer in the network. Parameters ---------- which : int or str The layer that owns the parameter to return. If this is an integer, then 0 refers to the input layer, 1 refers to the first hidden layer, 2 to the second, and so on. If this is a string, the layer with the corresponding name, if any,
python
{ "resource": "" }
q275327
Network.feed_forward
test
def feed_forward(self, x, **kwargs): '''Compute a forward pass of all layers from the given input. All keyword arguments are passed directly to :func:`build_graph`. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing data to be fed into the network. Multiple examples are arranged as rows in this array, with columns containing the variables for each example. Returns ------- layers : list of ndarray (num-examples, num-units) The activation values of each layer in the
python
{ "resource": "" }
q275328
Network.predict
test
def predict(self, x, **kwargs): '''Compute a forward pass of the inputs, returning the network output. All keyword arguments end up being passed to :func:`build_graph`. Parameters ---------- x : ndarray (num-examples, num-variables) An array containing data to be fed into the network. Multiple examples are arranged as rows in this array, with columns containing the variables for each example. Returns ------- y : ndarray (num-examples,
python
{ "resource": "" }
q275329
Network.score
test
def score(self, x, y, w=None, **kwargs): '''Compute R^2 coefficient of determination for a given labeled input. Parameters ---------- x : ndarray (num-examples, num-inputs) An array containing data to be fed into the network. Multiple examples are arranged as rows in this array, with columns containing the variables for each example. y : ndarray (num-examples, num-outputs) An array containing expected target data for the network. Multiple examples are arranged as rows in this array, with columns containing the variables for
python
{ "resource": "" }
q275330
Network.save
test
def save(self, filename_or_handle): '''Save the state of this network to a pickle file on disk. Parameters ---------- filename_or_handle : str or file handle Save the state of this network to a pickle file. If this parameter is a string, it names the file where the pickle will be saved. If it is a file-like object, this object will be used for writing the pickle. If the filename ends in ".gz" then the output will automatically be gzipped. ''' if isinstance(filename_or_handle, util.basestring):
python
{ "resource": "" }
q275331
Network.load
test
def load(cls, filename_or_handle): '''Load a saved network from disk. Parameters ---------- filename_or_handle : str or file handle Load the state of this network from a pickle file. If this parameter is a string, it names the file from which the pickle will be loaded. If it is a file-like object, this object will be used for reading the pickle. If the filename ends in ".gz" then the input will automatically be gunzipped. ''' assert not isinstance(cls, Network), \ 'cannot load an instance! say instead: net = Network.load(source)' if isinstance(filename_or_handle,
python
{ "resource": "" }
q275332
Network.loss
test
def loss(self, **kwargs): '''Return a variable representing the regularized loss for this network. The regularized loss includes both the :ref:`loss computation <losses>` for the network as well as any :ref:`regularizers <regularizers>` that are in place. Keyword arguments are passed directly to :func:`theanets.regularizers.from_kwargs`. Returns ------- loss : Theano expression A Theano expression representing the loss of this network.
python
{ "resource": "" }
q275333
Network.updates
test
def updates(self, **kwargs): '''Return expressions to run as updates during network training. Returns ------- updates : list of (parameter, expression) pairs A list of named parameter update expressions for this network. '''
python
{ "resource": "" }
q275334
Layer.output_size
test
def output_size(self): '''Number of "neurons" in this layer's default output.''' shape = self.output_shape if shape is None: raise util.ConfigurationError(
python
{ "resource": "" }
q275335
Layer.connect
test
def connect(self, inputs): '''Create Theano variables representing the outputs of this layer. Parameters ---------- inputs : dict of Theano expressions Symbolic inputs to this layer, given as a dictionary mapping string names to Theano expressions. Each string key should be of the form "{layer_name}:{output_name}" and refers to a specific output from a specific layer in the graph. Returns ------- outputs : dict A dictionary mapping names to Theano expressions for the outputs from this layer. updates : sequence of (parameter, expression) tuples
python
{ "resource": "" }
q275336
Layer.bind
test
def bind(self, graph, reset=True, initialize=True): '''Bind this layer into a computation graph. This method is a wrapper for performing common initialization tasks. It calls :func:`resolve`, :func:`setup`, and :func:`log`. Parameters ---------- graph : :class:`Network <theanets.graph.Network>` A computation network in which this layer is to be bound. reset : bool, optional If ``True`` (the default), reset the resolved layers for this layer.
python
{ "resource": "" }
q275337
Layer.resolve_inputs
test
def resolve_inputs(self, layers): '''Resolve the names of inputs for this layer into shape tuples. Parameters ---------- layers : list of :class:`Layer` A list of the layers that are available for resolving inputs. Raises ------ theanets.util.ConfigurationError : If an input cannot be resolved. ''' resolved = {}
python
{ "resource": "" }
q275338
Layer.resolve_outputs
test
def resolve_outputs(self): '''Resolve the names of outputs for this layer into shape tuples.''' input_shape = None for i, shape in enumerate(self._input_shapes.values()): if i == 0: input_shape = shape if len(input_shape) != len(shape) or any( a is not None and b is not None and a != b for a, b in zip(input_shape[:-1], shape[:-1])): raise util.ConfigurationError( 'layer "{}" incompatible input shapes {}' .format(self.name, self._input_shapes))
python
{ "resource": "" }
q275339
Layer.log
test
def log(self): '''Log some information about this layer.''' inputs = ', '.join('"{0}" {1}'.format(*ns) for ns in self._input_shapes.items())
python
{ "resource": "" }
q275340
Layer.log_params
test
def log_params(self): '''Log information about this layer's parameters.''' total = 0 for p in self.params: shape = p.get_value().shape
python
{ "resource": "" }
q275341
Layer._fmt
test
def _fmt(self, string): '''Helper method to format our name into a string.''' if '{' not in string:
python
{ "resource": "" }
q275342
Layer._resolve_shape
test
def _resolve_shape(self, name, layers): '''Given a list of layers, find the layer output with the given name. Parameters ---------- name : str Name of a layer to resolve. layers : list of :class:`theanets.layers.base.Layer` A list of layers to search in. Raises ------ util.ConfigurationError : If there is no such layer, or if there are more than one. Returns ------- name : str The fully-scoped name of the desired output. shape : tuple of None and/or int
python
{ "resource": "" }
q275343
Layer.find
test
def find(self, key): '''Get a shared variable for a parameter by name. Parameters ---------- key : str or int The name of the parameter to look up, or the index of the parameter in our parameter list. These are both dependent on the implementation of the layer. Returns ------- param : shared variable A shared variable containing values for the given parameter. Raises
python
{ "resource": "" }
q275344
Layer.add_bias
test
def add_bias(self, name, size, mean=0, std=1): '''Helper method to create a new bias vector. Parameters ---------- name : str Name of the parameter to add. size : int Size of the bias vector. mean : float, optional
python
{ "resource": "" }
q275345
Layer.to_spec
test
def to_spec(self): '''Create a specification dictionary for this layer. Returns ------- spec : dict A dictionary specifying the configuration of this layer. ''' spec = dict(**self.kwargs) spec.update(
python
{ "resource": "" }
q275346
LogGabor.loggabor
test
def loggabor(self, x_pos, y_pos, sf_0, B_sf, theta, B_theta, preprocess=True): """ Returns the envelope of a LogGabor Note that the convention for coordinates follows that of matrices: the origin is at the top left of the image, and coordinates are first the rows (vertical axis, going down) then the columns (horizontal axis, going right). """ env = np.multiply(self.band(sf_0, B_sf), self.orientation(theta, B_theta)) if not(x_pos==0.) and not(y_pos==0.): # bypass translation whenever none is needed env = env.astype(np.complex128) * self.trans(x_pos*1., y_pos*1.) if
python
{ "resource": "" }
q275347
LogGabor.loggabor_image
test
def loggabor_image(self, x_pos, y_pos, theta, sf_0, phase, B_sf, B_theta): """ Returns the image of a LogGabor Note that the convention for coordinates follows that of matrices: the origin is at the top left of the image, and coordinates are first the rows (vertical axis, going down) then the columns (horizontal axis, going
python
{ "resource": "" }
q275348
TextGrid.add_tier
test
def add_tier(self, name, tier_type='IntervalTier', number=None): """Add an IntervalTier or a TextTier at the specified location. :param str name: Name of the tier, duplicate names are allowed. :param str tier_type: Type of the tier. :param int number: Place to insert the tier, when ``None`` the number is generated and the tier will be placed on the bottom. :returns: The created tier. :raises ValueError: If the number is out of bounds. """ if number is None: number = 1 if not self.tiers else len(self.tiers)+1
python
{ "resource": "" }
q275349
TextGrid.remove_tier
test
def remove_tier(self, name_num): """Remove a tier, when multiple tiers exist with that name only the first is removed. :param name_num: Name or number of the tier to remove. :type name_num: int or str :raises IndexError: If there is no tier with that number.
python
{ "resource": "" }
q275350
TextGrid.get_tier
test
def get_tier(self, name_num): """Gives a tier, when multiple tiers exist with that name only the first is returned. :param name_num: Name or number of the tier to return. :type name_num: int or str :returns: The tier. :raises IndexError:
python
{ "resource": "" }
q275351
TextGrid.to_eaf
test
def to_eaf(self, skipempty=True, pointlength=0.1): """Convert the object to a pympi.Elan.Eaf object :param int pointlength: Length of respective interval from points in seconds :param bool skipempty: Skip the empty annotations :returns: :class:`pympi.Elan.Eaf` object :raises ImportError: If the Eaf module can't be loaded. :raises ValueError: If the pointlength is not strictly positive. """ from pympi.Elan import Eaf eaf_out = Eaf() if pointlength <= 0: raise ValueError('Pointlength should be strictly positive')
python
{ "resource": "" }
q275352
Tier.add_point
test
def add_point(self, point, value, check=True): """Add a point to the TextTier :param int point: Time of the point. :param str value: Text of the point. :param bool check: Flag to check for overlap. :raises Exception: If overlap or wrong tiertype. """ if self.tier_type != 'TextTier':
python
{ "resource": "" }
q275353
Tier.add_interval
test
def add_interval(self, begin, end, value, check=True): """Add an interval to the IntervalTier. :param float begin: Start time of the interval. :param float end: End time of the interval. :param str value: Text of the interval. :param bool check: Flag to check for overlap. :raises Exception: If overlap, begin > end or wrong tiertype. """ if self.tier_type != 'IntervalTier': raise Exception('Tiertype must be IntervalTier') if check: if
python
{ "resource": "" }
q275354
Tier.remove_interval
test
def remove_interval(self, time): """Remove an interval, if no interval is found nothing happens. :param int time: Time of the interval. :raises TierTypeException: If the tier is not an IntervalTier. """ if self.tier_type != 'IntervalTier':
python
{ "resource": "" }
q275355
Tier.remove_point
test
def remove_point(self, time): """Remove a point, if no point is found nothing happens. :param int time: Time of the point. :raises TierTypeException: If the tier is not a TextTier. """ if self.tier_type != 'TextTier':
python
{ "resource": "" }
q275356
Tier.get_intervals
test
def get_intervals(self, sort=False): """Give all the intervals or points. :param bool sort: Flag for yielding the intervals or points sorted. :yields: All the intervals
python
{ "resource": "" }
q275357
Tier.get_all_intervals
test
def get_all_intervals(self): """Returns the true list of intervals including the empty intervals.""" ints = sorted(self.get_intervals(True)) if self.tier_type == 'IntervalTier': if not ints: ints.append((self.xmin, self.xmax, '')) else: if ints[0][0] > self.xmin: ints.insert(0, (self.xmin, ints[0][0], '')) if ints[-1][1] < self.xmax:
python
{ "resource": "" }
q275358
indent
test
def indent(el, level=0): """Function to pretty print the xml, meaning adding tabs and newlines. :param ElementTree.Element el: Current element. :param int level: Current level. """ i = '\n' + level * '\t' if len(el): if not el.text or not el.text.strip(): el.text = i+'\t' if not el.tail or not el.tail.strip(): el.tail = i for elem in el:
python
{ "resource": "" }
q275359
Eaf.add_annotation
test
def add_annotation(self, id_tier, start, end, value='', svg_ref=None): """Add an annotation. :param str id_tier: Name of the tier. :param int start: Start time of the annotation. :param int end: End time of the annotation. :param str value: Value of the annotation. :param str svg_ref: Svg reference. :raises KeyError: If the tier is non existent. :raises ValueError: If one of the values is negative or start is bigger than end or if the tier already contains ref annotations. """ if self.tiers[id_tier][1]: raise ValueError('Tier already contains ref annotations...') if start == end: raise ValueError('Annotation length is zero...') if start > end:
python
{ "resource": "" }
q275360
Eaf.add_cv_entry
test
def add_cv_entry(self, cv_id, cve_id, values, ext_ref=None): """Add an entry to a controlled vocabulary. :param str cv_id: Name of the controlled vocabulary to add an entry. :param str cve_id: Name of the entry. :param list values: List of values of the form: ``(value, lang_ref, description)`` where description can be ``None``. :param str ext_ref: External reference. :throws KeyError: If there is no controlled vocabulary with that id. :throws ValueError: If a language in one of the entries doesn't exist.
python
{ "resource": "" }
q275361
Eaf.add_cv_description
test
def add_cv_description(self, cv_id, lang_ref, description=None): """Add a description to a controlled vocabulary. :param str cv_id: Name of the controlled vocabulary to add the description. :param str lang_ref: Language reference. :param str description: Description, this can be none. :throws KeyError: If there is no controlled vocabulary with that id.
python
{ "resource": "" }
q275362
Eaf.add_external_ref
test
def add_external_ref(self, eid, etype, value): """Add an external reference. :param str eid: Name of the external reference. :param str etype: Type of the external reference, has to be in ``['iso12620', 'ecv', 'cve_id', 'lexen_id', 'resource_url']``. :param str value: Value of the external reference. :throws KeyError: if etype is not in the
python
{ "resource": "" }
q275363
Eaf.add_language
test
def add_language(self, lang_id, lang_def=None, lang_label=None): """Add a language. :param str lang_id: ID of the language. :param str lang_def: Definition of the language (preferably ISO-639-3).
python
{ "resource": "" }
q275364
Eaf.add_lexicon_ref
test
def add_lexicon_ref(self, lrid, name, lrtype, url, lexicon_id, lexicon_name, datcat_id=None, datcat_name=None): """Add lexicon reference. :param str lrid: Lexicon reference internal ID. :param str name: Lexicon reference display name. :param str lrtype: Lexicon reference service type. :param str url: Lexicon reference service location. :param str lexicon_id: Lexicon reference service id. :param str lexicon_name: Lexicon reference service name. :param str datcat_id: Lexicon reference identifier of data category. :param str datcat_name: Lexicon reference name of data category. """
python
{ "resource": "" }
q275365
Eaf.add_linguistic_type
test
def add_linguistic_type(self, lingtype, constraints=None, timealignable=True, graphicreferences=False, extref=None, param_dict=None): """Add a linguistic type. :param str lingtype: Name of the linguistic type. :param str constraints: Constraint name. :param bool timealignable: Flag for time alignable. :param bool graphicreferences: Flag for graphic references. :param str extref: External reference. :param dict param_dict: TAG attributes, when this is not ``None`` it will ignore all other options. Please only use dictionaries coming from the :func:`get_parameters_for_linguistic_type` :raises KeyError: If a constraint is not defined """
python
{ "resource": "" }
q275366
Eaf.add_linked_file
test
def add_linked_file(self, file_path, relpath=None, mimetype=None, time_origin=None, ex_from=None): """Add a linked file. :param str file_path: Path of the file. :param str relpath: Relative path of the file. :param str mimetype: Mimetype of the file, if ``None`` it tries to guess it according to the file extension which currently only works for wav, mpg, mpeg and xml. :param int time_origin: Time origin for the media file. :param str ex_from: Extracted from field. :raises KeyError: If mimetype had to be guessed and a non standard
python
{ "resource": "" }
q275367
Eaf.add_locale
test
def add_locale(self, language_code, country_code=None, variant=None): """Add a locale. :param str language_code: The language code of
python
{ "resource": "" }
q275368
Eaf.add_secondary_linked_file
test
def add_secondary_linked_file(self, file_path, relpath=None, mimetype=None, time_origin=None, assoc_with=None): """Add a secondary linked file. :param str file_path: Path of the file. :param str relpath: Relative path of the file. :param str mimetype: Mimetype of the file, if ``None`` it tries to guess it according to the file extension which currently only works for wav, mpg, mpeg and xml. :param int time_origin: Time origin for the media file. :param str assoc_with: Associated with field. :raises KeyError: If mimetype had to be guessed and a non standard
python
{ "resource": "" }
q275369
Eaf.add_tier
test
def add_tier(self, tier_id, ling='default-lt', parent=None, locale=None, part=None, ann=None, language=None, tier_dict=None): """Add a tier. When no linguistic type is given and the default linguistic type is unavailable then the assigned linguistic type will be the first in the list. :param str tier_id: Name of the tier. :param str ling: Linguistic type, if the type is not available it will warn and pick the first available type. :param str parent: Parent tier name. :param str locale: Locale, if the locale is not present this option is ignored and the locale will not be set. :param str part: Participant. :param str ann: Annotator. :param str language: Language, if the language is not present this option is ignored and the language will not be set. :param dict tier_dict: TAG attributes, when this is not ``None`` it will ignore all other options. Please only use dictionaries coming from the
python
{ "resource": "" }
q275370
Eaf.clean_time_slots
test
def clean_time_slots(self): """Clean up all unused timeslots. .. warning:: This can and will take time for larger tiers. When you want to do a lot of operations on a lot of tiers please unset the flags for cleaning in the functions so that the cleaning is only performed afterwards.
python
{ "resource": "" }
q275371
Eaf.extract
test
def extract(self, start, end): """Extracts the selected time frame as a new object. :param int start: Start time. :param int end: End time. :returns: :class:`pympi.Elan.Eaf` object containing the extracted frame. """ from copy import deepcopy eaf_out = deepcopy(self)
python
{ "resource": "" }
q275372
Eaf.generate_annotation_id
test
def generate_annotation_id(self): """Generate the next annotation id, this function is mainly used internally. """ if not self.maxaid: valid_anns = [int(''.join(filter(str.isdigit, a)))
python
{ "resource": "" }
q275373
Eaf.generate_ts_id
test
def generate_ts_id(self, time=None): """Generate the next timeslot id, this function is mainly used internally. :param int time: Initial time to assign to the timeslot. :raises ValueError: If the time is negative. """ if time and time < 0: raise ValueError('Time is negative...') if not self.maxts: valid_ts = [int(''.join(filter(str.isdigit, a)))
python
{ "resource": "" }
q275374
Eaf.get_child_tiers_for
test
def get_child_tiers_for(self, id_tier): """Give all child tiers for a tier. :param str id_tier: Name of the tier. :returns: List of all children :raises KeyError: If the tier is non existent. """ self.tiers[id_tier]
python
{ "resource": "" }
q275375
Eaf.get_full_time_interval
test
def get_full_time_interval(self): """Give the full time interval of the file. Note that the real interval can be longer because the sound file attached can be longer. :returns: Tuple of the form: ``(min_time, max_time)``. """
python
{ "resource": "" }
q275376
Eaf.get_ref_annotation_data_after_time
test
def get_ref_annotation_data_after_time(self, id_tier, time): """Give the ref annotation after a time. If an annotation overlaps with ``time`` that annotation will be returned. :param str id_tier: Name of the tier. :param int time: Time to get the annotation after. :returns: Annotation after that time in a list :raises KeyError: If the tier is
python
{ "resource": "" }
q275377
Eaf.get_ref_annotation_data_before_time
test
def get_ref_annotation_data_before_time(self, id_tier, time): """Give the ref annotation before a time. If an annotation overlaps with ``time`` that annotation will be returned. :param str id_tier: Name of the tier. :param int time: Time to get the annotation before. :returns: Annotation before that time in a list
python
{ "resource": "" }
q275378
Eaf.get_tier_ids_for_linguistic_type
test
def get_tier_ids_for_linguistic_type(self, ling_type, parent=None): """Give a list of all tiers matching a linguistic type. :param str ling_type: Name of the linguistic type. :param str parent: Only match tiers from this parent, when ``None`` this option will be ignored. :returns: List of tiernames. :raises KeyError: If a tier or linguistic type is non existent. """
python
{ "resource": "" }
q275379
Eaf.merge_tiers
test
def merge_tiers(self, tiers, tiernew=None, gapt=0, sep='_', safe=False): """Merge tiers into a new tier and when the gap is lower than the threshold glue the annotations together. :param list tiers: List of tier names. :param str tiernew: Name for the new tier, if ``None`` the name will be generated. :param int gapt: Threshold for the gaps, if this is set to 10 it means that all gaps below 10 are ignored. :param str sep: Separator for the merged annotations. :param bool safe: Ignore zero length annotations (when working with possible malformed data). :returns: Name of the created tier. :raises KeyError: If a tier is non existent. """ if tiernew is None: tiernew = u'{}_merged'.format('_'.join(tiers)) self.add_tier(tiernew) aa = [(sys.maxsize, sys.maxsize, None)] + sorted(( a for t in tiers for a in self.get_annotation_data_for_tier(t)), reverse=True)
python
{ "resource": "" }
q275380
Eaf.remove_all_annotations_from_tier
test
def remove_all_annotations_from_tier(self, id_tier, clean=True): """Remove all annotations from a tier. :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. """ for aid in self.tiers[id_tier][0]: del(self.annotations[aid])
python
{ "resource": "" }
q275381
Eaf.remove_cv_description
test
def remove_cv_description(self, cv_id, lang_ref): """Remove a controlled vocabulary description. :param str cv_id: Name of the controlled vocabulary. :param str lang_ref: Language reference of the description. :throws KeyError: If there is no controlled vocabulary with that name. """
python
{ "resource": "" }
q275382
Eaf.remove_license
test
def remove_license(self, name=None, url=None): """Remove all licenses matching both key and value. :param str name: Name of the license. :param str url: URL of the license. """ for k, v in self.licenses[:]:
python
{ "resource": "" }
q275383
Eaf.remove_linked_files
test
def remove_linked_files(self, file_path=None, relpath=None, mimetype=None, time_origin=None, ex_from=None): """Remove all linked files that match all the criteria; criteria that are ``None`` are ignored. :param str file_path: Path of the file. :param str relpath: Relative filepath. :param str mimetype: Mimetype of the file. :param int time_origin: Time origin. :param str ex_from: Extracted from. """ for attrib in self.media_descriptors[:]: if file_path is not None and attrib['MEDIA_URL'] != file_path: continue
python
{ "resource": "" }
q275384
Eaf.remove_property
test
def remove_property(self, key=None, value=None): """Remove all properties matching both key and value. :param str key: Key of the property. :param str value: Value of the property. """ for k, v in self.properties[:]:
python
{ "resource": "" }
q275385
Eaf.remove_ref_annotation
test
def remove_ref_annotation(self, id_tier, time): """Remove a reference annotation. :param str id_tier: Name of tier. :param int time: Time of the referenced annotation :raises KeyError: If the tier is non existent. :returns: Number of removed annotations. """ removed = 0 bucket = [] for aid, (ref, value, _, _) in self.tiers[id_tier][1].items(): begin, end, rvalue, _ = self.tiers[self.annotations[ref]][0][ref] begin = self.timeslots[begin]
python
{ "resource": "" }
q275386
Eaf.remove_secondary_linked_files
test
def remove_secondary_linked_files(self, file_path=None, relpath=None, mimetype=None, time_origin=None, assoc_with=None): """Remove all secondary linked files that match all the criteria; criteria that are ``None`` are ignored. :param str file_path: Path of the file. :param str relpath: Relative filepath. :param str mimetype: Mimetype of the file. :param int time_origin: Time origin. :param str assoc_with:
python
{ "resource": "" }
q275387
Eaf.remove_tier
test
def remove_tier(self, id_tier, clean=True): """Remove a tier. :param str id_tier: Name of the tier. :param bool clean: Flag to also clean the timeslots. :raises KeyError:
python
{ "resource": "" }
q275388
Eaf.remove_tiers
test
def remove_tiers(self, tiers): """Remove multiple tiers, note that this is a lot faster than removing them individually because of the delayed cleaning of timeslots.
python
{ "resource": "" }
q275389
Eaf.rename_tier
test
def rename_tier(self, id_from, id_to): """Rename a tier. Note that this also renames the child tiers that have the tier as a parent. :param str id_from: Original name of the tier. :param str id_to: Target name of the tier. :throws KeyError: If the tier doesn't exist. """
python
{ "resource": "" }
q275390
Eaf.shift_annotations
test
def shift_annotations(self, time): """Shift all annotations in time. Annotations that are in the beginning and a left shift is applied can be squashed or discarded. :param int time: Time shift width, negative numbers make a left shift. :returns: Tuple of a list of squashed annotations and a list of removed annotations in the format: ``(tiername, start, end, value)``. """ total_re = [] total_sq = [] for name, tier in self.tiers.items(): squashed = [] for aid, (begin, end, value, _) in tier[0].items(): if self.timeslots[end]+time <= 0: squashed.append((name, aid)) elif self.timeslots[begin]+time < 0: total_sq.append((name, self.timeslots[begin], self.timeslots[end], value))
python
{ "resource": "" }
q275391
main
test
def main(): """Will be used to create the console script""" import optparse import sys import codecs import locale import six from .algorithm import get_display parser = optparse.OptionParser() parser.add_option('-e', '--encoding', dest='encoding', default='utf-8', type='string', help='Text encoding (default: utf-8)') parser.add_option('-u', '--upper-is-rtl', dest='upper_is_rtl', default=False, action='store_true', help="Treat upper case chars as strong 'R' " 'for debugging (default: False).') parser.add_option('-d', '--debug', dest='debug', default=False, action='store_true', help="Output to stderr steps taken with the algorithm") parser.add_option('-b', '--base-dir', dest='base_dir',
python
{ "resource": "" }
q275392
debug_storage
test
def debug_storage(storage, base_info=False, chars=True, runs=False): "Display debug information for the storage" import codecs import locale import sys if six.PY2: stderr = codecs.getwriter(locale.getpreferredencoding())(sys.stderr) else: stderr = sys.stderr caller = inspect.stack()[1][3] stderr.write('in %s\n' % caller) if base_info: stderr.write(u' base level : %d\n' % storage['base_level']) stderr.write(u' base dir : %s\n' % storage['base_dir']) if runs: stderr.write(u' runs : %s\n' % list(storage['runs'])) if chars: output = u' Chars : ' for _ch in storage['chars']: if _ch != '\n': output += _ch['ch'] else: output += 'C' stderr.write(output +
python
{ "resource": "" }
q275393
get_base_level
test
def get_base_level(text, upper_is_rtl=False): """Get the paragraph base embedding level. Returns 0 for LTR, 1 for RTL. `text` a unicode object. Set `upper_is_rtl` to True to treat upper case chars as strong 'R' for debugging (default: False). """ base_level = None prev_surrogate = False # P2 for _ch in text: # surrogate in case of ucs2 if _IS_UCS2 and (_SURROGATE_MIN <= ord(_ch) <= _SURROGATE_MAX): prev_surrogate = _ch continue elif prev_surrogate: _ch = prev_surrogate + _ch
python
{ "resource": "" }
q275394
get_embedding_levels
test
def get_embedding_levels(text, storage, upper_is_rtl=False, debug=False): """Get the paragraph base embedding level and direction, set the storage to the array of chars""" prev_surrogate = False base_level = storage['base_level'] # preset the storage's chars for _ch in text: if _IS_UCS2 and (_SURROGATE_MIN <= ord(_ch) <= _SURROGATE_MAX): prev_surrogate = _ch continue elif prev_surrogate: _ch = prev_surrogate + _ch prev_surrogate = False
python
{ "resource": "" }
q275395
explicit_embed_and_overrides
test
def explicit_embed_and_overrides(storage, debug=False): """Apply X1 to X9 rules of the Unicode algorithm. See http://unicode.org/reports/tr9/#Explicit_Levels_and_Directions """ overflow_counter = almost_overflow_counter = 0 directional_override = 'N' levels = deque() # X1 embedding_level = storage['base_level'] for _ch in storage['chars']: bidi_type = _ch['type'] level_func, override = X2_X5_MAPPINGS.get(bidi_type, (None, None)) if level_func: # So this is X2 to X5 # if we've passed EXPLICIT_LEVEL_LIMIT, note it and do nothing if overflow_counter != 0: overflow_counter += 1 continue new_level = level_func(embedding_level) if new_level < EXPLICIT_LEVEL_LIMIT: levels.append((embedding_level, directional_override)) embedding_level, directional_override = new_level, override elif embedding_level == EXPLICIT_LEVEL_LIMIT - 2: # The new level is invalid, but a valid level can still be # achieved if this level is 60 and we encounter an RLE or # RLO further on. So record that we 'almost' overflowed. almost_overflow_counter += 1 else: overflow_counter += 1 else: # X6 if bidi_type not in X6_IGNORED: _ch['level'] = embedding_level if directional_override != 'N': _ch['type'] = directional_override # X7 elif bidi_type == 'PDF': if overflow_counter: overflow_counter -= 1
python
{ "resource": "" }
q275396
calc_level_runs
test
def calc_level_runs(storage): """Split the storage to run of char types at the same level. Applies X10. See http://unicode.org/reports/tr9/#X10 """ # run level depends on the higher of the two levels on either side of # the boundary If the higher level is odd, the type is R; otherwise, # it is L storage['runs'].clear() chars = storage['chars'] # empty string ? if not chars: return def calc_level_run(b_l, b_r): return ['L', 'R'][max(b_l, b_r) % 2] first_char = chars[0] sor = calc_level_run(storage['base_level'], first_char['level']) eor = None
python
{ "resource": "" }
q275397
resolve_weak_types
test
def resolve_weak_types(storage, debug=False): """Resolve weak type rules W1 - W3. See: http://unicode.org/reports/tr9/#Resolving_Weak_Types """ for run in storage['runs']: prev_strong = prev_type = run['sor'] start, length = run['start'], run['length'] chars = storage['chars'][start:start+length] for _ch in chars: # W1. Examine each nonspacing mark (NSM) in the level run, and # change the type of the NSM to the type of the previous character. # If the NSM is at the start of the level run, it will get the type # of sor. bidi_type = _ch['type'] if bidi_type == 'NSM': _ch['type'] = bidi_type = prev_type # W2. Search backward from each instance of a European number until # the first strong type (R, L, AL, or sor) is found. If an AL is # found, change the type of the European number to Arabic number. if bidi_type == 'EN' and prev_strong == 'AL': _ch['type'] = 'AN' # update prev_strong if needed if bidi_type in ('R', 'L', 'AL'): prev_strong = bidi_type prev_type = _ch['type'] # W3. Change all ALs to R for _ch in chars: if _ch['type'] == 'AL': _ch['type'] = 'R' # W4. A single European separator between two European numbers changes # to a European number. A single common separator between two numbers of # the same type changes to that type. for idx in range(1, len(chars) - 1): bidi_type = chars[idx]['type'] prev_type = chars[idx-1]['type'] next_type = chars[idx+1]['type'] if bidi_type == 'ES' and (prev_type == next_type == 'EN'): chars[idx]['type'] = 'EN' if bidi_type == 'CS' and prev_type == next_type and \ prev_type in ('AN', 'EN'): chars[idx]['type'] = prev_type # W5. A sequence of European terminators adjacent to European numbers # changes to all European numbers. for idx in range(len(chars)):
python
{ "resource": "" }
q275398
resolve_neutral_types
test
def resolve_neutral_types(storage, debug): """Resolving neutral types. Implements N1 and N2 See: http://unicode.org/reports/tr9/#Resolving_Neutral_Types """ for run in storage['runs']: start, length = run['start'], run['length'] # use sor and eor chars = [{'type': run['sor']}] + storage['chars'][start:start+length] +\ [{'type': run['eor']}] total_chars = len(chars) seq_start = None for idx in range(total_chars): _ch = chars[idx] if _ch['type'] in ('B', 'S', 'WS', 'ON'): # N1. A sequence of neutrals takes the direction of the # surrounding strong text if the text on both sides has the same # direction. European and Arabic numbers act as if they were R # in terms of their influence on neutrals. Start-of-level-run # (sor) and end-of-level-run (eor) are used at level run # boundaries. if seq_start is None: seq_start = idx prev_bidi_type = chars[idx-1]['type'] else: if seq_start is not None: next_bidi_type = chars[idx]['type'] if prev_bidi_type in ('AN', 'EN'): prev_bidi_type = 'R' if next_bidi_type in ('AN', 'EN'): next_bidi_type = 'R' for seq_idx in range(seq_start, idx):
python
{ "resource": "" }
q275399
reverse_contiguous_sequence
test
def reverse_contiguous_sequence(chars, line_start, line_end, highest_level, lowest_odd_level): """L2. From the highest level found in the text to the lowest odd level on each line, including intermediate levels not actually present in the text, reverse any contiguous sequence of characters that are at that level or higher. """ for level in range(highest_level, lowest_odd_level-1, -1): _start = _end = None for run_idx in range(line_start, line_end+1): run_ch = chars[run_idx] if run_ch['level'] >= level: if _start is None: _start = _end = run_idx else: _end = run_idx
python
{ "resource": "" }