Dataset schema: _id (string, 2-7 chars), title (string, 1-88 chars), partition (string, 3 classes), text (string, 75-19.8k chars), language (string, 1 class), meta_information (dict)
q277100
BoltArraySpark.repartition
test
def repartition(self, npartitions): """ Repartitions the underlying RDD Parameters ---------- npartitions : int Number of partitions to repartition the underlying RDD to """ rdd = self._rdd.repartition(npartitions) return self._constructor(rdd, ordered=False).__finalize__(self)
python
{ "resource": "" }
q277101
BoltArraySpark.stack
test
def stack(self, size=None): """ Aggregates records of a distributed array. Stacking should improve the performance of vectorized operations, but the resulting StackedArray object only exposes a restricted set of operations (e.g. map, reduce). The unstack method can be used to restore the full bolt array. Parameters ---------- size : int, optional, default=None The maximum size for each stack (number of original records), will aggregate groups of records per partition up to this size, if None will aggregate all records on each partition. Returns ------- StackedArray """ stk = StackedArray(self._rdd, shape=self.shape, split=self.split) return stk.stack(size)
python
{ "resource": "" }
q277102
BoltArraySpark._align
test
def _align(self, axis): """ Align spark bolt array so that axes for iteration are in the keys. This operation is applied before most functional operators. It ensures that the specified axes are valid, and swaps key/value axes so that functional operators can be applied over the correct records. Parameters ---------- axis: tuple[int] One or more axes that will be iterated over by a functional operator Returns ------- BoltArraySpark """ # ensure that the specified axes are valid inshape(self.shape, axis) # find the value axes that should be moved into the keys (axis >= split) tokeys = [(a - self.split) for a in axis if a >= self.split] # find the key axes that should be moved into the values (axis < split) tovalues = [a for a in range(self.split) if a not in axis] if tokeys or tovalues: return self.swap(tovalues, tokeys) else: return self
python
{ "resource": "" }
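A minimal standalone sketch of the key/value bookkeeping that _align performs (pure Python, no Spark needed): given the axes to iterate over and the key/value split, it computes which value axes must move into the keys (value-relative) and which key axes must move out.

def align_moves(axis, split):
    # value axes that must move into the keys (axis >= split), value-relative
    tokeys = [a - split for a in axis if a >= split]
    # key axes that must move into the values (axis < split)
    tovalues = [a for a in range(split) if a not in axis]
    return tokeys, tovalues

# iterating over axes (0, 2) of a split=2 array: value axis 2 moves into the
# keys (as value-relative index 0) and key axis 1 moves into the values
print(align_moves((0, 2), split=2))  # ([0], [1])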
q277103
BoltArraySpark.first
test
def first(self): """ Return the first element of an array """ from bolt.local.array import BoltArrayLocal rdd = self._rdd if self._ordered else self._rdd.sortByKey() return BoltArrayLocal(rdd.values().first())
python
{ "resource": "" }
q277104
BoltArraySpark._stat
test
def _stat(self, axis=None, func=None, name=None, keepdims=False): """ Compute a statistic over an axis. Can provide either a function (for use in a reduce) or a name (for use by a stat counter). Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes func : function, optional, default=None Function for reduce, see BoltArraySpark.reduce name : str A named statistic, see StatCounter keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ if axis is None: axis = list(range(len(self.shape))) axis = tupleize(axis) if func and not name: return self.reduce(func, axis, keepdims) if name and not func: from bolt.local.array import BoltArrayLocal swapped = self._align(axis) def reducer(left, right): return left.combine(right) counter = swapped._rdd.values()\ .mapPartitions(lambda i: [StatCounter(values=i, stats=name)])\ .treeReduce(reducer, depth=3) arr = getattr(counter, name) if keepdims: for i in axis: arr = expand_dims(arr, axis=i) return BoltArrayLocal(arr).toscalar() else: raise ValueError('Must specify either a function or a statistic name.')
python
{ "resource": "" }
q277105
BoltArraySpark.mean
test
def mean(self, axis=None, keepdims=False): """ Return the mean of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ return self._stat(axis, name='mean', keepdims=keepdims)
python
{ "resource": "" }
q277106
BoltArraySpark.var
test
def var(self, axis=None, keepdims=False): """ Return the variance of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ return self._stat(axis, name='variance', keepdims=keepdims)
python
{ "resource": "" }
q277107
BoltArraySpark.std
test
def std(self, axis=None, keepdims=False): """ Return the standard deviation of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ return self._stat(axis, name='stdev', keepdims=keepdims)
python
{ "resource": "" }
q277108
BoltArraySpark.sum
test
def sum(self, axis=None, keepdims=False): """ Return the sum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ from operator import add return self._stat(axis, func=add, keepdims=keepdims)
python
{ "resource": "" }
q277109
BoltArraySpark.max
test
def max(self, axis=None, keepdims=False): """ Return the maximum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ from numpy import maximum return self._stat(axis, func=maximum, keepdims=keepdims)
python
{ "resource": "" }
q277110
BoltArraySpark.min
test
def min(self, axis=None, keepdims=False): """ Return the minimum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes keepdims : boolean, optional, default=False Keep axis remaining after operation with size 1. """ from numpy import minimum return self._stat(axis, func=minimum, keepdims=keepdims)
python
{ "resource": "" }
q277111
BoltArraySpark.chunk
test
def chunk(self, size="150", axis=None, padding=None): """ Chunks records of a distributed array. Chunking breaks arrays into subarrays, using a specified chunk size along each value dimension. Can alternatively specify an average chunk byte size (in kilobytes) and the size of chunks (as ints) will be computed automatically. Parameters ---------- size : tuple, int, or str, optional, default = "150" A string giving the size in kilobytes, or a tuple with the size of chunks along each dimension. axis : int or tuple, optional, default = None One or more axes to chunk the array along; if None, all axes are used. padding : tuple or int, optional, default = None Number of elements per dimension that will overlap with the adjacent chunk. If a tuple, specifies padding along each chunked dimension; if an int, the same padding will be applied to all chunked dimensions. Returns ------- ChunkedArray """ if type(size) is not str: size = tupleize((size)) axis = tupleize((axis)) padding = tupleize((padding)) from bolt.spark.chunk import ChunkedArray chnk = ChunkedArray(rdd=self._rdd, shape=self._shape, split=self._split, dtype=self._dtype) return chnk._chunk(size, axis, padding)
python
{ "resource": "" }
q277112
BoltArraySpark.swap
test
def swap(self, kaxes, vaxes, size="150"): """ Swap axes from keys to values. This is the core operation underlying shape manipulation on the Spark bolt array. It exchanges an arbitrary set of axes between the keys and the values. If either is None, will only move axes in one direction (from keys to values, or values to keys). Keys moved to values will be placed immediately after the split; values moved to keys will be placed immediately before the split. Parameters ---------- kaxes : tuple Axes from keys to move to values vaxes : tuple Axes from values to move to keys size : tuple or int, optional, default = "150" Can either provide a string giving the size in kilobytes, or a tuple with the number of chunks along each value dimension being moved Returns ------- BoltArraySpark """ kaxes = asarray(tupleize(kaxes), 'int') vaxes = asarray(tupleize(vaxes), 'int') if type(size) is not str: size = tupleize(size) if len(kaxes) == self.keys.ndim and len(vaxes) == 0: raise ValueError('Cannot perform a swap that would ' 'end up with all data on a single key') if len(kaxes) == 0 and len(vaxes) == 0: return self from bolt.spark.chunk import ChunkedArray chunks = self.chunk(size) swapped = chunks.keys_to_values(kaxes).values_to_keys([v+len(kaxes) for v in vaxes]) barray = swapped.unchunk() return barray
python
{ "resource": "" }
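A hedged local-numpy analogy of what swap does to the axis order (shape bookkeeping only; the distributed version goes through ChunkedArray). Per the docstring above, keys moved to values land just after the split and values moved to keys just before it; vaxes are value-relative, as in swap.

import numpy as np

def local_swap(arr, split, kaxes, vaxes):
    kaxes = set(kaxes)
    vaxes = set(v + split for v in vaxes)          # value-relative -> global
    stay_keys = [a for a in range(split) if a not in kaxes]
    stay_values = [a for a in range(split, arr.ndim) if a not in vaxes]
    order = stay_keys + sorted(vaxes) + sorted(kaxes) + stay_values
    return arr.transpose(order), len(stay_keys) + len(vaxes)

a = np.zeros((2, 3, 4, 5))                         # split=2: keys (2, 3), values (4, 5)
b, split = local_swap(a, 2, kaxes=[1], vaxes=[0])  # move key axis 1 out, value axis 0 in
print(b.shape, split)                              # (2, 4, 3, 5) 2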
q277113
BoltArraySpark.transpose
test
def transpose(self, *axes): """ Return an array with the axes transposed. This operation will incur a swap unless the desired permutation can be obtained only by transposing the keys or the values. Parameters ---------- axes : None, tuple of ints, or n ints If None, will reverse axis order. """ if len(axes) == 0: p = arange(self.ndim-1, -1, -1) else: p = asarray(argpack(axes)) istransposeable(p, range(self.ndim)) split = self.split # compute the keys/value axes that need to be swapped new_keys, new_values = p[:split], p[split:] swapping_keys = sort(new_values[new_values < split]) swapping_values = sort(new_keys[new_keys >= split]) stationary_keys = sort(new_keys[new_keys < split]) stationary_values = sort(new_values[new_values >= split]) # compute the permutation that the swap causes p_swap = r_[stationary_keys, swapping_values, swapping_keys, stationary_values] # compute the extra permutation (p_x) on top of this that # needs to happen to get the full permutation desired p_swap_inv = argsort(p_swap) p_x = p_swap_inv[p] p_keys, p_values = p_x[:split], p_x[split:]-split # perform the swap and then the within key/value permutations arr = self.swap(swapping_keys, swapping_values-split) arr = arr.keys.transpose(tuple(p_keys.tolist())) arr = arr.values.transpose(tuple(p_values.tolist())) return arr
python
{ "resource": "" }
q277114
BoltArraySpark.swapaxes
test
def swapaxes(self, axis1, axis2): """ Return the array with two axes interchanged. Parameters ---------- axis1 : int The first axis to swap axis2 : int The second axis to swap """ p = list(range(self.ndim)) p[axis1] = axis2 p[axis2] = axis1 return self.transpose(p)
python
{ "resource": "" }
q277115
BoltArraySpark.reshape
test
def reshape(self, *shape): """ Return an array with the same data but a new shape. Currently only supports reshaping that independently reshapes the keys, or the values, or both. Parameters ---------- shape : tuple of ints, or n ints New shape """ new = argpack(shape) isreshapeable(new, self.shape) if new == self.shape: return self i = self._reshapebasic(new) if i == -1: raise NotImplementedError("Currently no support for reshaping between " "keys and values for BoltArraySpark") else: new_key_shape, new_value_shape = new[:i], new[i:] return self.keys.reshape(new_key_shape).values.reshape(new_value_shape)
python
{ "resource": "" }
q277116
BoltArraySpark._reshapebasic
test
def _reshapebasic(self, shape): """ Check if the requested reshape can be broken into independent reshapes on the keys and values. If it can, returns the index in the new shape separating keys from values, otherwise returns -1 """ new = tupleize(shape) old_key_size = prod(self.keys.shape) old_value_size = prod(self.values.shape) for i in range(len(new)): new_key_size = prod(new[:i]) new_value_size = prod(new[i:]) if new_key_size == old_key_size and new_value_size == old_value_size: return i return -1
python
{ "resource": "" }
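The check above is pure size arithmetic, so it can be exercised standalone. A minimal sketch (slightly generalized to also test the split falling at the very end of the new shape): find an index i so that the sizes before and after i match the old key and value sizes.

from numpy import prod

def reshape_split(new, old_key_shape, old_value_shape):
    for i in range(len(new) + 1):
        if prod(new[:i]) == prod(old_key_shape) and prod(new[i:]) == prod(old_value_shape):
            return i
    return -1

print(reshape_split((6, 20), (2, 3), (4, 5)))  # 1: keys (2,3)->(6,), values (4,5)->(20,)
print(reshape_split((2, 60), (2, 3), (4, 5)))  # -1: would mix keys and values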
q277117
BoltArraySpark.squeeze
test
def squeeze(self, axis=None): """ Remove one or more single-dimensional axes from the array. Parameters ---------- axis : tuple or int One or more singleton axes to remove. """ if not any([d == 1 for d in self.shape]): return self if axis is None: drop = where(asarray(self.shape) == 1)[0] elif isinstance(axis, int): drop = asarray((axis,)) elif isinstance(axis, tuple): drop = asarray(axis) else: raise ValueError("an integer or tuple is required for the axis") if any([self.shape[i] > 1 for i in drop]): raise ValueError("cannot select an axis to squeeze out which has size greater than one") if any(asarray(drop) < self.split): kmask = set([d for d in drop if d < self.split]) kfunc = lambda k: tuple([kk for ii, kk in enumerate(k) if ii not in kmask]) else: kfunc = lambda k: k if any(asarray(drop) >= self.split): vmask = tuple([d - self.split for d in drop if d >= self.split]) vfunc = lambda v: v.squeeze(vmask) else: vfunc = lambda v: v rdd = self._rdd.map(lambda kv: (kfunc(kv[0]), vfunc(kv[1]))) shape = tuple([ss for ii, ss in enumerate(self.shape) if ii not in drop]) split = len([d for d in range(self.keys.ndim) if d not in drop]) return self._constructor(rdd, shape=shape, split=split).__finalize__(self)
python
{ "resource": "" }
q277118
BoltArraySpark.astype
test
def astype(self, dtype, casting='unsafe'): """ Cast the array to a specified type. Parameters ---------- dtype : str or dtype Typecode or data-type to cast the array to (see numpy) """ rdd = self._rdd.mapValues(lambda v: v.astype(dtype, 'K', casting)) return self._constructor(rdd, dtype=dtype).__finalize__(self)
python
{ "resource": "" }
q277119
BoltArraySpark.clip
test
def clip(self, min=None, max=None): """ Clip values above and below. Parameters ---------- min : scalar or array-like Minimum value. If array, will be broadcasted max : scalar or array-like Maximum value. If array, will be broadcasted. """ rdd = self._rdd.mapValues(lambda v: v.clip(min=min, max=max)) return self._constructor(rdd).__finalize__(self)
python
{ "resource": "" }
q277120
BoltArraySpark.toarray
test
def toarray(self): """ Returns the contents as a local array. Will likely cause memory problems for large objects. """ rdd = self._rdd if self._ordered else self._rdd.sortByKey() x = rdd.values().collect() return asarray(x).reshape(self.shape)
python
{ "resource": "" }
q277121
tupleize
test
def tupleize(arg): """ Coerce singletons and lists and ndarrays to tuples. Parameters ---------- arg : tuple, list, ndarray, or singleton Item to coerce """ if arg is None: return None if not isinstance(arg, (tuple, list, ndarray, Iterable)): return tuple((arg,)) elif isinstance(arg, (list, ndarray)): return tuple(arg) elif isinstance(arg, Iterable) and not isinstance(arg, str): return tuple(arg) else: return arg
python
{ "resource": "" }
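A few calls showing the coercions tupleize is relied on for throughout the module (assuming the definition above, plus numpy, is in scope):

from numpy import arange

print(tupleize(3))                  # (3,)
print(tupleize([0, 1]))             # (0, 1)
print(tupleize(arange(2)))          # (0, 1)
print(tupleize(i for i in (4, 5)))  # (4, 5)
print(tupleize(None))               # None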
q277122
argpack
test
def argpack(args): """ Coerce a list of arguments to a tuple. Parameters ---------- args : tuple or nested tuple Pack arguments into a tuple, converting ((,...),) or (,) -> (,) """ if isinstance(args[0], (tuple, list, ndarray)): return tupleize(args[0]) elif isinstance(args[0], Iterable) and not isinstance(args[0], str): # coerce any iterable into a list before calling tupleize (Python 3 compatibility) return tupleize(list(args[0])) else: return tuple(args)
python
{ "resource": "" }
q277123
inshape
test
def inshape(shape, axes): """ Checks to see if a list of axes are contained within an array shape. Parameters ---------- shape : tuple[int] the shape of a BoltArray axes : tuple[int] the axes to check against shape """ valid = all([(axis < len(shape)) and (axis >= 0) for axis in axes]) if not valid: raise ValueError("axes not valid for an ndarray of shape: %s" % str(shape))
python
{ "resource": "" }
q277124
allclose
test
def allclose(a, b): """ Test that a and b are close and match in shape. Parameters ---------- a : ndarray First array to check b : ndarray Second array to check """ from numpy import allclose return (a.shape == b.shape) and allclose(a, b)
python
{ "resource": "" }
q277125
listify
test
def listify(lst, dim): """ Flatten lists of indices and ensure bounded by a known dim. Parameters ---------- lst : ndarray Array of integer indices dim : int Bound for indices """ if not all([l.dtype == int for l in lst]): raise ValueError("indices must be integers") if npany(asarray(lst) >= dim): raise ValueError("indices out of bounds for axis with size %s" % dim) return lst.flatten()
python
{ "resource": "" }
q277126
slicify
test
def slicify(slc, dim): """ Force a slice to have defined start, stop, and step from a known dim. Start and stop will always be positive. Step may be negative. As an exception, when a negative step overflows the lower bound, stop defaults to -1; this is the only case where start/stop may be negative. Parameters ---------- slc : slice or int The slice to modify, or int to convert to a slice dim : int Bound for slice """ if isinstance(slc, slice): # default limits start = 0 if slc.start is None else slc.start stop = dim if slc.stop is None else slc.stop step = 1 if slc.step is None else slc.step # account for negative indices if start < 0: start += dim if stop < 0: stop += dim # account for over-flowing the bounds if step > 0: if start < 0: start = 0 if stop > dim: stop = dim else: if stop < 0: stop = -1 if start > dim: start = dim-1 return slice(start, stop, step) elif isinstance(slc, int): if slc < 0: slc += dim return slice(slc, slc+1, 1) else: raise ValueError("Type for slice %s not recognized" % type(slc))
python
{ "resource": "" }
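Quick checks of the normalization slicify performs, assuming the function above is in scope; dim is the length of the axis being sliced.

print(slicify(slice(None), 10))        # slice(0, 10, 1)
print(slicify(slice(-3, None), 10))    # slice(7, 10, 1)
print(slicify(slice(8, -12, -1), 10))  # slice(8, -1, -1): stop clamps to -1, meaning "before 0"
print(slicify(4, 10))                  # slice(4, 5, 1)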
q277127
istransposeable
test
def istransposeable(new, old): """ Check to see if a proposed tuple of axes is a valid permutation of an old set of axes. Checks length, axis repetition, and bounds. Parameters ---------- new : tuple tuple of proposed axes old : tuple tuple of old axes """ new, old = tupleize(new), tupleize(old) if not len(new) == len(old): raise ValueError("Axes do not match axes of keys") if not len(set(new)) == len(set(old)): raise ValueError("Repeated axes") if any(n < 0 for n in new) or max(new) > len(old) - 1: raise ValueError("Invalid axes")
python
{ "resource": "" }
q277128
isreshapeable
test
def isreshapeable(new, old): """ Check to see if a proposed tuple of axes is a valid reshaping of the old axes by ensuring that they can be factored. Parameters ---------- new : tuple tuple of proposed axes old : tuple tuple of old axes """ new, old = tupleize(new), tupleize(old) if not prod(new) == prod(old): raise ValueError("Total size of new keys must remain unchanged")
python
{ "resource": "" }
q277129
allstack
test
def allstack(vals, depth=0): """ If an ndarray has been split into multiple chunks by splitting it along each axis at a number of locations, this function rebuilds the original array from chunks. Parameters ---------- vals : nested lists of ndarrays each level of nesting of the lists representing a dimension of the original array. """ if type(vals[0]) is ndarray: return concatenate(vals, axis=depth) else: return concatenate([allstack(x, depth+1) for x in vals], axis=depth)
python
{ "resource": "" }
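Rebuilding a 4x4 array that was split into 2x2 chunks, assuming the allstack above and its numpy imports are in scope; the nesting depth of the input lists mirrors the dimension being rejoined.

import numpy as np

a = np.arange(16).reshape(4, 4)
chunks = [[a[:2, :2], a[:2, 2:]], [a[2:, :2], a[2:, 2:]]]
print(np.array_equal(allstack(chunks), a))  # True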
q277130
iterexpand
test
def iterexpand(arry, extra): """ Expand dimensions by iteratively appending empty axes. Parameters ---------- arry : ndarray The original array extra : int The number of empty axes to append """ for d in range(arry.ndim, arry.ndim+extra): arry = expand_dims(arry, axis=d) return arry
python
{ "resource": "" }
q277131
zip_with_index
test
def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1): starts.append(starts[-1] + nums[i]) else: count = rdd.count() def func(k, it): for i, v in enumerate(it, starts[k]): yield v, i return count, rdd.mapPartitionsWithIndex(func)
python
{ "resource": "" }
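The per-partition offset computation at the heart of zip_with_index, sketched without Spark: each partition's first global index is the running sum of the counts of the partitions before it, and the total count falls out for free.

nums = [3, 0, 4]           # records per partition (hypothetical counts)
starts = [0]
for n in nums[:-1]:
    starts.append(starts[-1] + n)
print(starts)              # [0, 3, 3]
print(sum(nums))           # 7, the eagerly computed count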
q277132
wrapped
test
def wrapped(f): """ Decorator to append routed docstrings """ import inspect def extract(func): append = "" args = inspect.getargspec(func) for i, a in enumerate(args.args): if i < (len(args.args) - len(args.defaults)): append += str(a) + ", " else: default = args.defaults[i-len(args.args)] if hasattr(default, "__name__"): default = default.__name__ else: default = str(default) append += str(a) + "=" + default + ", " append = append[:-2] + ")" return append doc = f.__doc__ + "\n" doc += " local -> array(" + extract(getattr(ConstructLocal, f.__name__)) + "\n" doc += " spark -> array(" + extract(getattr(ConstructSpark, f.__name__)) + "\n" f.__doc__ = doc return f
python
{ "resource": "" }
q277133
lookup
test
def lookup(*args, **kwargs): """ Use arguments to route constructor. Applies a series of checks on arguments to identify constructor, starting with known keyword arguments, and then applying constructor-specific checks """ if 'mode' in kwargs: mode = kwargs['mode'] if mode not in constructors: raise ValueError('Mode %s not supported' % mode) del kwargs['mode'] return constructors[mode] else: for mode, constructor in constructors.items(): if constructor._argcheck(*args, **kwargs): return constructor return ConstructLocal
python
{ "resource": "" }
q277134
Keys.reshape
test
def reshape(self, *shape): """ Reshape just the keys of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- shape : tuple New proposed axes. """ new = argpack(shape) old = self.shape isreshapeable(new, old) if new == old: return self._barray def f(k): return unravel_index(ravel_multi_index(k, old), new) newrdd = self._barray._rdd.map(lambda kv: (f(kv[0]), kv[1])) newsplit = len(new) newshape = new + self._barray.values.shape return BoltArraySpark(newrdd, shape=newshape, split=newsplit).__finalize__(self._barray)
python
{ "resource": "" }
q277135
Keys.transpose
test
def transpose(self, *axes): """ Transpose just the keys of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- axes : tuple New proposed axes. """ new = argpack(axes) old = tuple(range(self.ndim)) istransposeable(new, old) if new == old: return self._barray def f(k): return tuple(k[i] for i in new) newrdd = self._barray._rdd.map(lambda kv: (f(kv[0]), kv[1])) newshape = tuple(self.shape[i] for i in new) + self._barray.values.shape return BoltArraySpark(newrdd, shape=newshape, ordered=False).__finalize__(self._barray)
python
{ "resource": "" }
q277136
Values.reshape
test
def reshape(self, *shape): """ Reshape just the values of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- shape : tuple New proposed axes. """ new = argpack(shape) old = self.shape isreshapeable(new, old) if new == old: return self._barray def f(v): return v.reshape(new) newrdd = self._barray._rdd.mapValues(f) newshape = self._barray.keys.shape + new return BoltArraySpark(newrdd, shape=newshape).__finalize__(self._barray)
python
{ "resource": "" }
q277137
Values.transpose
test
def transpose(self, *axes): """ Transpose just the values of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- axes : tuple New proposed axes. """ new = argpack(axes) old = tuple(range(self.ndim)) istransposeable(new, old) if new == old: return self._barray def f(v): return v.transpose(new) newrdd = self._barray._rdd.mapValues(f) newshape = self._barray.keys.shape + tuple(self.shape[i] for i in new) return BoltArraySpark(newrdd, shape=newshape).__finalize__(self._barray)
python
{ "resource": "" }
q277138
ConstructLocal.ones
test
def ones(shape, dtype=float64, order='C'): """ Create a local bolt array of ones. Parameters ---------- shape : tuple Dimensions of the desired array dtype : data-type, optional, default=float64 The desired data-type for the array. (see numpy) order : {'C', 'F', 'A'}, optional, default='C' The order of the array. (see numpy) Returns ------- BoltArrayLocal """ from numpy import ones return ConstructLocal._wrap(ones, shape, dtype, order)
python
{ "resource": "" }
q277139
ConstructLocal.zeros
test
def zeros(shape, dtype=float64, order='C'): """ Create a local bolt array of zeros. Parameters ---------- shape : tuple Dimensions of the desired array. dtype : data-type, optional, default=float64 The desired data-type for the array. (see numpy) order : {'C', 'F', 'A'}, optional, default='C' The order of the array. (see numpy) Returns ------- BoltArrayLocal """ from numpy import zeros return ConstructLocal._wrap(zeros, shape, dtype, order)
python
{ "resource": "" }
q277140
ConstructLocal.concatenate
test
def concatenate(arrays, axis=0): """ Join a sequence of arrays together. Parameters ---------- arrays : tuple A sequence of array-like e.g. (a1, a2, ...) axis : int, optional, default=0 The axis along which the arrays will be joined. Returns ------- BoltArrayLocal """ if not isinstance(arrays, tuple): raise ValueError("data type not understood") arrays = tuple([asarray(a) for a in arrays]) from numpy import concatenate return BoltArrayLocal(concatenate(arrays, axis))
python
{ "resource": "" }
q277141
discrete_likelihood
test
def discrete_likelihood(data, xmin, alpha): """ Equation B.8 in Clauset et al. 2009 Given a data set, an xmin value, and an alpha "scaling parameter", computes the log-likelihood (the value to be maximized) """ if not scipyOK: raise ImportError("Can't import scipy. Need scipy for zeta function.") from scipy.special import zeta zz = data[data>=xmin] nn = len(zz) sum_log_data = np.log(zz).sum() zeta_val = zeta(alpha, xmin) L_of_alpha = -1*nn*np.log(zeta_val) - alpha * sum_log_data return L_of_alpha
python
{ "resource": "" }
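Direct evaluation of equation B.8 (the log-likelihood of a discrete power law) on a tiny sample, assuming scipy is installed; this mirrors the body of discrete_likelihood above with the Hurwitz zeta from scipy.special.

import numpy as np
from scipy.special import zeta

data = np.array([1, 1, 2, 3, 5, 8, 13])
xmin, alpha = 2, 2.0
zz = data[data >= xmin]
L = -len(zz) * np.log(zeta(alpha, xmin)) - alpha * np.log(zz).sum()
print(L)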
q277142
most_likely_alpha
test
def most_likely_alpha(data, xmin, alpharange=(1.5,3.5), n_alpha=201): """ Return the most likely alpha for the data given an xmin """ alpha_vector = np.linspace(alpharange[0],alpharange[1],n_alpha) return alpha_vector[discrete_max_likelihood_arg(data, xmin, alpharange=alpharange, n_alpha=n_alpha)]
python
{ "resource": "" }
q277143
discrete_alpha_mle
test
def discrete_alpha_mle(data, xmin): """ Equation B.17 of Clauset et al 2009 The Maximum Likelihood Estimator of the "scaling parameter" alpha in the discrete case is similar to that in the continuous case """ # boolean indices of positive data gexmin = (data>=xmin) nn = gexmin.sum() if nn < 2: return 0 xx = data[gexmin] alpha = 1.0 + float(nn) * (sum(log(xx/(float(xmin)-0.5))))**-1 return alpha
python
{ "resource": "" }
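Equation B.17 applied to synthetic data, assuming only numpy: the discrete MLE approximation alpha = 1 + n / sum(log(x / (xmin - 0.5))). The sample below is a crude discretized power law, used purely for illustration.

import numpy as np

rng = np.random.default_rng(0)
data = np.round(rng.pareto(1.5, 5000) + 1)   # rough discrete power-law-ish sample
xmin = 2
xx = data[data >= xmin]
alpha = 1.0 + len(xx) / np.log(xx / (xmin - 0.5)).sum()
print(alpha)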
q277144
discrete_best_alpha
test
def discrete_best_alpha(data, alpharangemults=(0.9,1.1), n_alpha=201, approximate=True, verbose=True): """ Use the maximum likelihood L to determine the most likely value of alpha *alpharangemults* [ 2-tuple ] Pair of values indicating multiplicative factors above and below the approximate alpha from the MLE alpha to use when determining the "exact" alpha (by directly maximizing the likelihood function) """ xmins = np.unique(data) if approximate: alpha_of_xmin = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] else: alpha_approx = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] alpharanges = [(alpharangemults[0]*a, alpharangemults[1]*a) for a in alpha_approx] alpha_of_xmin = [ most_likely_alpha(data,xmin,alpharange=ar,n_alpha=n_alpha) for xmin,ar in zip(xmins,alpharanges) ] ksvalues = [ discrete_ksD(data, xmin, alpha) for xmin,alpha in zip(xmins,alpha_of_xmin) ] best_index = argmin(ksvalues) best_alpha = alpha_of_xmin[best_index] best_xmin = xmins[best_index] best_ks = ksvalues[best_index] best_likelihood = discrete_likelihood(data, best_xmin, best_alpha) if verbose: print("alpha = %f xmin = %f ksD = %f L = %f (n<x) = %i (n>=x) = %i" % ( best_alpha, best_xmin, best_ks, best_likelihood, (data<best_xmin).sum(), (data>=best_xmin).sum())) return best_alpha,best_xmin,best_ks,best_likelihood
python
{ "resource": "" }
q277145
plfit.discrete_best_alpha
test
def discrete_best_alpha(self, alpharangemults=(0.9,1.1), n_alpha=201, approximate=True, verbose=True, finite=True): """ Use the maximum likelihood to determine the most likely value of alpha *alpharangemults* [ 2-tuple ] Pair of values indicating multiplicative factors above and below the approximate alpha from the MLE alpha to use when determining the "exact" alpha (by directly maximizing the likelihood function) *n_alpha* [ int ] Number of alpha values to use when measuring. Larger number is more accurate. *approximate* [ bool ] If False, try to "zoom-in" around the MLE alpha and get the exact best alpha value within some range around the approximate best *verbose* [ bool ] *finite* [ bool ] Correction for finite data? """ data = self.data self._xmins = xmins = np.unique(data) if approximate: alpha_of_xmin = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] else: alpha_approx = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] alpharanges = [(alpharangemults[0]*a, alpharangemults[1]*a) for a in alpha_approx] alpha_of_xmin = [ most_likely_alpha(data,xmin,alpharange=ar,n_alpha=n_alpha) for xmin,ar in zip(xmins,alpharanges) ] ksvalues = np.array([discrete_ksD(data, xmin, alpha) for xmin,alpha in zip(xmins,alpha_of_xmin) ]) self._alpha_values = np.array(alpha_of_xmin) self._xmin_kstest = ksvalues ksvalues[np.isnan(ksvalues)] = np.inf best_index = argmin(ksvalues) self._alpha = best_alpha = alpha_of_xmin[best_index] self._xmin = best_xmin = xmins[best_index] self._ks = best_ks = ksvalues[best_index] self._likelihood = best_likelihood = discrete_likelihood(data, best_xmin, best_alpha) self._ngtx = n = (self.data>=self._xmin).sum() if finite: self._alpha = self._alpha*(n-1.)/n+1./n if verbose: print("alpha = %f xmin = %f ksD = %f L = %f (n<x) = %i (n>=x) = %i" % ( best_alpha, best_xmin, best_ks, best_likelihood, (data<best_xmin).sum(), (data>=best_xmin).sum())) self._alphaerr = (self._alpha-1.0)/np.sqrt(n) if scipyOK: self._ks_prob = scipy.stats.ksone.sf(self._ks, n) return best_alpha,best_xmin,best_ks,best_likelihood
python
{ "resource": "" }
q277146
plfit.plotppf
test
def plotppf(self,x=None,xmin=None,alpha=None,dolog=True,**kwargs): """ Plots the power-law-predicted value on the Y-axis against the real values along the X-axis. Can be used as a diagnostic of the fit quality. """ if not(xmin): xmin=self._xmin if not(alpha): alpha=self._alpha if not(x): x=np.sort(self.data[self.data>xmin]) else: x=np.sort(x[x>xmin]) # N = M^(-alpha+1) # M = N^(1/(-alpha+1)) m0 = min(x) N = (1.0+np.arange(len(x)))[::-1] xmodel = m0 * N**(1/(1-alpha)) / max(N)**(1/(1-alpha)) if dolog: pylab.loglog(x,xmodel,'.',**kwargs) pylab.gca().set_xlim(min(x),max(x)) pylab.gca().set_ylim(min(x),max(x)) else: pylab.plot(x,xmodel,'.',**kwargs) pylab.plot([min(x),max(x)],[min(x),max(x)],'k--') pylab.xlabel("Real Value") pylab.ylabel("Power-Law Model Value")
python
{ "resource": "" }
q277147
plfit.lognormal
test
def lognormal(self,doprint=True): """ Use the maximum likelihood estimator for a lognormal distribution to produce the best-fit lognormal parameters """ # N = float(self.data.shape[0]) # mu = log(self.data).sum() / N # sigmasquared = ( ( log(self.data) - mu )**2 ).sum() / N # self.lognormal_mu = mu # self.lognormal_sigma = np.sqrt(sigmasquared) # self.lognormal_likelihood = -N/2. * log(np.pi*2) - N/2. * log(sigmasquared) - 1/(2*sigmasquared) * (( self.data - mu )**2).sum() # if doprint: # print "Best fit lognormal is exp( -(x-%g)^2 / (2*%g^2)" % (mu,np.sqrt(sigmasquared)) # print "Likelihood: %g" % (self.lognormal_likelihood) if scipyOK: fitpars = scipy.stats.lognorm.fit(self.data) self.lognormal_dist = scipy.stats.lognorm(*fitpars) self.lognormal_ksD,self.lognormal_ksP = scipy.stats.kstest(self.data,self.lognormal_dist.cdf) # nnlf = NEGATIVE log likelihood self.lognormal_likelihood = -1*scipy.stats.lognorm.nnlf(fitpars,self.data) # Is this the right likelihood ratio? # Definition of L from eqn. B3 of Clauset et al 2009: # L = log(p(x|alpha)) # _nnlf from scipy.stats.distributions: # -sum(log(self._pdf(x, *args)),axis=0) # Assuming the pdf and p(x|alpha) are both non-inverted, it looks # like the _nnlf and L have opposite signs, which would explain the # likelihood ratio I've used here: self.power_lognorm_likelihood = (self._likelihood + self.lognormal_likelihood) # a previous version had 2*(above). That is the correct form if you want the likelihood ratio # statistic "D": http://en.wikipedia.org/wiki/Likelihood-ratio_test # The above explanation makes sense, since nnlf is the *negative* log likelihood function: ## nnlf -- negative log likelihood function (to minimize) # # Assuming we want the ratio between the POSITIVE likelihoods, the D statistic is: # D = -2 log( L_power / L_lognormal ) self.likelihood_ratio_D = -2 * (log(self._likelihood/self.lognormal_likelihood)) if doprint: print("Lognormal KS D: %g p(D): %g" % (self.lognormal_ksD,self.lognormal_ksP), end=' ') print(" Likelihood Ratio Statistic (powerlaw/lognormal): %g" % self.likelihood_ratio_D) print("At this point, have a look at Clauset et al 2009 Appendix C: determining sigma(likelihood_ratio)")
python
{ "resource": "" }
q277148
sanitize_turbo
test
def sanitize_turbo(html, allowed_tags=TURBO_ALLOWED_TAGS, allowed_attrs=TURBO_ALLOWED_ATTRS): """Sanitizes HTML, removing not allowed tags and attributes. :param str|unicode html: :param list allowed_tags: List of allowed tags. :param dict allowed_attrs: Dictionary with attributes allowed for tags. :rtype: unicode """ return clean(html, tags=allowed_tags, attributes=allowed_attrs, strip=True)
python
{ "resource": "" }
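sanitize_turbo delegates to bleach.clean; a hedged call with hypothetical whitelists shows the effect of strip=True (disallowed tags removed rather than escaped, disallowed attributes dropped).

from bleach import clean

html = '<p onclick="x()">ok</p><b>bold</b>'
print(clean(html, tags=['p'], attributes={}, strip=True))
# should yield '<p>ok</p>bold': onclick and the disallowed <b> tag are stripped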
q277149
YandexTurboFeed.configure_analytics_yandex
test
def configure_analytics_yandex(self, ident, params=None): """Configure Yandex Metrika analytics counter. :param str|unicode ident: Metrika counter ID. :param dict params: Additional params. """ params = params or {} data = { 'type': 'Yandex', 'id': ident, } if params: data['params'] = '%s' % params self.analytics.append(data)
python
{ "resource": "" }
q277150
LabelWidget.tag_list
test
def tag_list(self, tags): """ Generates a list of tags identifying those previously selected. Returns a list of tuples of the form (<tag name>, <CSS class name>). Uses the string names rather than the tags themselves in order to work with tag lists built from forms not fully submitted. """ return [ (tag.name, "selected taggit-tag" if tag.name in tags else "taggit-tag") for tag in self.model.objects.all() ]
python
{ "resource": "" }
q277151
SSHKey.hash_md5
test
def hash_md5(self): """Calculate md5 fingerprint. Shamelessly copied from http://stackoverflow.com/questions/6682815/deriving-an-ssh-fingerprint-from-a-public-key-in-python For specification, see RFC4716, section 4.""" fp_plain = hashlib.md5(self._decoded_key).hexdigest() return "MD5:" + ':'.join(a + b for a, b in zip(fp_plain[::2], fp_plain[1::2]))
python
{ "resource": "" }
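The fingerprint formatting used by hash_md5, applied to a placeholder byte blob standing in for the decoded key: pairs of hex digits joined by colons.

import hashlib

decoded_key = b"\x00\x00\x00\x07ssh-rsa"   # hypothetical decoded key blob
fp = hashlib.md5(decoded_key).hexdigest()
print("MD5:" + ":".join(a + b for a, b in zip(fp[::2], fp[1::2])))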
q277152
SSHKey.hash_sha256
test
def hash_sha256(self): """Calculate sha256 fingerprint.""" fp_plain = hashlib.sha256(self._decoded_key).digest() return (b"SHA256:" + base64.b64encode(fp_plain).replace(b"=", b"")).decode("utf-8")
python
{ "resource": "" }
q277153
SSHKey.hash_sha512
test
def hash_sha512(self): """Calculates sha512 fingerprint.""" fp_plain = hashlib.sha512(self._decoded_key).digest() return (b"SHA512:" + base64.b64encode(fp_plain).replace(b"=", b"")).decode("utf-8")
python
{ "resource": "" }
q277154
SSHKey._parse_long
test
def _parse_long(cls, data): """Convert a big-endian byte string to a (long) integer.""" if sys.version < '3': # this does not exist in python 3 - undefined-variable disabled to make pylint happier. ret = long(0) # pylint:disable=undefined-variable for byte in data: ret = (ret << 8) + ord(byte) else: ret = 0 for byte in data: ret = (ret << 8) + byte return ret
python
{ "resource": "" }
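On Python 3 the loop above is equivalent to int.from_bytes with big-endian byte order, which makes the behavior easy to check:

data = b"\x01\x00\x01"
ret = 0
for byte in data:
    ret = (ret << 8) + byte
print(ret, ret == int.from_bytes(data, "big"))  # 65537 True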
q277155
SSHKey.decode_key
test
def decode_key(cls, pubkey_content): """Decode base64 coded part of the key.""" try: decoded_key = base64.b64decode(pubkey_content.encode("ascii")) except (TypeError, binascii.Error): raise MalformedDataError("Unable to decode the key") return decoded_key
python
{ "resource": "" }
q277156
SSHKey.parse_options
test
def parse_options(self, options): """Parses ssh options string.""" quote_open = False parsed_options = {} def parse_add_single_option(opt): """Parses and validates a single option, and adds it to parsed_options field.""" if "=" in opt: opt_name, opt_value = opt.split("=", 1) opt_value = opt_value.replace('"', '') else: opt_name = opt opt_value = True if " " in opt_name or not self.OPTION_NAME_RE.match(opt_name): raise InvalidOptionNameError("%s is not valid option name." % opt_name) if self.strict_mode: for valid_opt_name, value_required in self.OPTIONS_SPEC: if opt_name.lower() == valid_opt_name: if value_required and opt_value is True: raise MissingMandatoryOptionValueError("%s is missing mandatory value." % opt_name) break else: raise UnknownOptionNameError("%s is unrecognized option name." % opt_name) if opt_name not in parsed_options: parsed_options[opt_name] = [] parsed_options[opt_name].append(opt_value) start_of_current_opt = 0 i = 1 # Need to be set for empty options strings for i, character in enumerate(options): if character == '"': # only double quotes are allowed, no need to care about single quotes quote_open = not quote_open if quote_open: continue if character == ",": opt = options[start_of_current_opt:i] parse_add_single_option(opt) start_of_current_opt = i + 1 # Data begins after the first space if start_of_current_opt + 1 != i: opt = options[start_of_current_opt:] parse_add_single_option(opt) if quote_open: raise InvalidOptionsError("Unbalanced quotes.") return parsed_options
python
{ "resource": "" }
q277157
SSHKey._process_ssh_rsa
test
def _process_ssh_rsa(self, data): """Parses ssh-rsa public keys.""" current_position, raw_e = self._unpack_by_int(data, 0) current_position, raw_n = self._unpack_by_int(data, current_position) unpacked_e = self._parse_long(raw_e) unpacked_n = self._parse_long(raw_n) self.rsa = RSAPublicNumbers(unpacked_e, unpacked_n).public_key(default_backend()) self.bits = self.rsa.key_size if self.strict_mode: min_length = self.RSA_MIN_LENGTH_STRICT max_length = self.RSA_MAX_LENGTH_STRICT else: min_length = self.RSA_MIN_LENGTH_LOOSE max_length = self.RSA_MAX_LENGTH_LOOSE if self.bits < min_length: raise TooShortKeyError( "%s key data can not be shorter than %s bits (was %s)" % (self.key_type, min_length, self.bits) ) if self.bits > max_length: raise TooLongKeyError( "%s key data can not be longer than %s bits (was %s)" % (self.key_type, max_length, self.bits) ) return current_position
python
{ "resource": "" }
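The key blobs parsed above are sequences of length-prefixed fields. A minimal sketch of the _unpack_by_int helper they all rely on (assumed behavior: return the new offset and the field bytes, with a 4-byte big-endian length prefix):

import struct

def unpack_by_int(data, offset):
    # read the 4-byte big-endian length, then slice out that many bytes
    (length,) = struct.unpack(">I", data[offset:offset + 4])
    start = offset + 4
    return start + length, data[start:start + length]

blob = struct.pack(">I", 7) + b"ssh-rsa" + struct.pack(">I", 3) + b"\x01\x00\x01"
pos, field = unpack_by_int(blob, 0)
print(field, unpack_by_int(blob, pos)[1])  # b'ssh-rsa' b'\x01\x00\x01'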
q277158
SSHKey._process_ssh_dss
test
def _process_ssh_dss(self, data): """Parses ssh-dsa public keys.""" data_fields = {} current_position = 0 for item in ("p", "q", "g", "y"): current_position, value = self._unpack_by_int(data, current_position) data_fields[item] = self._parse_long(value) q_bits = self._bits_in_number(data_fields["q"]) p_bits = self._bits_in_number(data_fields["p"]) if q_bits != self.DSA_N_LENGTH: raise InvalidKeyError("Incorrect DSA key parameters: bits(p)=%s, q=%s" % (p_bits, q_bits)) if self.strict_mode: min_length = self.DSA_MIN_LENGTH_STRICT max_length = self.DSA_MAX_LENGTH_STRICT else: min_length = self.DSA_MIN_LENGTH_LOOSE max_length = self.DSA_MAX_LENGTH_LOOSE if p_bits < min_length: raise TooShortKeyError("%s key can not be shorter than %s bits (was %s)" % (self.key_type, min_length, p_bits)) if p_bits > max_length: raise TooLongKeyError( "%s key data can not be longer than %s bits (was %s)" % (self.key_type, max_length, p_bits) ) dsa_parameters = DSAParameterNumbers(data_fields["p"], data_fields["q"], data_fields["g"]) self.dsa = DSAPublicNumbers(data_fields["y"], dsa_parameters).public_key(default_backend()) self.bits = self.dsa.key_size return current_position
python
{ "resource": "" }
q277159
SSHKey._process_ecdsa_sha
test
def _process_ecdsa_sha(self, data): """Parses ecdsa-sha public keys.""" current_position, curve_information = self._unpack_by_int(data, 0) if curve_information not in self.ECDSA_CURVE_DATA: raise NotImplementedError("Invalid curve type: %s" % curve_information) curve, hash_algorithm = self.ECDSA_CURVE_DATA[curve_information] current_position, key_data = self._unpack_by_int(data, current_position) try: # data starts with \x04, which should be discarded. ecdsa_key = ecdsa.VerifyingKey.from_string(key_data[1:], curve, hash_algorithm) except AssertionError: raise InvalidKeyError("Invalid ecdsa key") self.bits = int(curve_information.replace(b"nistp", b"")) self.ecdsa = ecdsa_key return current_position
python
{ "resource": "" }
q277160
SSHKey._process_ed25516
test
def _process_ed25516(self, data): """Parses ed25519 keys. There is no (apparent) way to validate ed25519 keys. This only checks data length (256 bits), but does not try to validate the key in any way.""" current_position, verifying_key = self._unpack_by_int(data, 0) verifying_key_length = len(verifying_key) * 8 verifying_key = self._parse_long(verifying_key) if verifying_key < 0: raise InvalidKeyError("ed25519 verifying key must be >0.") self.bits = verifying_key_length if self.bits != 256: raise InvalidKeyLengthError("ed25519 keys must be 256 bits (was %s bits)" % self.bits) return current_position
python
{ "resource": "" }
q277161
SSHKey.parse
test
def parse(self, keydata=None): """Validates SSH public key. Throws exception for invalid keys. Otherwise returns None. Populates the key_type and bits fields. For rsa keys, see field "rsa" for raw public key data. For dsa keys, see field "dsa". For ecdsa keys, see field "ecdsa".""" if keydata is None: if self.keydata is None: raise ValueError("Key data must be supplied either in constructor or to parse()") keydata = self.keydata else: self.reset() self.keydata = keydata if keydata.startswith("---- BEGIN SSH2 PUBLIC KEY ----"): # SSH2 key format key_type = None # There is no redundant key-type field - skip comparing plain-text and encoded data. pubkey_content = "".join([line for line in keydata.split("\n") if ":" not in line and "----" not in line]) else: key_parts = self._split_key(keydata) key_type = key_parts[0] pubkey_content = key_parts[1] self._decoded_key = self.decode_key(pubkey_content) # Check key type current_position, unpacked_key_type = self._unpack_by_int(self._decoded_key, 0) if key_type is not None and key_type != unpacked_key_type.decode(): raise InvalidTypeError("Keytype mismatch: %s != %s" % (key_type, unpacked_key_type)) self.key_type = unpacked_key_type key_data_length = self._process_key(self._decoded_key[current_position:]) current_position = current_position + key_data_length if current_position != len(self._decoded_key): raise MalformedDataError("Leftover data: %s bytes" % (len(self._decoded_key) - current_position)) if self.disallow_options and self.options: raise InvalidOptionsError("Options are disallowed.")
python
{ "resource": "" }
q277162
InitContext.step
test
def step(self, input_token=None): """Performs a step to establish the context as an initiator. This method should be called in a loop and fed input tokens from the acceptor, and its output tokens should be sent to the acceptor, until this context's :attr:`established` attribute is True. :param input_token: The input token from the acceptor (omit this param or pass None on the first call). :type input_token: bytes :returns: either a byte string with the next token to send to the acceptor, or None if there is no further token to send to the acceptor. :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context. """ minor_status = ffi.new('OM_uint32[1]') if input_token: input_token_buffer = ffi.new('gss_buffer_desc[1]') input_token_buffer[0].length = len(input_token) c_str_input_token = ffi.new('char[]', input_token) input_token_buffer[0].value = c_str_input_token else: input_token_buffer = ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER) if isinstance(self._desired_mech, OID): desired_mech = ffi.addressof(self._desired_mech._oid) else: desired_mech = ffi.cast('gss_OID', C.GSS_C_NO_OID) actual_mech = ffi.new('gss_OID[1]') output_token_buffer = ffi.new('gss_buffer_desc[1]') actual_flags = ffi.new('OM_uint32[1]') actual_time = ffi.new('OM_uint32[1]') if self._cred_object is not None: cred = self._cred_object._cred[0] else: cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL) retval = C.gss_init_sec_context( minor_status, cred, self._ctx, self.peer_name._name[0], desired_mech, self._req_flags, self._time_req, self._channel_bindings, input_token_buffer, actual_mech, output_token_buffer, actual_flags, actual_time ) try: if output_token_buffer[0].length != 0: out_token = _buf_to_str(output_token_buffer[0]) else: out_token = None if GSS_ERROR(retval): if minor_status[0] and actual_mech[0]: raise _exception_for_status(retval, minor_status[0], actual_mech[0], out_token) else: raise _exception_for_status(retval, minor_status[0], None, out_token) self.established = not (retval & C.GSS_S_CONTINUE_NEEDED) self.flags = actual_flags[0] if actual_mech[0]: self.mech_type = OID(actual_mech[0][0]) return out_token except: if self._ctx[0]: C.gss_delete_sec_context( minor_status, self._ctx, ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER) ) self._reset_flags() raise finally: if output_token_buffer[0].length != 0: C.gss_release_buffer(minor_status, output_token_buffer)
python
{ "resource": "" }
q277163
AcceptContext.step
test
def step(self, input_token): """Performs a step to establish the context as an acceptor. This method should be called in a loop and fed input tokens from the initiator, and its output tokens should be sent to the initiator, until this context's :attr:`established` attribute is True. :param input_token: The input token from the initiator (required). :type input_token: bytes :returns: either a byte string with the next token to send to the initiator, or None if there is no further token to send to the initiator. :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context. """ minor_status = ffi.new('OM_uint32[1]') input_token_buffer = ffi.new('gss_buffer_desc[1]') input_token_buffer[0].length = len(input_token) c_str_import_token = ffi.new('char[]', input_token) input_token_buffer[0].value = c_str_import_token mech_type = ffi.new('gss_OID[1]') output_token_buffer = ffi.new('gss_buffer_desc[1]') src_name_handle = ffi.new('gss_name_t[1]') actual_flags = ffi.new('OM_uint32[1]') time_rec = ffi.new('OM_uint32[1]') delegated_cred_handle = ffi.new('gss_cred_id_t[1]') if self._cred_object is not None: cred = self._cred_object._cred[0] else: cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL) retval = C.gss_accept_sec_context( minor_status, self._ctx, cred, input_token_buffer, self._channel_bindings, src_name_handle, mech_type, output_token_buffer, actual_flags, time_rec, delegated_cred_handle ) if src_name_handle[0]: src_name = MechName(src_name_handle, mech_type[0]) # make sure src_name is GC'd try: if output_token_buffer[0].length != 0: out_token = _buf_to_str(output_token_buffer[0]) else: out_token = None if GSS_ERROR(retval): if minor_status[0] and mech_type[0]: raise _exception_for_status(retval, minor_status[0], mech_type[0], out_token) else: raise _exception_for_status(retval, minor_status[0], None, out_token) self.established = not (retval & C.GSS_S_CONTINUE_NEEDED) self.flags = actual_flags[0] if (self.flags & C.GSS_C_DELEG_FLAG): self.delegated_cred = Credential(delegated_cred_handle) if mech_type[0]: self.mech_type = OID(mech_type[0][0]) if src_name_handle[0]: src_name._mech_type = self.mech_type self.peer_name = src_name return out_token except: if self._ctx: C.gss_delete_sec_context( minor_status, self._ctx, ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER) ) self._reset_flags() raise finally: if output_token_buffer[0].length != 0: C.gss_release_buffer(minor_status, output_token_buffer) # if self.delegated_cred is present, it will handle gss_release_cred: if delegated_cred_handle[0] and not self.delegated_cred: C.gss_release_cred(minor_status, delegated_cred_handle)
python
{ "resource": "" }
q277164
Credential.mechs
test
def mechs(self): """ The set of mechanisms supported by the credential. :type: :class:`~gssapi.oids.OIDSet` """ if not self._mechs: self._mechs = self._inquire(False, False, False, True)[3] return self._mechs
python
{ "resource": "" }
q277165
Credential.store
test
def store(self, usage=None, mech=None, overwrite=False, default=False, cred_store=None): """ Stores this credential into a 'credential store'. It can either store this credential in the default credential store, or into a specific credential store specified by a set of mechanism-specific key-value pairs. The former method of operation requires that the underlying GSSAPI implementation supports the ``gss_store_cred`` C function, the latter method requires support for the ``gss_store_cred_into`` C function. :param usage: Optional parameter specifying whether to store the initiator, acceptor, or both usages of this credential. Defaults to the value of this credential's :attr:`usage` property. :type usage: One of :data:`~gssapi.C_INITIATE`, :data:`~gssapi.C_ACCEPT` or :data:`~gssapi.C_BOTH` :param mech: Optional parameter specifying a single mechanism to store the credential element for. If not supplied, all mechanisms' elements in this credential will be stored. :type mech: :class:`~gssapi.oids.OID` :param overwrite: If True, indicates that any credential for the same principal in the credential store should be overwritten with this credential. :type overwrite: bool :param default: If True, this credential should be made available as the default credential when stored, for acquisition when no `desired_name` parameter is passed to :class:`Credential` or for use when no credential is passed to :class:`~gssapi.ctx.InitContext` or :class:`~gssapi.ctx.AcceptContext`. This is only an advisory parameter to the GSSAPI implementation. :type default: bool :param cred_store: Optional dict or list of (key, value) pairs indicating the credential store to use. The interpretation of these values will be mechanism-specific. :type cred_store: dict, or list of (str, str) :returns: A pair of values indicating the set of mechanism OIDs for which credential elements were successfully stored, and the usage of the credential that was stored. :rtype: tuple(:class:`~gssapi.oids.OIDSet`, int) :raises: :exc:`~gssapi.error.GSSException` if there is a problem with storing the credential. :exc:`NotImplementedError` if the underlying GSSAPI implementation does not support the ``gss_store_cred`` or ``gss_store_cred_into`` C functions. """ if usage is None: usage = self.usage if isinstance(mech, OID): oid_ptr = ffi.addressof(mech._oid) else: oid_ptr = ffi.cast('gss_OID', C.GSS_C_NO_OID) minor_status = ffi.new('OM_uint32[1]') elements_stored = ffi.new('gss_OID_set[1]') usage_stored = ffi.new('gss_cred_usage_t[1]') if cred_store is None: if not hasattr(C, 'gss_store_cred'): raise NotImplementedError("The GSSAPI implementation does not support " "gss_store_cred") retval = C.gss_store_cred( minor_status, self._cred[0], ffi.cast('gss_cred_usage_t', usage), oid_ptr, ffi.cast('OM_uint32', overwrite), ffi.cast('OM_uint32', default), elements_stored, usage_stored ) else: if not hasattr(C, 'gss_store_cred_into'): raise NotImplementedError("The GSSAPI implementation does not support " "gss_store_cred_into") c_strings, elements, cred_store_kv_set = _make_kv_set(cred_store) retval = C.gss_store_cred_into( minor_status, self._cred[0], ffi.cast('gss_cred_usage_t', usage), oid_ptr, ffi.cast('OM_uint32', overwrite), ffi.cast('OM_uint32', default), cred_store_kv_set, elements_stored, usage_stored ) try: if GSS_ERROR(retval): if oid_ptr: raise _exception_for_status(retval, minor_status[0], oid_ptr) else: raise _exception_for_status(retval, minor_status[0]) except: if elements_stored[0]: C.gss_release_oid_set(minor_status, elements_stored) raise return (OIDSet(elements_stored), usage_stored[0])
python
{ "resource": "" }
q277166
main
test
def main(properties=properties, options=options, **custom_options): """Imports and runs setup function with given properties.""" return init(**dict(options, **custom_options))(**properties)
python
{ "resource": "" }
q277167
init
test
def init( dist='dist', minver=None, maxver=None, use_markdown_readme=True, use_stdeb=False, use_distribute=False, ): """Imports and returns a setup function. If use_markdown_readme is set, then README.md is added to setuptools READMES list. If use_stdeb is set on a Debian based system, then module stdeb is imported. Stdeb supports building deb packages on Debian based systems. The package should only be installed on the same system version it was built on, though. See http://github.com/astraw/stdeb. If use_distribute is set, then distribute_setup.py is imported. """ if not minver == maxver == None: import sys if not minver <= sys.version < (maxver or 'Any'): sys.stderr.write( '%s: requires python version in <%s, %s), not %s\n' % ( sys.argv[0], minver or 'any', maxver or 'any', sys.version.split()[0])) sys.exit(1) if use_distribute: from distribute_setup import use_setuptools use_setuptools(to_dir=dist) from setuptools import setup else: try: from setuptools import setup except ImportError: from distutils.core import setup if use_markdown_readme: try: import setuptools.command.sdist setuptools.command.sdist.READMES = tuple(list(getattr(setuptools.command.sdist, 'READMES', ())) + ['README.md']) except ImportError: pass if use_stdeb: import platform if 'debian' in platform.dist(): try: import stdeb except ImportError: pass return setup
python
{ "resource": "" }
q277168
_create_file
test
def _create_file(): """ Yields a wave file handle which is used to record audio (intended for use as a context manager) """ f = wave.open('audio.wav', mode='wb') f.setnchannels(2) p = pyaudio.PyAudio() f.setsampwidth(p.get_sample_size(pyaudio.paInt16)) f.setframerate(p.get_default_input_device_info()['defaultSampleRate']) try: yield f finally: f.close()
python
{ "resource": "" }
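Because _create_file yields inside try/finally, it is presumably wrapped with contextlib.contextmanager at the definition site (an assumption); usage would then look like the sketch below. Running it requires PyAudio and a working default input device.

import contextlib

# assumption: the generator above is in scope and not already decorated
create_file = contextlib.contextmanager(_create_file)
with create_file() as f:
    print(f.getnchannels())  # 2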
q277169
djfrontend_h5bp_css
test
def djfrontend_h5bp_css(version=None): """ Returns HTML5 Boilerplate CSS file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_H5BP_CSS', DJFRONTEND_H5BP_CSS_DEFAULT) return format_html( '<link rel="stylesheet" href="{0}djfrontend/css/h5bp/{1}/h5bp.css">', _static_url, version)
python
{ "resource": "" }
q277170
djfrontend_normalize
test
def djfrontend_normalize(version=None): """ Returns Normalize CSS file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_NORMALIZE', DJFRONTEND_NORMALIZE_DEFAULT) return format_html( '<link rel="stylesheet" href="{0}djfrontend/css/normalize/{1}/normalize.css">', _static_url, version)
python
{ "resource": "" }
q277171
djfrontend_fontawesome
test
def djfrontend_fontawesome(version=None): """ Returns Font Awesome CSS file. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_FONTAWESOME', DJFRONTEND_FONTAWESOME_DEFAULT) return format_html( '<link rel="stylesheet" href="{0}djfrontend/css/fontawesome/{1}/font-awesome{2}.css">', _static_url, version, _min)
python
{ "resource": "" }
q277172
djfrontend_modernizr
test
def djfrontend_modernizr(version=None): """ Returns Modernizr JavaScript file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_MODERNIZR', DJFRONTEND_MODERNIZR_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/modernizr/{v}/modernizr.js"></script>' else: template = ( '<script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/{v}/modernizr.min.js"></script>\n' '<script>window.Modernizr || document.write(\'<script src="{static}djfrontend/js/modernizr/{v}/modernizr.min.js"><\/script>\')</script>') return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277173
djfrontend_jquery
test
def djfrontend_jquery(version=None): """ Returns jQuery JavaScript file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN with local fallback. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY', DJFRONTEND_JQUERY_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/{v}/jquery.js"></script>' else: template = ( '<script src="//ajax.googleapis.com/ajax/libs/jquery/{v}/jquery.min.js"></script>' '<script>window.jQuery || document.write(\'<script src="{static}djfrontend/js/jquery/{v}/jquery.min.js"><\/script>\')</script>') return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277174
djfrontend_jqueryui
test
def djfrontend_jqueryui(version=None): """ Returns the jQuery UI plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN with local fallback. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERYUI', DJFRONTEND_JQUERYUI_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): return format_html( '<script src="{0}djfrontend/js/jquery/jqueryui/{1}/jquery-ui.js"></script>', _static_url, version) else: return format_html( '<script src="//ajax.googleapis.com/ajax/libs/jqueryui/{v}/jquery-ui.min.js"></script>' '<script>window.jQuery.ui || document.write(\'<script src="{static}djfrontend/js/jquery/jqueryui/{v}/jquery-ui.min.js"><\/script>\')</script>', static=_static_url, v=version)
python
{ "resource": "" }
q277175
djfrontend_jquery_datatables
test
def djfrontend_jquery_datatables(version=None):
    """
    Returns the jQuery DataTables plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', False):
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
        else:
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)

    if getattr(settings, 'TEMPLATE_DEBUG', False):
        template = '<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.js"></script>'
    else:
        template = (
            '<script src="//cdnjs.cloudflare.com/ajax/libs/datatables/{v}/jquery.dataTables.min.js"></script>'
            '<script>window.jQuery.fn.DataTable || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.min.js"><\/script>\')</script>')

    return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277176
djfrontend_jquery_datatables_css
test
def djfrontend_jquery_datatables_css(version=None):
    """
    Returns the jQuery DataTables CSS file according to version number.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_CSS', False):
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
        else:
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES_CSS',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/jquery/jquery.dataTables/{v}/jquery.dataTables{min}.css">',
        static=_static_url, v=version, min=_min)
python
{ "resource": "" }
q277177
djfrontend_jquery_datatables_themeroller
test
def djfrontend_jquery_datatables_themeroller(version=None):
    """
    Returns the jQuery DataTables ThemeRoller CSS file according to version number.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_THEMEROLLER', False):
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
        else:
            version = getattr(
                settings, 'DJFRONTEND_JQUERY_DATATABLES_THEMEROLLER',
                DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/jquery/jquery.dataTables/{v}/jquery.dataTables_themeroller.min.css">',
        static=_static_url, v=version)
python
{ "resource": "" }
q277178
djfrontend_jquery_formset
test
def djfrontend_jquery_formset(version=None):
    """
    Returns the jQuery Dynamic Formset plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_FORMSET', DJFRONTEND_JQUERY_FORMSET_DEFAULT)

    if getattr(settings, 'TEMPLATE_DEBUG', False):
        template = '<script src="{static}djfrontend/js/jquery/jquery.formset/{v}/jquery.formset.js"></script>'
    else:
        template = (
            '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery.formset/{v}/jquery.formset.min.js"></script>\n'
            '<script>window.jQuery.fn.formset || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.formset/{v}/jquery.formset.min.js"><\/script>\')</script>')

    return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277179
djfrontend_jquery_scrollto
test
def djfrontend_jquery_scrollto(version=None):
    """
    Returns the jQuery ScrollTo plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_SCROLLTO', DJFRONTEND_JQUERY_SCROLLTO_DEFAULT)

    if getattr(settings, 'TEMPLATE_DEBUG', False):
        template = '<script src="{static}djfrontend/js/jquery/jquery.scrollTo/{v}/jquery.scrollTo.js"></script>'
    else:
        template = (
            '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-scrollTo/{v}/jquery.scrollTo.min.js"></script>'
            '<script>window.jQuery.fn.scrollTo || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.scrollTo/{v}/jquery.scrollTo.min.js"><\/script>\')</script>')

    return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277180
djfrontend_jquery_smoothscroll
test
def djfrontend_jquery_smoothscroll(version=None):
    """
    Returns the jQuery Smooth Scroll plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_SMOOTHSCROLL', DJFRONTEND_JQUERY_SMOOTHSCROLL_DEFAULT)

    if getattr(settings, 'TEMPLATE_DEBUG', False):
        template = '<script src="{static}djfrontend/js/jquery/jquery.smooth-scroll/{v}/jquery.smooth-scroll.js"></script>'
    else:
        template = (
            '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-smooth-scroll/{v}/jquery.smooth-scroll.min.js"></script>'
            '<script>window.jQuery.fn.smoothScroll || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.smooth-scroll/{v}/jquery.smooth-scroll.min.js"><\/script>\')</script>')

    return format_html(template, static=_static_url, v=version)
python
{ "resource": "" }
q277181
djfrontend_twbs_css
test
def djfrontend_twbs_css(version=None):
    """
    Returns Twitter Bootstrap CSS file.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_TWBS_CSS', False):
            version = getattr(
                settings, 'DJFRONTEND_TWBS_VERSION', DJFRONTEND_TWBS_VERSION_DEFAULT)
        else:
            version = getattr(
                settings, 'DJFRONTEND_TWBS_CSS', DJFRONTEND_TWBS_VERSION_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/twbs/{v}/bootstrap{min}.css">',
        static=_static_url, v=version, min=_min)
python
{ "resource": "" }
q277182
djfrontend_ga
test
def djfrontend_ga(account=None):
    """
    Returns Google Analytics asynchronous snippet.
    Use DJFRONTEND_GA_SETDOMAINNAME to set the domain for multi-domain or
    cross-domain tracking.
    Set DJFRONTEND_GA_SETALLOWLINKER to use the _setAllowLinker method on the
    target site for cross-domain tracking.
    Included in HTML5 Boilerplate.
    """
    if account is None:
        account = getattr(settings, 'DJFRONTEND_GA', False)

    if account:
        if getattr(settings, 'TEMPLATE_DEBUG', False):
            return ''
        if getattr(settings, 'DJFRONTEND_GA_SETDOMAINNAME', False):
            if getattr(settings, 'DJFRONTEND_GA_SETALLOWLINKER', False):
                return mark_safe(
                    '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("require", "linker");ga("linker:autoLink", ["%s"]);ga("create", "%s", "auto", {"allowLinker": true});ga("send", "pageview");</script>'
                    % (settings.DJFRONTEND_GA_SETDOMAINNAME, account))
            return mark_safe(
                '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("create", "%s", "%s");ga("send", "pageview");</script>'
                % (account, settings.DJFRONTEND_GA_SETDOMAINNAME))
        return mark_safe(
            '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("create", "%s", "auto");ga("send", "pageview");</script>'
            % account)
    return ''
python
{ "resource": "" }
q277183
CodeMirrorTextarea.render
test
def render(self, name, value, attrs=None):
    u"""Render CodeMirrorTextarea"""
    if self.js_var_format is not None:
        js_var_bit = 'var %s = ' % (self.js_var_format % name)
    else:
        js_var_bit = ''
    output = [
        super(CodeMirrorTextarea, self).render(name, value, attrs),
        '<script type="text/javascript">'
        '%sCodeMirror.fromTextArea(document.getElementById(%s), %s);'
        '</script>' % (js_var_bit, '"id_%s"' % name, self.option_json),
    ]
    return mark_safe('\n'.join(output))
python
{ "resource": "" }
q277184
iter_auth_hashes
test
def iter_auth_hashes(user, purpose, minutes_valid):
    """
    Generate auth hashes tied to the given user and purpose.

    Hashes expire on the minute boundary at now + minutes_valid, so with
    minutes_valid=1 you get *at least* one full minute to use the token.
    """
    now = timezone.now().replace(microsecond=0, second=0)
    for minute in range(minutes_valid + 1):
        yield hashlib.sha1(
            '%s:%s:%s:%s:%s' % (
                now - datetime.timedelta(minutes=minute),
                user.password,
                purpose,
                user.pk,
                settings.SECRET_KEY,
            ),
        ).hexdigest()
python
{ "resource": "" }
q277185
calc_expiry_time
test
def calc_expiry_time(minutes_valid):
    """Return specific time an auth_hash will expire."""
    return (
        timezone.now() + datetime.timedelta(minutes=minutes_valid + 1)
    ).replace(second=0, microsecond=0)
python
{ "resource": "" }
q277186
get_user_token
test
def get_user_token(user, purpose, minutes_valid):
    """Return login token info for given user."""
    token = ''.join(
        dumps([
            user.get_username(),
            get_auth_hash(user, purpose),
        ]).encode('base64').split('\n')
    )
    return {
        'id': get_meteor_id(user),
        'token': token,
        'tokenExpires': calc_expiry_time(minutes_valid),
    }
python
{ "resource": "" }
q277187
Users.serialize
test
def serialize(self, obj, *args, **kwargs):
    """Serialize user as per Meteor accounts serialization."""
    # use default serialization, then modify to suit our needs.
    data = super(Users, self).serialize(obj, *args, **kwargs)

    # everything that isn't handled explicitly ends up in `profile`
    profile = data.pop('fields')
    profile.setdefault('name', obj.get_full_name())
    fields = data['fields'] = {
        'username': obj.get_username(),
        'emails': [],
        'profile': profile,
        'permissions': sorted(self.model.get_all_permissions(obj)),
    }

    # clear out sensitive data
    for sensitive in [
            'password',
            'user_permissions_ids',
            'is_active',
            'is_staff',
            'is_superuser',
            'groups_ids',
    ]:
        profile.pop(sensitive, None)

    # createdAt (default is django.contrib.auth.models.User.date_joined)
    try:
        fields['createdAt'] = profile.pop('date_joined')
    except KeyError:
        date_joined = getattr(
            obj, 'get_date_joined',
            lambda: getattr(obj, 'date_joined', None)
        )()
        if date_joined:
            fields['createdAt'] = date_joined

    # email (default is django.contrib.auth.models.User.email)
    try:
        email = profile.pop('email')
    except KeyError:
        email = getattr(
            obj, 'get_email',
            lambda: getattr(obj, 'email', None)
        )()
    if email:
        fields['emails'].append({'address': email, 'verified': True})

    return data
python
{ "resource": "" }
q277188
Users.deserialize_profile
test
def deserialize_profile(profile, key_prefix='', pop=False):
    """De-serialize user profile fields into concrete model fields."""
    result = {}
    if pop:
        getter = profile.pop
    else:
        getter = profile.get

    def prefixed(name):
        """Return name prefixed by `key_prefix`."""
        return '%s%s' % (key_prefix, name)

    # copy the keys: with pop=True the dict is mutated while we iterate
    for key in list(profile.keys()):
        val = getter(key)
        if key == prefixed('name'):
            result['full_name'] = val
        else:
            raise MeteorError(400, 'Bad profile key: %r' % key)
    return result
python
{ "resource": "" }
q277189
Users.update
test
def update(self, selector, update, options=None):
    """Update user data."""
    # we're ignoring the `options` argument at this time
    del options
    user = get_object(
        self.model, selector['_id'],
        pk=this.user_id,
    )
    profile_update = self.deserialize_profile(
        update['$set'], key_prefix='profile.', pop=True,
    )
    if len(update['$set']) != 0:
        raise MeteorError(
            400, 'Invalid update fields: %r' % list(update['$set']),
        )
    for key, val in profile_update.items():
        setattr(user, key, val)
    user.save()
python
{ "resource": "" }
q277190
Auth.auth_failed
test
def auth_failed(**credentials):
    """Consistent fail so we don't provide attackers with valuable info."""
    if credentials:
        user_login_failed.send_robust(
            sender=__name__,
            credentials=auth._clean_credentials(credentials),
        )
    raise MeteorError(403, 'Authentication failed.')
python
{ "resource": "" }
q277191
Auth.validated_user
test
def validated_user(cls, token, purpose, minutes_valid):
    """Resolve and validate auth token, returns user object."""
    try:
        username, auth_hash = loads(token.decode('base64'))
    except (ValueError, Error):
        cls.auth_failed(token=token)
    try:
        user = cls.user_model.objects.get(**{
            cls.user_model.USERNAME_FIELD: username,
            'is_active': True,
        })
        user.backend = 'django.contrib.auth.backends.ModelBackend'
    except cls.user_model.DoesNotExist:
        cls.auth_failed(username=username, token=token)
    if auth_hash not in iter_auth_hashes(user, purpose, minutes_valid):
        cls.auth_failed(username=username, token=token)
    return user
python
{ "resource": "" }
q277192
Auth.check_secure
test
def check_secure():
    """Return True for SSL or local connections, raise otherwise."""
    if this.request.is_secure():
        return True  # using SSL
    elif this.request.META['REMOTE_ADDR'] in [
            'localhost',
            '127.0.0.1',
    ]:
        return True  # localhost
    raise MeteorError(403, 'Authentication refused without SSL.')
python
{ "resource": "" }
q277193
Auth.get_username
test
def get_username(self, user):
    """Retrieve username from user selector."""
    if isinstance(user, basestring):
        return user
    elif isinstance(user, dict) and len(user) == 1:
        [(key, val)] = user.items()
        if key == 'username' or (key == self.user_model.USERNAME_FIELD):
            # username provided directly
            return val
        elif key in ('email', 'emails.address'):
            email_field = getattr(self.user_model, 'EMAIL_FIELD', 'email')
            if self.user_model.USERNAME_FIELD == email_field:
                return val  # email is username
            # find username by email
            return self.user_model.objects.values_list(
                self.user_model.USERNAME_FIELD, flat=True,
            ).get(**{email_field: val})
        elif key in ('id', 'pk'):
            # find username by primary key (ID)
            return self.user_model.objects.values_list(
                self.user_model.USERNAME_FIELD, flat=True,
            ).get(
                pk=val,
            )
        else:
            raise MeteorError(400, 'Invalid user lookup: %r' % key)
    else:
        raise MeteorError(400, 'Invalid user expression: %r' % user)
python
{ "resource": "" }
q277194
Auth.create_user
test
def create_user(self, params):
    """Register a new user account."""
    receivers = create_user.send(
        sender=__name__,
        request=this.request,
        params=params,
    )
    if len(receivers) == 0:
        raise NotImplementedError(
            'Handler for `create_user` not registered.'
        )
    user = receivers[0][1]
    user = auth.authenticate(
        username=user.get_username(),
        password=params['password'],
    )
    self.do_login(user)
    return get_user_token(
        user=user,
        purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
python
{ "resource": "" }
q277195
Auth.do_login
test
def do_login(self, user):
    """Login a user."""
    this.user_id = user.pk
    this.user_ddp_id = get_meteor_id(user)
    # silent subscription (sans sub/nosub msg) to LoggedInUser pub
    this.user_sub_id = meteor_random_id()
    API.do_sub(this.user_sub_id, 'LoggedInUser', silent=True)
    self.update_subs(user.pk)
    user_logged_in.send(
        sender=user.__class__,
        request=this.request,
        user=user,
    )
python
{ "resource": "" }
q277196
Auth.do_logout
test
def do_logout(self):
    """Logout a user."""
    # silent unsubscription (sans sub/nosub msg) from LoggedInUser pub
    API.do_unsub(this.user_sub_id, silent=True)
    del this.user_sub_id

    self.update_subs(None)
    user_logged_out.send(
        sender=self.user_model,
        request=this.request,
        user=this.user,
    )
    this.user_id = None
    this.user_ddp_id = None
python
{ "resource": "" }
q277197
Auth.login
test
def login(self, params):
    """Login either with resume token or password."""
    if 'password' in params:
        return self.login_with_password(params)
    elif 'resume' in params:
        return self.login_with_resume_token(params)
    else:
        self.auth_failed(**params)
python
{ "resource": "" }
q277198
Auth.login_with_password
test
def login_with_password(self, params):
    """Authenticate using credentials supplied in params."""
    # never allow insecure login
    self.check_secure()

    username = self.get_username(params['user'])
    password = self.get_password(params['password'])

    user = auth.authenticate(username=username, password=password)
    if user is not None:
        # the password verified for the user
        if user.is_active:
            self.do_login(user)
            return get_user_token(
                user=user,
                purpose=HashPurpose.RESUME_LOGIN,
                minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
            )
    # Call to `authenticate` couldn't verify the username and password.
    # It will have sent the `user_login_failed` signal, no need to pass
    # the `username` argument to auth_failed().
    self.auth_failed()
python
{ "resource": "" }
q277199
Auth.login_with_resume_token
test
def login_with_resume_token(self, params):
    """
    Login with existing resume token.

    Either the token is valid and the user is logged in, or the token is
    invalid and a non-specific MeteorError(403, 'Authentication failed.')
    is raised - don't be tempted to give clues to attackers as to why
    their logins are invalid!
    """
    # never allow insecure login
    self.check_secure()

    # pull the username and auth_hash from the token
    user = self.validated_user(
        params['resume'],
        purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
    self.do_login(user)
    return get_user_token(
        user=user,
        purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
python
{ "resource": "" }