_id: string (length 2–7)
title: string (length 1–88)
partition: string (3 classes)
text: string (length 31–13.1k)
language: string (1 class)
meta_information: dict
q277100
BoltArraySpark.repartition
test
def repartition(self, npartitions): """ Repartitions the underlying RDD Parameters ---------- npartitions : int Number of partitions to repartition the underlying RDD to """
python
{ "resource": "" }
q277101
BoltArraySpark.stack
test
def stack(self, size=None): """ Aggregates records of a distributed array. Stacking should improve the performance of vectorized operations, but the resulting StackedArray object only exposes a restricted set of operations (e.g. map, reduce). The unstack method can be used to restore the full bolt array. Parameters ---------- size : int, optional, default=None The maximum size for each stack (number of original records), will aggregate groups
python
{ "resource": "" }
q277102
BoltArraySpark._align
test
def _align(self, axis): """ Align spark bolt array so that axes for iteration are in the keys. This operation is applied before most functional operators. It ensures that the specified axes are valid, and swaps key/value axes so that functional operators can be applied over the correct records. Parameters ---------- axis: tuple[int] One or more axes that will be iterated over by a functional operator Returns ------- BoltArraySpark """ # ensure that the specified axes are valid
python
{ "resource": "" }
q277103
BoltArraySpark.first
test
def first(self): """ Return the first element of an array """ from bolt.local.array import
python
{ "resource": "" }
q277104
BoltArraySpark._stat
test
def _stat(self, axis=None, func=None, name=None, keepdims=False): """ Compute a statistic over an axis. Can provide either a function (for use in a reduce) or a name (for use by a stat counter). Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes func : function, optional, default=None Function for reduce, see BoltArraySpark.reduce name : str A named statistic, see StatCounter keepdims : boolean, optional, default=False Keep the axes remaining after the operation as dimensions of size 1. """ if axis is None: axis = list(range(len(self.shape))) axis = tupleize(axis) if func and not name: return self.reduce(func, axis, keepdims) if name and not func: from bolt.local.array import BoltArrayLocal swapped = self._align(axis)
python
{ "resource": "" }
q277105
BoltArraySpark.mean
test
def mean(self, axis=None, keepdims=False): """ Return the mean of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes
python
{ "resource": "" }
q277106
BoltArraySpark.var
test
def var(self, axis=None, keepdims=False): """ Return the variance of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes
python
{ "resource": "" }
q277107
BoltArraySpark.std
test
def std(self, axis=None, keepdims=False): """ Return the standard deviation of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all
python
{ "resource": "" }
q277108
BoltArraySpark.sum
test
def sum(self, axis=None, keepdims=False): """ Return the sum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes
python
{ "resource": "" }
q277109
BoltArraySpark.max
test
def max(self, axis=None, keepdims=False): """ Return the maximum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes
python
{ "resource": "" }
q277110
BoltArraySpark.min
test
def min(self, axis=None, keepdims=False): """ Return the minimum of the array over the given axis. Parameters ---------- axis : tuple or int, optional, default=None Axis to compute statistic over, if None will compute over all axes
python
{ "resource": "" }
q277111
BoltArraySpark.chunk
test
def chunk(self, size="150", axis=None, padding=None): """ Chunks records of a distributed array. Chunking breaks arrays into subarrays, using a specified chunk size along each value dimension. Can alternatively specify an average chunk byte size (in kilobytes) and the size of chunks (as ints) will be computed automatically. Parameters ---------- size : tuple, int, or str, optional, default = "150" A string giving the size in kilobytes, or a tuple with the size of chunks along each dimension.
python
{ "resource": "" }
q277112
BoltArraySpark.swap
test
def swap(self, kaxes, vaxes, size="150"): """ Swap axes from keys to values. This is the core operation underlying shape manipulation on the Spark bolt array. It exchanges an arbitrary set of axes between the keys and the values. If either is None, will only move axes in one direction (from keys to values, or values to keys). Keys moved to values will be placed immediately after the split; values moved to keys will be placed immediately before the split. Parameters ---------- kaxes : tuple Axes from keys to move to values vaxes : tuple Axes from values to move to keys size : tuple or int, optional, default = "150" Can either provide a string giving the size in kilobytes, or a tuple with the number of chunks along each value dimension being moved Returns ------- BoltArraySpark """ kaxes = asarray(tupleize(kaxes), 'int') vaxes = asarray(tupleize(vaxes), 'int')
python
{ "resource": "" }
q277113
BoltArraySpark.transpose
test
def transpose(self, *axes): """ Return an array with the axes transposed. This operation will incur a swap unless the desired permutation can be obtained by transposing only the keys or only the values. Parameters ---------- axes : None, tuple of ints, or n ints If None, will reverse axis order. """ if len(axes) == 0: p = arange(self.ndim-1, -1, -1) else: p = asarray(argpack(axes)) istransposeable(p, range(self.ndim)) split = self.split # compute the keys/value axes that need to be swapped new_keys, new_values = p[:split], p[split:] swapping_keys = sort(new_values[new_values < split]) swapping_values = sort(new_keys[new_keys >= split]) stationary_keys = sort(new_keys[new_keys < split]) stationary_values = sort(new_values[new_values >= split])
python
{ "resource": "" }
q277114
BoltArraySpark.swapaxes
test
def swapaxes(self, axis1, axis2): """ Return the array with two axes interchanged. Parameters ---------- axis1 : int The first axis to swap axis2 : int
python
{ "resource": "" }
q277115
BoltArraySpark.reshape
test
def reshape(self, *shape): """ Return an array with the same data but a new shape. Currently only supports reshaping that independently reshapes the keys, or the values, or both. Parameters ---------- shape : tuple of ints, or n ints New shape """ new = argpack(shape) isreshapeable(new, self.shape) if new == self.shape: return self i = self._reshapebasic(new) if i == -1:
python
{ "resource": "" }
q277116
BoltArraySpark._reshapebasic
test
def _reshapebasic(self, shape): """ Check if the requested reshape can be broken into independent reshapes on the keys and values. If it can, returns the index in the new shape separating keys from values, otherwise returns -1
python
{ "resource": "" }
q277117
BoltArraySpark.squeeze
test
def squeeze(self, axis=None): """ Remove one or more single-dimensional axes from the array. Parameters ---------- axis : tuple or int One or more singleton axes to remove. """ if not any([d == 1 for d in self.shape]): return self if axis is None: drop = where(asarray(self.shape) == 1)[0] elif isinstance(axis, int): drop = asarray((axis,)) elif isinstance(axis, tuple): drop = asarray(axis) else: raise ValueError("an integer or tuple is required for the axis") if any([self.shape[i] > 1 for i in drop]): raise ValueError("cannot select an axis to squeeze out which has size greater than one") if any(asarray(drop) < self.split): kmask = set([d for d in drop if d < self.split]) kfunc = lambda k: tuple([kk for ii, kk in enumerate(k) if ii not in kmask]) else: kfunc = lambda k: k if any(asarray(drop) >= self.split):
python
{ "resource": "" }
q277118
BoltArraySpark.astype
test
def astype(self, dtype, casting='unsafe'): """ Cast the array to a specified type. Parameters ---------- dtype : str or dtype Typecode or data-type to cast the array to (see numpy) """
python
{ "resource": "" }
q277119
BoltArraySpark.clip
test
def clip(self, min=None, max=None): """ Clip values above and below. Parameters ---------- min : scalar or array-like Minimum value. If array, will be broadcasted max : scalar or array-like Maximum value. If array, will
python
{ "resource": "" }
q277120
BoltArraySpark.toarray
test
def toarray(self): """ Returns the contents as a local array. Will likely cause memory problems for large objects. """
python
{ "resource": "" }
q277121
tupleize
test
def tupleize(arg): """ Coerce singletons, lists, and ndarrays to tuples. Parameters ---------- arg : tuple, list, ndarray, or singleton Item to coerce """ if arg is None: return None if not isinstance(arg, (tuple, list, ndarray, Iterable)): return tuple((arg,))
python
{ "resource": "" }
q277122
argpack
test
def argpack(args): """ Coerce a list of arguments to a tuple. Parameters ---------- args : tuple or nested tuple Pack arguments into a tuple, converting ((,...),) or (,) -> (,) """
python
{ "resource": "" }
q277123
inshape
test
def inshape(shape, axes): """ Checks to see if a list of axes is contained within an array shape. Parameters ---------- shape : tuple[int] the shape of a BoltArray axes : tuple[int] the axes to check against shape """ valid =
python
{ "resource": "" }
q277124
allclose
test
def allclose(a, b): """ Test that a and b are close and match in shape. Parameters ---------- a : ndarray First array to check b : ndarray Second array to check
python
{ "resource": "" }
q277125
listify
test
def listify(lst, dim): """ Flatten lists of indices and ensure bounded by a known dim. Parameters ---------- lst : list List of integer indices dim : tuple Bounds for indices """ if not all([l.dtype == int for l in lst]):
python
{ "resource": "" }
q277126
slicify
test
def slicify(slc, dim): """ Force a slice to have defined start, stop, and step from a known dim. Start and stop will always be positive. Step may be negative. There is one exception: when a negative step runs past the start of the array, the stop needs to have its default value set to -1. This is the only case of a negative start/stop value. Parameters ---------- slc : slice or int The slice to modify, or int to convert to a slice dim : tuple Bound for slice """ if isinstance(slc, slice):
python
{ "resource": "" }
q277127
istransposeable
test
def istransposeable(new, old): """ Check to see if a proposed tuple of axes is a valid permutation of an old set of axes. Checks length, axis repetition, and bounds. Parameters ---------- new : tuple tuple of proposed axes old : tuple tuple of old axes """ new, old = tupleize(new), tupleize(old)
python
{ "resource": "" }
q277128
isreshapeable
test
def isreshapeable(new, old): """ Check to see if a proposed tuple of axes is a valid reshaping of the old axes by ensuring that they can be factored. Parameters ---------- new : tuple tuple of proposed axes old : tuple
python
{ "resource": "" }
q277129
allstack
test
def allstack(vals, depth=0): """ If an ndarray has been split into multiple chunks along each axis at a number of locations, this function rebuilds the original array from those chunks. Parameters ---------- vals : nested lists of ndarrays each level of nesting of the lists representing a dimension of
python
{ "resource": "" }
q277130
iterexpand
test
def iterexpand(arry, extra): """ Expand dimensions by iteratively appending empty axes. Parameters ---------- arry : ndarray The original array extra : int The number of empty axes to append
python
{ "resource": "" }
q277131
zip_with_index
test
def zip_with_index(rdd): """ Alternate version of Spark's zipWithIndex that eagerly returns count. """ starts = [0] if rdd.getNumPartitions() > 1: nums = rdd.mapPartitions(lambda it: [sum(1 for _ in it)]).collect() count = sum(nums) for i in range(len(nums) - 1):
python
{ "resource": "" }
q277132
wrapped
test
def wrapped(f): """ Decorator to append routed docstrings """ import inspect def extract(func): append = "" args = inspect.getargspec(func) for i, a in enumerate(args.args): if i < (len(args.args) - len(args.defaults)): append += str(a) + ", " else: default = args.defaults[i-len(args.defaults)] if hasattr(default, "__name__"): default = default.__name__ else:
python
{ "resource": "" }
q277133
lookup
test
def lookup(*args, **kwargs): """ Use arguments to route constructor. Applies a series of checks on arguments to identify constructor, starting with known keyword arguments, and then applying constructor-specific checks """ if 'mode' in kwargs:
python
{ "resource": "" }
q277134
Keys.reshape
test
def reshape(self, *shape): """ Reshape just the keys of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- shape : tuple New proposed axes. """ new = argpack(shape) old = self.shape isreshapeable(new, old) if new == old: return self._barray def f(k): return unravel_index(ravel_multi_index(k, old), new)
python
{ "resource": "" }
q277135
Keys.transpose
test
def transpose(self, *axes): """ Transpose just the keys of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- axes : tuple New proposed axes. """ new = argpack(axes) old = range(self.ndim) istransposeable(new, old) if new == old: return self._barray def f(k): return tuple(k[i] for i in new)
python
{ "resource": "" }
q277136
Values.reshape
test
def reshape(self, *shape): """ Reshape just the values of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- shape : tuple New proposed axes. """ new = argpack(shape) old = self.shape isreshapeable(new, old) if new == old: return self._barray def f(v):
python
{ "resource": "" }
q277137
Values.transpose
test
def transpose(self, *axes): """ Transpose just the values of a BoltArraySpark, returning a new BoltArraySpark. Parameters ---------- axes : tuple New proposed axes. """ new = argpack(axes) old = range(self.ndim) istransposeable(new, old) if new == old: return self._barray def f(v): return v.transpose(new)
python
{ "resource": "" }
q277138
ConstructLocal.ones
test
def ones(shape, dtype=float64, order='C'): """ Create a local bolt array of ones. Parameters ---------- shape : tuple Dimensions of the desired array dtype : data-type, optional, default=float64 The desired data-type for the array. (see numpy) order : {'C', 'F', 'A'}, optional,
python
{ "resource": "" }
q277139
ConstructLocal.zeros
test
def zeros(shape, dtype=float64, order='C'): """ Create a local bolt array of zeros. Parameters ---------- shape : tuple Dimensions of the desired array. dtype : data-type, optional, default=float64 The desired data-type for the array. (see numpy) order : {'C', 'F', 'A'}, optional,
python
{ "resource": "" }
q277140
ConstructLocal.concatenate
test
def concatenate(arrays, axis=0): """ Join a sequence of arrays together. Parameters ---------- arrays : tuple A sequence of array-like e.g. (a1, a2, ...) axis : int, optional, default=0 The axis along which the arrays will be joined. Returns ------- BoltArrayLocal """ if not isinstance(arrays, tuple):
python
{ "resource": "" }
q277141
discrete_likelihood
test
def discrete_likelihood(data, xmin, alpha): """ Equation B.8 in Clauset Given a data set, an xmin value, and an alpha "scaling parameter", computes the log-likelihood (the value to be maximized) """ if not scipyOK: raise ImportError("Can't import scipy. Need scipy for zeta function.") from scipy.special import zeta as zeta
python
{ "resource": "" }
q277142
most_likely_alpha
test
def most_likely_alpha(data, xmin, alpharange=(1.5,3.5), n_alpha=201): """ Return the most likely alpha for the data given an xmin """ alpha_vector = np.linspace(alpharange[0],alpharange[1],n_alpha) return alpha_vector[discrete_max_likelihood_arg(data, xmin,
python
{ "resource": "" }
q277143
discrete_alpha_mle
test
def discrete_alpha_mle(data, xmin): """ Equation B.17 of Clauset et al 2009 The Maximum Likelihood Estimator of the "scaling parameter" alpha in the discrete case is similar to that in the continuous case """ # boolean indices of positive data gexmin = (data>=xmin) nn = gexmin.sum()
python
{ "resource": "" }
q277144
discrete_best_alpha
test
def discrete_best_alpha(data, alpharangemults=(0.9,1.1), n_alpha=201, approximate=True, verbose=True): """ Use the maximum likelihood L to determine the most likely value of alpha *alpharangemults* [ 2-tuple ] Pair of values indicating multiplicative factors above and below the approximate alpha from the MLE alpha to use when determining the "exact" alpha (by directly maximizing the likelihood function) """ xmins = np.unique(data) if approximate: alpha_of_xmin = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] else: alpha_approx = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] alpharanges = [(0.9*a,1.1*a) for a in alpha_approx] alpha_of_xmin = [ most_likely_alpha(data,xmin,alpharange=ar,n_alpha=n_alpha) for xmin,ar in zip(xmins,alpharanges) ] ksvalues = [ discrete_ksD(data, xmin, alpha) for xmin,alpha in zip(xmins,alpha_of_xmin) ] best_index = argmin(ksvalues)
python
{ "resource": "" }
q277145
plfit.discrete_best_alpha
test
def discrete_best_alpha(self, alpharangemults=(0.9,1.1), n_alpha=201, approximate=True, verbose=True, finite=True): """ Use the maximum likelihood to determine the most likely value of alpha *alpharangemults* [ 2-tuple ] Pair of values indicating multiplicative factors above and below the approximate alpha from the MLE alpha to use when determining the "exact" alpha (by directly maximizing the likelihood function) *n_alpha* [ int ] Number of alpha values to use when measuring. Larger number is more accurate. *approximate* [ bool ] If False, try to "zoom-in" around the MLE alpha and get the exact best alpha value within some range around the approximate best *verbose* [ bool ] *finite* [ bool ] Correction for finite data? """ data = self.data self._xmins = xmins = np.unique(data) if approximate: alpha_of_xmin = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] else: alpha_approx = [ discrete_alpha_mle(data,xmin) for xmin in xmins ] alpharanges = [(0.9*a,1.1*a) for a in alpha_approx] alpha_of_xmin = [ most_likely_alpha(data,xmin,alpharange=ar,n_alpha=n_alpha) for xmin,ar in zip(xmins,alpharanges) ] ksvalues = np.array([discrete_ksD(data, xmin, alpha)
python
{ "resource": "" }
q277146
plfit.plotppf
test
def plotppf(self,x=None,xmin=None,alpha=None,dolog=True,**kwargs): """ Plots the power-law-predicted value on the Y-axis against the real values along the X-axis. Can be used as a diagnostic of the fit quality. """ if not(xmin): xmin=self._xmin if not(alpha): alpha=self._alpha if not(x): x=np.sort(self.data[self.data>xmin]) else: x=np.sort(x[x>xmin]) # N = M^(-alpha+1) # M = N^(1/(-alpha+1)) m0 = min(x) N = (1.0+np.arange(len(x)))[::-1] xmodel = m0 * N**(1/(1-alpha)) / max(N)**(1/(1-alpha))
python
{ "resource": "" }
q277147
plfit.lognormal
test
def lognormal(self,doprint=True): """ Use the maximum likelihood estimator for a lognormal distribution to produce the best-fit lognormal parameters """ # N = float(self.data.shape[0]) # mu = log(self.data).sum() / N # sigmasquared = ( ( log(self.data) - mu )**2 ).sum() / N # self.lognormal_mu = mu # self.lognormal_sigma = np.sqrt(sigmasquared) # self.lognormal_likelihood = -N/2. * log(np.pi*2) - N/2. * log(sigmasquared) - 1/(2*sigmasquared) * (( self.data - mu )**2).sum() # if doprint: # print "Best fit lognormal is exp( -(x-%g)^2 / (2*%g^2)" % (mu,np.sqrt(sigmasquared)) # print "Likelihood: %g" % (self.lognormal_likelihood) if scipyOK: fitpars = scipy.stats.lognorm.fit(self.data) self.lognormal_dist = scipy.stats.lognorm(*fitpars) self.lognormal_ksD,self.lognormal_ksP = scipy.stats.kstest(self.data,self.lognormal_dist.cdf) # nnlf = NEGATIVE log likelihood self.lognormal_likelihood = -1*scipy.stats.lognorm.nnlf(fitpars,self.data) # Is this the right likelihood ratio? # Definition of L from eqn. B3 of Clauset et al 2009: # L = log(p(x|alpha)) # _nnlf from scipy.stats.distributions: # -sum(log(self._pdf(x, *args)),axis=0) # Assuming the pdf and p(x|alpha) are both non-inverted, it looks # like the _nnlf and L have opposite signs, which would explain the
python
{ "resource": "" }
q277148
sanitize_turbo
test
def sanitize_turbo(html, allowed_tags=TURBO_ALLOWED_TAGS, allowed_attrs=TURBO_ALLOWED_ATTRS): """Sanitizes HTML, removing disallowed tags and attributes. :param str|unicode html: :param list allowed_tags: List of allowed tags.
python
{ "resource": "" }
q277149
YandexTurboFeed.configure_analytics_yandex
test
def configure_analytics_yandex(self, ident, params=None): """Configure Yandex Metrika analytics counter. :param str|unicode ident: Metrika counter ID. :param dict params: Additional params. """ params = params or {}
python
{ "resource": "" }
q277150
LabelWidget.tag_list
test
def tag_list(self, tags): """ Generates a list of tags identifying those previously selected. Returns a list of tuples of the form (<tag name>, <CSS class name>). Uses the string names rather than the tags themselves in order to work with tag lists built from forms not fully submitted.
python
{ "resource": "" }
q277151
SSHKey.hash_md5
test
def hash_md5(self): """Calculate md5 fingerprint. Shamelessly copied from http://stackoverflow.com/questions/6682815/deriving-an-ssh-fingerprint-from-a-public-key-in-python For specification, see RFC4716, section 4."""
python
{ "resource": "" }
q277152
SSHKey.hash_sha256
test
def hash_sha256(self): """Calculate sha256 fingerprint.""" fp_plain = hashlib.sha256(self._decoded_key).digest() return
python
{ "resource": "" }
q277153
SSHKey.hash_sha512
test
def hash_sha512(self): """Calculates sha512 fingerprint.""" fp_plain = hashlib.sha512(self._decoded_key).digest() return
python
{ "resource": "" }
q277154
SSHKey._parse_long
test
def _parse_long(cls, data): """Calculate two's complement.""" if sys.version < '3': # this does not exist in python 3 - undefined-variable disabled to make pylint happier. ret = long(0) # pylint:disable=undefined-variable for byte in data:
python
{ "resource": "" }
q277155
SSHKey.decode_key
test
def decode_key(cls, pubkey_content): """Decode base64 coded part of the key.""" try: decoded_key = base64.b64decode(pubkey_content.encode("ascii"))
python
{ "resource": "" }
q277156
SSHKey.parse_options
test
def parse_options(self, options): """Parses ssh options string.""" quote_open = False parsed_options = {} def parse_add_single_option(opt): """Parses and validates a single option, and adds it to parsed_options field.""" if "=" in opt: opt_name, opt_value = opt.split("=", 1) opt_value = opt_value.replace('"', '') else: opt_name = opt opt_value = True if " " in opt_name or not self.OPTION_NAME_RE.match(opt_name): raise InvalidOptionNameError("%s is not valid option name." % opt_name) if self.strict_mode: for valid_opt_name, value_required in self.OPTIONS_SPEC: if opt_name.lower() == valid_opt_name: if value_required and opt_value is True: raise MissingMandatoryOptionValueError("%s is missing mandatory value." % opt_name) break else: raise UnknownOptionNameError("%s is unrecognized option name." % opt_name) if opt_name not in parsed_options: parsed_options[opt_name] =
python
{ "resource": "" }
q277157
SSHKey._process_ssh_rsa
test
def _process_ssh_rsa(self, data): """Parses ssh-rsa public keys.""" current_position, raw_e = self._unpack_by_int(data, 0) current_position, raw_n = self._unpack_by_int(data, current_position) unpacked_e = self._parse_long(raw_e) unpacked_n = self._parse_long(raw_n) self.rsa = RSAPublicNumbers(unpacked_e, unpacked_n).public_key(default_backend()) self.bits = self.rsa.key_size if self.strict_mode: min_length = self.RSA_MIN_LENGTH_STRICT max_length = self.RSA_MAX_LENGTH_STRICT else: min_length = self.RSA_MIN_LENGTH_LOOSE max_length = self.RSA_MAX_LENGTH_LOOSE
python
{ "resource": "" }
q277158
SSHKey._process_ssh_dss
test
def _process_ssh_dss(self, data): """Parses ssh-dss public keys.""" data_fields = {} current_position = 0 for item in ("p", "q", "g", "y"): current_position, value = self._unpack_by_int(data, current_position) data_fields[item] = self._parse_long(value) q_bits = self._bits_in_number(data_fields["q"]) p_bits = self._bits_in_number(data_fields["p"]) if q_bits != self.DSA_N_LENGTH: raise InvalidKeyError("Incorrect DSA key parameters: bits(p)=%s, q=%s" % (self.bits, q_bits)) if self.strict_mode: min_length = self.DSA_MIN_LENGTH_STRICT max_length = self.DSA_MAX_LENGTH_STRICT else: min_length = self.DSA_MIN_LENGTH_LOOSE max_length =
python
{ "resource": "" }
q277159
SSHKey._process_ecdsa_sha
test
def _process_ecdsa_sha(self, data): """Parses ecdsa-sha public keys.""" current_position, curve_information = self._unpack_by_int(data, 0) if curve_information not in self.ECDSA_CURVE_DATA: raise NotImplementedError("Invalid curve type: %s" % curve_information) curve, hash_algorithm = self.ECDSA_CURVE_DATA[curve_information] current_position, key_data = self._unpack_by_int(data, current_position) try: #
python
{ "resource": "" }
q277160
SSHKey._process_ed25516
test
def _process_ed25516(self, data): """Parses ed25519 keys. There is no (apparent) way to validate ed25519 keys. This only checks data length (256 bits), but does not try to validate the key in any way.""" current_position, verifying_key = self._unpack_by_int(data, 0) verifying_key_length = len(verifying_key) * 8 verifying_key = self._parse_long(verifying_key) if verifying_key
python
{ "resource": "" }
q277161
SSHKey.parse
test
def parse(self, keydata=None): """Validates SSH public key. Throws exception for invalid keys. Otherwise returns None. Populates the key_type and bits fields. For rsa keys, see field "rsa" for raw public key data. For dsa keys, see field "dsa". For ecdsa keys, see field "ecdsa".""" if keydata is None: if self.keydata is None: raise ValueError("Key data must be supplied either in constructor or to parse()") keydata = self.keydata else: self.reset() self.keydata = keydata if keydata.startswith("---- BEGIN SSH2 PUBLIC KEY ----"): # SSH2 key format key_type = None # There is no redundant key-type field - skip comparing plain-text and encoded data.
python
{ "resource": "" }
q277162
InitContext.step
test
def step(self, input_token=None): """Performs a step to establish the context as an initiator. This method should be called in a loop and fed input tokens from the acceptor, and its output tokens should be sent to the acceptor, until this context's :attr:`established` attribute is True. :param input_token: The input token from the acceptor (omit this param or pass None on the first call). :type input_token: bytes :returns: either a byte string with the next token to send to the acceptor, or None if there is no further token to send to the acceptor. :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context. """ minor_status = ffi.new('OM_uint32[1]') if input_token: input_token_buffer = ffi.new('gss_buffer_desc[1]') input_token_buffer[0].length = len(input_token) c_str_input_token = ffi.new('char[]', input_token) input_token_buffer[0].value = c_str_input_token else: input_token_buffer = ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER) if isinstance(self._desired_mech, OID): desired_mech = ffi.addressof(self._desired_mech._oid) else: desired_mech = ffi.cast('gss_OID', C.GSS_C_NO_OID) actual_mech = ffi.new('gss_OID[1]') output_token_buffer = ffi.new('gss_buffer_desc[1]') actual_flags = ffi.new('OM_uint32[1]') actual_time = ffi.new('OM_uint32[1]') if self._cred_object is not None: cred = self._cred_object._cred[0] else: cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL) retval = C.gss_init_sec_context( minor_status, cred, self._ctx, self.peer_name._name[0], desired_mech, self._req_flags, self._time_req, self._channel_bindings, input_token_buffer, actual_mech, output_token_buffer, actual_flags, actual_time )
python
{ "resource": "" }
q277163
AcceptContext.step
test
def step(self, input_token): """Performs a step to establish the context as an acceptor. This method should be called in a loop and fed input tokens from the initiator, and its output tokens should be sent to the initiator, until this context's :attr:`established` attribute is True. :param input_token: The input token from the initiator (required). :type input_token: bytes :returns: either a byte string with the next token to send to the initiator, or None if there is no further token to send to the initiator. :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context. """ minor_status = ffi.new('OM_uint32[1]') input_token_buffer = ffi.new('gss_buffer_desc[1]') input_token_buffer[0].length = len(input_token) c_str_import_token = ffi.new('char[]', input_token) input_token_buffer[0].value = c_str_import_token mech_type = ffi.new('gss_OID[1]') output_token_buffer = ffi.new('gss_buffer_desc[1]') src_name_handle = ffi.new('gss_name_t[1]') actual_flags = ffi.new('OM_uint32[1]') time_rec = ffi.new('OM_uint32[1]') delegated_cred_handle = ffi.new('gss_cred_id_t[1]') if self._cred_object is not None: cred = self._cred_object._cred[0] else: cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL) retval = C.gss_accept_sec_context( minor_status, self._ctx, cred, input_token_buffer, self._channel_bindings, src_name_handle, mech_type, output_token_buffer, actual_flags, time_rec, delegated_cred_handle ) if src_name_handle[0]: src_name = MechName(src_name_handle, mech_type[0]) # make sure src_name is GC'd try: if output_token_buffer[0].length != 0: out_token = _buf_to_str(output_token_buffer[0]) else: out_token = None if GSS_ERROR(retval):
python
{ "resource": "" }
q277164
Credential.mechs
test
def mechs(self): """ The set of mechanisms supported by the credential. :type: :class:`~gssapi.oids.OIDSet`
python
{ "resource": "" }
q277165
Credential.store
test
def store(self, usage=None, mech=None, overwrite=False, default=False, cred_store=None): """ Stores this credential into a 'credential store'. It can either store this credential in the default credential store, or into a specific credential store specified by a set of mechanism-specific key-value pairs. The former method of operation requires that the underlying GSSAPI implementation supports the ``gss_store_cred`` C function, the latter method requires support for the ``gss_store_cred_into`` C function. :param usage: Optional parameter specifying whether to store the initiator, acceptor, or both usages of this credential. Defaults to the value of this credential's :attr:`usage` property. :type usage: One of :data:`~gssapi.C_INITIATE`, :data:`~gssapi.C_ACCEPT` or :data:`~gssapi.C_BOTH` :param mech: Optional parameter specifying a single mechanism to store the credential element for. If not supplied, all mechanisms' elements in this credential will be stored. :type mech: :class:`~gssapi.oids.OID` :param overwrite: If True, indicates that any credential for the same principal in the credential store should be overwritten with this credential. :type overwrite: bool :param default: If True, this credential should be made available as the default credential when stored, for acquisition when no `desired_name` parameter is passed to :class:`Credential` or for use when no credential is passed to :class:`~gssapi.ctx.InitContext` or :class:`~gssapi.ctx.AcceptContext`. This is only an advisory parameter to the GSSAPI implementation. :type default: bool :param cred_store: Optional dict or list of (key, value) pairs indicating the credential store to use. The interpretation of these values will be mechanism-specific. :type cred_store: dict, or list of (str, str) :returns: A pair of values indicating the set of mechanism OIDs for which credential elements were successfully stored, and the usage of the credential that was stored. :rtype: tuple(:class:`~gssapi.oids.OIDSet`, int) :raises: :exc:`~gssapi.error.GSSException` if there is a problem with storing the credential. :exc:`NotImplementedError` if the underlying GSSAPI implementation does not support the ``gss_store_cred`` or ``gss_store_cred_into`` C functions. """ if usage is None: usage = self.usage if isinstance(mech, OID): oid_ptr = ffi.addressof(mech._oid) else: oid_ptr = ffi.cast('gss_OID', C.GSS_C_NO_OID) minor_status = ffi.new('OM_uint32[1]') elements_stored = ffi.new('gss_OID_set[1]') usage_stored = ffi.new('gss_cred_usage_t[1]')
python
{ "resource": "" }
q277166
main
test
def main(properties=properties, options=options, **custom_options): """Imports and runs setup function with given properties."""
python
{ "resource": "" }
q277167
init
test
def init( dist='dist', minver=None, maxver=None, use_markdown_readme=True, use_stdeb=False, use_distribute=False, ): """Imports and returns a setup function. If use_markdown_readme is set, then README.md is added to setuptools READMES list. If use_stdeb is set on a Debian based system, then module stdeb is imported. Stdeb supports building deb packages on Debian based systems. The package should only be installed on the same system version it was built on, though. See http://github.com/astraw/stdeb. If use_distribute is set, then distribute_setup.py is imported. """ if not minver == maxver == None: import sys if not minver <= sys.version < (maxver or 'Any'): sys.stderr.write( '%s: requires python version in <%s, %s), not %s\n' % ( sys.argv[0], minver or 'any', maxver or 'any', sys.version.split()[0])) sys.exit(1) if use_distribute: from distribute_setup import use_setuptools use_setuptools(to_dir=dist) from setuptools import setup else:
python
{ "resource": "" }
q277168
_create_file
test
def _create_file(): """ Returns a file handle which is used to record audio """ f = wave.open('audio.wav', mode='wb') f.setnchannels(2)
python
{ "resource": "" }
q277169
djfrontend_h5bp_css
test
def djfrontend_h5bp_css(version=None): """ Returns HTML5 Boilerplate CSS file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_H5BP_CSS', DJFRONTEND_H5BP_CSS_DEFAULT)
python
{ "resource": "" }
q277170
djfrontend_normalize
test
def djfrontend_normalize(version=None): """ Returns Normalize CSS file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_NORMALIZE', DJFRONTEND_NORMALIZE_DEFAULT) return format_html(
python
{ "resource": "" }
q277171
djfrontend_fontawesome
test
def djfrontend_fontawesome(version=None): """ Returns Font Awesome CSS file. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_FONTAWESOME', DJFRONTEND_FONTAWESOME_DEFAULT)
python
{ "resource": "" }
q277172
djfrontend_modernizr
test
def djfrontend_modernizr(version=None): """ Returns Modernizr JavaScript file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_MODERNIZR', DJFRONTEND_MODERNIZR_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/modernizr/{v}/modernizr.js"></script>' else: template =
python
{ "resource": "" }
q277173
djfrontend_jquery
test
def djfrontend_jquery(version=None): """ Returns jQuery JavaScript file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN with local fallback. Included in HTML5 Boilerplate. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY', DJFRONTEND_JQUERY_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/{v}/jquery.js"></script>' else: template
python
{ "resource": "" }
q277174
djfrontend_jqueryui
test
def djfrontend_jqueryui(version=None): """ Returns the jQuery UI plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN with local fallback. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERYUI', DJFRONTEND_JQUERYUI_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False):
python
{ "resource": "" }
q277175
djfrontend_jquery_datatables
test
def djfrontend_jquery_datatables(version=None): """ Returns the jQuery DataTables plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', False): version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) else:
python
{ "resource": "" }
q277176
djfrontend_jquery_datatables_css
test
def djfrontend_jquery_datatables_css(version=None): """ Returns the jQuery DataTables CSS file according to version number. """ if version is None: if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_CSS', False): version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) else:
python
{ "resource": "" }
q277177
djfrontend_jquery_datatables_themeroller
test
def djfrontend_jquery_datatables_themeroller(version=None): """ Returns the jQuery DataTables ThemeRoller CSS file according to version number. """ if version is None: if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_THEMEROLLER', False): version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT) else:
python
{ "resource": "" }
q277178
djfrontend_jquery_formset
test
def djfrontend_jquery_formset(version=None): """ Returns the jQuery Dynamic Formset plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY_FORMSET', DJFRONTEND_JQUERY_FORMSET_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/jquery.formset/{v}/jquery.formset.js"></script>' else: template = (
python
{ "resource": "" }
q277179
djfrontend_jquery_scrollto
test
def djfrontend_jquery_scrollto(version=None): """ Returns the jQuery ScrollTo plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY_SCROLLTO', DJFRONTEND_JQUERY_SCROLLTO_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/jquery.scrollTo/{v}/jquery.scrollTo.js"></script>' else: template = (
python
{ "resource": "" }
q277180
djfrontend_jquery_smoothscroll
test
def djfrontend_jquery_smoothscroll(version=None): """ Returns the jQuery Smooth Scroll plugin file according to version number. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: version = getattr(settings, 'DJFRONTEND_JQUERY_SMOOTHSCROLL', DJFRONTEND_JQUERY_SMOOTHSCROLL_DEFAULT) if getattr(settings, 'TEMPLATE_DEBUG', False): template = '<script src="{static}djfrontend/js/jquery/jquery.smooth-scroll/{v}/jquery.smooth-scroll.js"></script>' else: template = (
python
{ "resource": "" }
q277181
djfrontend_twbs_css
test
def djfrontend_twbs_css(version=None): """ Returns Twitter Bootstrap CSS file. TEMPLATE_DEBUG returns full file, otherwise returns minified file. """ if version is None: if not getattr(settings, 'DJFRONTEND_TWBS_CSS', False): version = getattr(settings, 'DJFRONTEND_TWBS_VERSION', DJFRONTEND_TWBS_VERSION_DEFAULT) else:
python
{ "resource": "" }
q277182
djfrontend_ga
test
def djfrontend_ga(account=None): """ Returns Google Analytics asynchronous snippet. Use DJFRONTEND_GA_SETDOMAINNAME to set domain for multiple, or cross-domain tracking. Set DJFRONTEND_GA_SETALLOWLINKER to use _setAllowLinker method on target site for cross-domain tracking. Included in HTML5 Boilerplate. """ if account is None: account = getattr(settings, 'DJFRONTEND_GA', False) if account: if getattr(settings, 'TEMPLATE_DEBUG', False): return '' else: if getattr(settings, 'DJFRONTEND_GA_SETDOMAINNAME', False): if getattr(settings, 'DJFRONTEND_GA_SETALLOWLINKER', False): return mark_safe( '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("require", "linker");ga("linker:autoLink", ["%s"]);ga("create", "%s", "auto", {"allowLinker": true});ga("send", "pageview");</script>' % (settings.DJFRONTEND_GA_SETDOMAINNAME, account)) else: return mark_safe(
python
{ "resource": "" }
q277183
CodeMirrorTextarea.render
test
def render(self, name, value, attrs=None): u"""Render CodeMirrorTextarea""" if self.js_var_format is not None: js_var_bit = 'var %s = ' % (self.js_var_format % name) else:
python
{ "resource": "" }
q277184
iter_auth_hashes
test
def iter_auth_hashes(user, purpose, minutes_valid): """ Generate auth tokens tied to user and specified purpose. The hash expires at the top of the minute following now + minutes_valid, such that when minutes_valid=1 you get *at least* 1 minute to use the token.
python
{ "resource": "" }
q277185
calc_expiry_time
test
def calc_expiry_time(minutes_valid): """Return specific time an auth_hash will expire."""
python
{ "resource": "" }
q277186
get_user_token
test
def get_user_token(user, purpose, minutes_valid): """Return login token info for given user.""" token = ''.join( dumps([ user.get_username(), get_auth_hash(user, purpose), ]).encode('base64').split('\n') ) return {
python
{ "resource": "" }
q277187
Users.serialize
test
def serialize(self, obj, *args, **kwargs): """Serialize user as per Meteor accounts serialization.""" # use default serialization, then modify to suit our needs. data = super(Users, self).serialize(obj, *args, **kwargs) # everything that isn't handled explicitly ends up in `profile` profile = data.pop('fields') profile.setdefault('name', obj.get_full_name()) fields = data['fields'] = { 'username': obj.get_username(), 'emails': [], 'profile': profile, 'permissions': sorted(self.model.get_all_permissions(obj)), } # clear out sensitive data for sensitive in [ 'password', 'user_permissions_ids', 'is_active', 'is_staff', 'is_superuser', 'groups_ids', ]: profile.pop(sensitive, None) # createdAt (default is django.contrib.auth.models.User.date_joined)
python
{ "resource": "" }
q277188
Users.deserialize_profile
test
def deserialize_profile(profile, key_prefix='', pop=False): """De-serialize user profile fields into concrete model fields.""" result = {} if pop: getter = profile.pop else: getter = profile.get
python
{ "resource": "" }
q277189
Users.update
test
def update(self, selector, update, options=None): """Update user data.""" # we're ignoring the `options` argument at this time del options user = get_object( self.model, selector['_id'], pk=this.user_id, ) profile_update = self.deserialize_profile(
python
{ "resource": "" }
q277190
Auth.auth_failed
test
def auth_failed(**credentials): """Consistent fail so we don't provide attackers with valuable info.""" if credentials: user_login_failed.send_robust( sender=__name__,
python
{ "resource": "" }
q277191
Auth.validated_user
test
def validated_user(cls, token, purpose, minutes_valid): """Resolve and validate auth token, returns user object.""" try: username, auth_hash = loads(token.decode('base64')) except (ValueError, Error): cls.auth_failed(token=token) try: user = cls.user_model.objects.get(**{ cls.user_model.USERNAME_FIELD: username, 'is_active': True, }) user.backend = 'django.contrib.auth.backends.ModelBackend' except
python
{ "resource": "" }
q277192
Auth.check_secure
test
def check_secure(): """Check request, return True if using SSL or local connection.""" if this.request.is_secure(): return True # using SSL elif this.request.META['REMOTE_ADDR'] in [ 'localhost',
python
{ "resource": "" }
q277193
Auth.get_username
test
def get_username(self, user): """Retrieve username from user selector.""" if isinstance(user, basestring): return user elif isinstance(user, dict) and len(user) == 1: [(key, val)] = user.items() if key == 'username' or (key == self.user_model.USERNAME_FIELD): # username provided directly return val elif key in ('email', 'emails.address'): email_field = getattr(self.user_model, 'EMAIL_FIELD', 'email') if self.user_model.USERNAME_FIELD == email_field: return val # email is username # find username by email return self.user_model.objects.values_list( self.user_model.USERNAME_FIELD, flat=True, ).get(**{email_field: val}) elif key in ('id', 'pk'):
python
{ "resource": "" }
q277194
Auth.create_user
test
def create_user(self, params): """Register a new user account.""" receivers = create_user.send( sender=__name__, request=this.request, params=params, ) if len(receivers) == 0: raise NotImplementedError(
python
{ "resource": "" }
q277195
Auth.do_login
test
def do_login(self, user): """Login a user.""" this.user_id = user.pk this.user_ddp_id = get_meteor_id(user) # silent subscription (sans sub/nosub msg) to LoggedInUser pub this.user_sub_id = meteor_random_id()
python
{ "resource": "" }
q277196
Auth.do_logout
test
def do_logout(self): """Logout a user.""" # silent unsubscription (sans sub/nosub msg) from LoggedInUser pub API.do_unsub(this.user_sub_id, silent=True) del this.user_sub_id self.update_subs(None) user_logged_out.send(
python
{ "resource": "" }
q277197
Auth.login
test
def login(self, params): """Login either with resume token or password.""" if 'password' in params: return self.login_with_password(params) elif 'resume' in params:
python
{ "resource": "" }
q277198
Auth.login_with_password
test
def login_with_password(self, params): """Authenticate using credentials supplied in params.""" # never allow insecure login self.check_secure() username = self.get_username(params['user']) password = self.get_password(params['password']) user = auth.authenticate(username=username, password=password) if user is not None: # the password verified for the user if user.is_active: self.do_login(user) return get_user_token(
python
{ "resource": "" }
q277199
Auth.login_with_resume_token
test
def login_with_resume_token(self, params): """ Login with existing resume token. Either the token is valid and the user is logged in, or the token is invalid and a non-specific ValueError("Login failed.") exception is raised - don't be tempted to give clues to attackers as to why their logins are invalid! """ # never allow insecure login self.check_secure() # pull the username and auth_hash from the token user = self.validated_user( params['resume'],
python
{ "resource": "" }