<SYSTEM_TASK:> Returns a model function frozen to the specified parameter values. <END_TASK> <USER_TASK:> Description: def make_frozen_func(self, params): """Returns a model function frozen to the specified parameter values. Any remaining arguments are left free and must be provided when the function is called. For this model, the returned function is the application of :func:`functools.partial` to the :attr:`func` property of this object. """
from functools import partial

# np.float is deprecated/removed in modern NumPy; plain float is equivalent here.
params = np.array(params, dtype=float, ndmin=1)
return partial(self.func, params)
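A minimal usage sketch of the same functools.partial freezing pattern, using a hypothetical stand-alone model function (not part of the object above):

from functools import partial
import numpy as np

def line(params, x):
    # params[0] is the slope, params[1] the intercept
    return params[0] * x + params[1]

frozen = partial(line, np.array([2.0, 1.0]))
print(frozen(np.array([0.0, 1.0, 2.0])))  # -> [1. 3. 5.]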
<SYSTEM_TASK:> Solve for the parameters, using an initial guess. <END_TASK> <USER_TASK:> Description: def solve(self, guess): """Solve for the parameters, using an initial guess. This uses the Levenberg-Marquardt optimizer described in :mod:`pwkit.lmmin`. Returns *self*. """
guess = np.array(guess, dtype=float, ndmin=1)

f = self.func
args = self._args

def lmfunc(params, vec):
    vec[:] = f(params, *args).flatten()

self.lm_prob.set_residual_func(self.data.flatten(),
                               self.invsigma.flatten(),
                               lmfunc, None)
self.lm_soln = soln = self.lm_prob.solve(guess)

self.params = soln.params
self.puncerts = soln.perror
self.covar = soln.covar
self.mfunc = self.make_frozen_func(soln.params)

# fvec = resids * invsigma = (data - mdata) * invsigma
self.resids = soln.fvec.reshape(self.data.shape) / self.invsigma
self.mdata = self.data - self.resids

# lm_soln.fnorm can be unreliable ("max(fnorm, fnorm1)" branch)
self.chisq = (self.lm_soln.fvec**2).sum()
if soln.ndof > 0:
    self.rchisq = self.chisq / soln.ndof

return self
<SYSTEM_TASK:> Return a `Model` equivalent to this object. The nonlinear solver is less <END_TASK> <USER_TASK:> Description: def as_nonlinear(self, params=None): """Return a `Model` equivalent to this object. The nonlinear solver is less efficient, but lets you freeze parameters, compute uncertainties, etc. If the `params` argument is provided, solve() will be called on the returned object with those parameters. If it is `None` and this object has parameters in `self.params`, those will be used. Otherwise, solve() will not be called on the returned object. """
if params is None:
    params = self.params

nlm = Model(None, self.data, self.invsigma)
nlm.set_func(lambda p, x: npoly.polyval(x, p),
             self.pnames,
             args=(self.x,))

if params is not None:
    nlm.solve(params)
return nlm
<SYSTEM_TASK:> Returns the URLs of all files attached to posts in the thread. <END_TASK> <USER_TASK:> Description: def files(self): """Returns the URLs of all files attached to posts in the thread."""
if self.topic.has_file:
    yield self.topic.file.file_url
for reply in self.replies:
    if reply.has_file:
        yield reply.file.file_url
<SYSTEM_TASK:> Returns the URLs of all thumbnails in the thread. <END_TASK> <USER_TASK:> Description: def thumbs(self): """Returns the URLs of all thumbnails in the thread."""
if self.topic.has_file:
    yield self.topic.file.thumbnail_url
for reply in self.replies:
    if reply.has_file:
        yield reply.file.thumbnail_url
<SYSTEM_TASK:> Returns the filenames of all files attached to posts in the thread. <END_TASK> <USER_TASK:> Description: def filenames(self): """Returns the filenames of all files attached to posts in the thread."""
if self.topic.has_file:
    yield self.topic.file.filename
for reply in self.replies:
    if reply.has_file:
        yield reply.file.filename
<SYSTEM_TASK:> Returns the filenames of all thumbnails in the thread. <END_TASK> <USER_TASK:> Description: def thumbnames(self): """Returns the filenames of all thumbnails in the thread."""
if self.topic.has_file:
    yield self.topic.file.thumbnail_fname
for reply in self.replies:
    if reply.has_file:
        yield reply.file.thumbnail_fname
<SYSTEM_TASK:> Fetch new posts from the server. <END_TASK> <USER_TASK:> Description: def update(self, force=False): """Fetch new posts from the server. Arguments: force (bool): Force a thread update, even if thread has 404'd. Returns: int: How many new posts have been fetched. """
# The thread has already 404'ed; this method shouldn't do anything anymore.
if self.is_404 and not force:
    return 0

if self._last_modified:
    headers = {'If-Modified-Since': self._last_modified}
else:
    headers = None

# Random connection errors: just return 0 and try again later.
try:
    res = self._board._requests_session.get(self._api_url, headers=headers)
except Exception:
    # try again later
    return 0

# 304 Not Modified, no new posts.
if res.status_code == 304:
    return 0

# 404 Not Found, thread died.
elif res.status_code == 404:
    self.is_404 = True
    # remove post from cache, because it's gone.
    self._board._thread_cache.pop(self.id, None)
    return 0

elif res.status_code == 200:
    # If we somehow 404'ed, we should put ourselves back in the cache.
    if self.is_404:
        self.is_404 = False
        self._board._thread_cache[self.id] = self

    self.want_update = False
    self.omitted_images = 0
    self.omitted_posts = 0

    self._last_modified = res.headers['Last-Modified']
    posts = res.json()['posts']

    original_post_count = len(self.replies)
    self.topic = Post(self, posts[0])
    if self.last_reply_id and not force:
        self.replies.extend(Post(self, p) for p in posts
                            if p['no'] > self.last_reply_id)
    else:
        self.replies[:] = [Post(self, p) for p in posts[1:]]
    new_post_count = len(self.replies)
    post_count_delta = new_post_count - original_post_count
    if not post_count_delta:
        return 0

    self.last_reply_id = self.replies[-1].post_number

    return post_count_delta

else:
    res.raise_for_status()
<SYSTEM_TASK:> Return the flux of Cas A given a frequency and the year of observation. <END_TASK> <USER_TASK:> Description: def cas_a (freq_mhz, year): """Return the flux of Cas A given a frequency and the year of observation. Based on the formula given in Baars et al., 1977. Parameters: freq_mhz - Observation frequency in MHz. year - Year of observation. May be floating-point. Returns: s, flux in Jy. """
# The snu rule is right out of Baars et al. The dnu is corrected
# for the frequency being measured in MHz, not GHz.
snu = 10. ** (5.745 - 0.770 * np.log10(freq_mhz))  # Jy
dnu = 0.01 * (0.07 - 0.30 * np.log10(freq_mhz))  # percent per yr.
loss = (1 - dnu) ** (year - 1980.)
return snu * loss
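A hypothetical call sketch, assuming cas_a as defined above is in scope; no specific flux value is asserted here, the result is whatever the coded Baars et al. rules produce:

# Flux density of Cas A at 1425 MHz as observed in 2015.
s_jy = cas_a(1425.0, 2015.0)
print('Cas A at 1425 MHz in 2015: %.1f Jy' % s_jy)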
<SYSTEM_TASK:> Insert an entry for Cas A into the table of models. Need to specify the <END_TASK> <USER_TASK:> Description: def init_cas_a (year): """Insert an entry for Cas A into the table of models. Need to specify the year of the observations to account for the time variation of Cas A's emission. """
year = float(year)
models['CasA'] = lambda f: cas_a(f, year)
<SYSTEM_TASK:> Add an entry into the models table for a source based on L-band and <END_TASK> <USER_TASK:> Description: def add_from_vla_obs (src, Lband, Cband): """Add an entry into the models table for a source based on L-band and C-band flux densities. """
if src in models:
    raise PKError('already have a model for ' + src)

fL = np.log10(1425)
fC = np.log10(4860)
lL = np.log10(Lband)
lC = np.log10(Cband)

A = (lL - lC) / (fL - fC)
B = lL - A * fL

def fluxdens(freq_mhz):
    return 10. ** (A * np.log10(freq_mhz) + B)

def spindex(freq_mhz):
    return A

models[src] = fluxdens
spindexes[src] = spindex
<SYSTEM_TASK:> Compute the main parameters of a bivariate distribution from data. The <END_TASK> <USER_TASK:> Description: def databiv (xy, coordouter=False, **kwargs): """Compute the main parameters of a bivariate distribution from data. The parameters are returned in the same format as used in the rest of this module. * xy: a 2D data array of shape (2, nsamp) or (nsamp, 2) * coordouter: if True, the coordinate axis is the outer axis; i.e. the shape is (2, nsamp). Otherwise, the coordinate axis is the inner axis; i.e. shape is (nsamp, 2). Returns: (sx, sy, cxy) In both cases, the first slice along the coordinate axis gives the X data (i.e., xy[0] or xy[:,0]) and the second slice gives the Y data (xy[1] or xy[:,1]). """
xy = np.asarray(xy)
if xy.ndim != 2:
    raise ValueError('"xy" must be a 2D array')

if coordouter:
    if xy.shape[0] != 2:
        raise ValueError('if "coordouter" is True, first axis of "xy" '
                         'must have size 2')
else:
    if xy.shape[1] != 2:
        raise ValueError('if "coordouter" is False, second axis of "xy" '
                         'must have size 2')

cov = np.cov(xy, rowvar=coordouter, **kwargs)
sx, sy = np.sqrt(np.diag(cov))
cxy = cov[0, 1]
return _bivcheck(sx, sy, cxy)
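A usage sketch assuming databiv as defined above is in scope; the covariance values are illustrative:

import numpy as np

# Draw 1000 samples from a known bivariate normal and recover (sx, sy, cxy).
# Shape is (nsamp, 2), so coordouter is left at its default of False.
true_cov = [[1.0, 0.3], [0.3, 2.0]]
xy = np.random.multivariate_normal([0.0, 0.0], true_cov, size=1000)
sx, sy, cxy = databiv(xy)
# sx ~ 1.0, sy ~ sqrt(2), cxy ~ 0.3, up to sampling noise.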
<SYSTEM_TASK:> Compute random values distributed according to the specified bivariate <END_TASK> <USER_TASK:> Description: def bivrandom (x0, y0, sx, sy, cxy, size=None): """Compute random values distributed according to the specified bivariate distribution. Inputs: * x0: the center of the x distribution (i.e. its intended mean) * y0: the center of the y distribution * sx: standard deviation (not variance) of x var * sy: standard deviation (not variance) of y var * cxy: covariance (not correlation coefficient) of x and y * size (optional): the number of values to compute Returns: array of shape (size, 2); or just (2, ), if size was not specified. The bivariate parameters of the generated data are approximately recoverable by calling 'databiv(retval)'. """
from numpy.random import multivariate_normal as mvn

p0 = np.asarray([x0, y0])
cov = np.asarray([[sx**2, cxy],
                  [cxy, sy**2]])
return mvn(p0, cov, size)
<SYSTEM_TASK:> Given two independent bivariate distributions, compute a bivariate <END_TASK> <USER_TASK:> Description: def bivconvolve (sx_a, sy_a, cxy_a, sx_b, sy_b, cxy_b): """Given two independent bivariate distributions, compute a bivariate distribution corresponding to their convolution. I'm sure this is worked out in a ton of places, but I got the equations from Pineau+ (2011A&A...527A.126P). Returns: (sx_c, sy_c, cxy_c), the parameters of the convolved distribution. """
_bivcheck(sx_a, sy_a, cxy_a)
_bivcheck(sx_b, sy_b, cxy_b)

sx_c = np.sqrt(sx_a**2 + sx_b**2)
sy_c = np.sqrt(sy_a**2 + sy_b**2)
cxy_c = cxy_a + cxy_b

return _bivcheck(sx_c, sy_c, cxy_c)
<SYSTEM_TASK:> Given a 2D Gaussian expressed as the ABC polynomial coefficients, compute <END_TASK> <USER_TASK:> Description: def abcd2 (x0, y0, a, b, c, x, y): """Given a 2D Gaussian expressed as the ABC polynomial coefficients, compute a "squared distance parameter" such that z = exp (-0.5 * d2) Inputs: * x0: position of Gaussian center on x axis * y0: position of Gaussian center on y axis * a: such that z = exp (ax² + bxy + cy²) * b: see above * c: see above * x: x coordinates of the locations for which to evaluate d2 * y: y coordinates of the locations for which to evaluate d2 Returns: d2, distance parameter defined as above. This is pretty trivial. """
_abccheck(a, b, c)
dx, dy = x - x0, y - y0
return -2 * (a * dx**2 + b * dx * dy + c * dy**2)
<SYSTEM_TASK:> Robustly solve the Hermitian generalized eigenvalue problem <END_TASK> <USER_TASK:> Description: def eigh_robust(a, b=None, eigvals=None, eigvals_only=False, overwrite_a=False, overwrite_b=False, turbo=True, check_finite=True): """Robustly solve the Hermitian generalized eigenvalue problem This function robustly solves the Hermitian generalized eigenvalue problem ``A v = lambda B v`` in the case that B is not strictly positive definite. When B is strictly positive-definite, the result is equivalent to scipy.linalg.eigh() within floating-point accuracy. Parameters ---------- a : (M, M) array_like A complex Hermitian or real symmetric matrix whose eigenvalues and eigenvectors will be computed. b : (M, M) array_like, optional A complex Hermitian or real symmetric matrix. If omitted, identity matrix is assumed. eigvals : tuple (lo, hi), optional Indexes of the smallest and largest (in ascending order) eigenvalues and corresponding eigenvectors to be returned: 0 <= lo <= hi <= M-1. If omitted, all eigenvalues and eigenvectors are returned. eigvals_only : bool, optional Whether to calculate only eigenvalues and no eigenvectors. (Default: both are calculated) turbo : bool, optional Use divide and conquer algorithm (faster but expensive in memory, only for generalized eigenvalue problem and if eigvals=None) overwrite_a : bool, optional Whether to overwrite data in `a` (may improve performance) overwrite_b : bool, optional Whether to overwrite data in `b` (may improve performance) check_finite : bool, optional Whether to check that the input matrices contain only finite numbers. Disabling may give a performance gain, but may result in problems (crashes, non-termination) if the inputs do contain infinities or NaNs. Returns ------- w : (N,) float ndarray The N (1<=N<=M) selected eigenvalues, in ascending order, each repeated according to its multiplicity. v : (M, N) complex ndarray (if eigvals_only == False) """
kwargs = dict(eigvals=eigvals, eigvals_only=eigvals_only,
              turbo=turbo, check_finite=check_finite,
              overwrite_a=overwrite_a, overwrite_b=overwrite_b)

# Check for easy case first:
if b is None:
    return linalg.eigh(a, **kwargs)

# Compute eigendecomposition of b
kwargs_b = dict(turbo=turbo, check_finite=check_finite,
                overwrite_a=overwrite_b)  # b is a for this operation
S, U = linalg.eigh(b, **kwargs_b)

# Combine a and b on left hand side via decomposition of b
S[S <= 0] = np.inf
Sinv = 1. / np.sqrt(S)
W = Sinv[:, None] * np.dot(U.T, np.dot(a, U)) * Sinv
output = linalg.eigh(W, **kwargs)

if eigvals_only:
    return output
else:
    evals, evecs = output
    return evals, np.dot(U, Sinv[:, None] * evecs)
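A short sketch of the case eigh_robust exists for, assuming it is in scope:

import numpy as np

# 'a' is symmetric; 'b' is only positive *semi*-definite (rank 1), which is
# exactly the situation the docstring describes: B not strictly positive
# definite, where the plain Cholesky-based generalized eigh can fail.
a = np.array([[2.0, 1.0],
              [1.0, 2.0]])
b = np.array([[1.0, 0.0],
              [0.0, 0.0]])
evals, evecs = eigh_robust(a, b)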
<SYSTEM_TASK:> Compute the LPP projection matrix <END_TASK> <USER_TASK:> Description: def _compute_projection(self, X, W): """Compute the LPP projection matrix Parameters ---------- X : array_like, (n_samples, n_features) The input data W : array_like or sparse matrix, (n_samples, n_samples) The precomputed adjacency matrix Returns ------- P : ndarray, (n_features, self.n_components) The matrix encoding the locality preserving projection """
# TODO: check W input; handle sparse case
X = check_array(X)

D = np.diag(W.sum(1))
L = D - W

evals, evecs = eigh_robust(np.dot(X.T, np.dot(L, X)),
                           np.dot(X.T, np.dot(D, X)),
                           eigvals=(0, self.n_components - 1))
return evecs
<SYSTEM_TASK:> Enhances aslinearoperator if A is None. <END_TASK> <USER_TASK:> Description: def get_linearoperator(shape, A, timer=None): """Enhances aslinearoperator if A is None."""
ret = None
import scipy.sparse.linalg as scipylinalg

if isinstance(A, LinearOperator):
    ret = A
elif A is None:
    ret = IdentityLinearOperator(shape)
elif isinstance(A, numpy.matrix):
    # Check for numpy.matrix before numpy.ndarray: numpy.matrix is an
    # ndarray subclass, so the ndarray branch would otherwise shadow it.
    ret = MatrixLinearOperator(numpy.atleast_2d(numpy.asarray(A)))
elif isinstance(A, numpy.ndarray) or isspmatrix(A):
    ret = MatrixLinearOperator(A)
elif isinstance(A, scipylinalg.LinearOperator):
    if not hasattr(A, 'dtype'):
        raise ArgumentError('scipy LinearOperator has no dtype.')
    ret = LinearOperator(A.shape, dot=A.matvec, dot_adj=A.rmatvec,
                         dtype=A.dtype)
else:
    raise TypeError('type not understood')

# set up timer if requested
if A is not None and not isinstance(A, IdentityLinearOperator) \
        and timer is not None:
    ret = TimedLinearOperator(ret, timer)

# check shape
if shape != ret.shape:
    raise LinearOperatorError('shape mismatch')

return ret
<SYSTEM_TASK:> Measure orthonormality of given basis. <END_TASK> <USER_TASK:> Description: def orthonormality(V, ip_B=None): """Measure orthonormality of given basis. :param V: a matrix :math:`V=[v_1,\ldots,v_n]` with ``shape==(N,n)``. :param ip_B: (optional) the inner product to use, see :py:meth:`inner`. :return: :math:`\\| I_n - \\langle V,V \\rangle \\|_2`. """
return norm(numpy.eye(V.shape[1]) - inner(V, V, ip_B=ip_B))
<SYSTEM_TASK:> Measure Arnoldi residual. <END_TASK> <USER_TASK:> Description: def arnoldi_res(A, V, H, ip_B=None): """Measure Arnoldi residual. :param A: a linear operator that can be used with scipy's aslinearoperator with ``shape==(N,N)``. :param V: Arnoldi basis matrix with ``shape==(N,n)``. :param H: Hessenberg matrix: either :math:`\\underline{H}_{n-1}` with ``shape==(n,n-1)`` or :math:`H_n` with ``shape==(n,n)`` (if the Arnoldi basis spans an A-invariant subspace). :param ip_B: (optional) the inner product to use, see :py:meth:`inner`. :returns: either :math:`\\|AV_{n-1} - V_n \\underline{H}_{n-1}\\|` or :math:`\\|A V_n - V_n H_n\\|` (in the invariant case). """
N = V.shape[0]
invariant = H.shape[0] == H.shape[1]
A = get_linearoperator((N, N), A)
if invariant:
    res = A * V - numpy.dot(V, H)
else:
    res = A * V[:, :-1] - numpy.dot(V, H)
return norm(res, ip_B=ip_B)
<SYSTEM_TASK:> QR factorization with customizable inner product. <END_TASK> <USER_TASK:> Description: def qr(X, ip_B=None, reorthos=1): """QR factorization with customizable inner product. :param X: array with ``shape==(N,k)`` :param ip_B: (optional) inner product, see :py:meth:`inner`. :param reorthos: (optional) number of reorthogonalizations. Defaults to 1 (i.e. 2 runs of modified Gram-Schmidt) which should be enough in most cases (TODO: add reference). :return: Q, R where :math:`X=QR` with :math:`\\langle Q,Q \\rangle=I_k` and R upper triangular. """
if ip_B is None and X.shape[1] > 0:
    return scipy.linalg.qr(X, mode='economic')
else:
    (N, k) = X.shape
    Q = X.copy()
    R = numpy.zeros((k, k), dtype=X.dtype)
    for i in range(k):
        for reortho in range(reorthos + 1):
            for j in range(i):
                alpha = inner(Q[:, [j]], Q[:, [i]], ip_B=ip_B)[0, 0]
                R[j, i] += alpha
                Q[:, [i]] -= alpha * Q[:, [j]]
        R[i, i] = norm(Q[:, [i]], ip_B=ip_B)
        if R[i, i] >= 1e-15:
            Q[:, [i]] /= R[i, i]
    return Q, R
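A usage sketch assuming the qr above is in scope; with ip_B left at None it reduces to SciPy's economic QR:

import numpy as np

X = np.random.rand(10, 3)
Q, R = qr(X)  # Euclidean inner product
# Q has orthonormal columns and X == Q R up to round-off:
assert np.allclose(np.dot(Q.T, Q), np.eye(3))
assert np.allclose(np.dot(Q, R), X)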
<SYSTEM_TASK:> Principal angles between two subspaces. <END_TASK> <USER_TASK:> Description: def angles(F, G, ip_B=None, compute_vectors=False): """Principal angles between two subspaces. This algorithm is based on algorithm 6.2 in `Knyazev, Argentati. Principal angles between subspaces in an A-based scalar product: algorithms and perturbation estimates. 2002.` This algorithm can also handle small angles (in contrast to the naive cosine-based svd algorithm). :param F: array with ``shape==(N,k)``. :param G: array with ``shape==(N,l)``. :param ip_B: (optional) angles are computed with respect to this inner product. See :py:meth:`inner`. :param compute_vectors: (optional) if set to ``False`` then only the angles are returned (default). If set to ``True`` then also the principal vectors are returned. :return: * ``theta`` if ``compute_vectors==False`` * ``theta, U, V`` if ``compute_vectors==True`` where * ``theta`` is the array with ``shape==(max(k,l),)`` containing the principal angles :math:`0\\leq\\theta_1\\leq\\ldots\\leq\\theta_{\\max\\{k,l\\}}\\leq \\frac{\\pi}{2}`. * ``U`` are the principal vectors from F with :math:`\\langle U,U \\rangle=I_k`. * ``V`` are the principal vectors from G with :math:`\\langle V,V \\rangle=I_l`. The principal angles and vectors fulfill the relation :math:`\\langle U,V \\rangle = \ \\begin{bmatrix} \ \\cos(\\Theta) & 0_{m,l-m} \\\\ \ 0_{k-m,m} & 0_{k-m,l-m} \ \\end{bmatrix}` where :math:`m=\\min\\{k,l\\}` and :math:`\\cos(\\Theta)=\\operatorname{diag}(\\cos(\\theta_1),\\ldots,\\cos(\\theta_m))`. Furthermore, :math:`\\theta_{m+1}=\\ldots=\\theta_{\\max\\{k,l\\}}=\\frac{\\pi}{2}`. """
# make sure that F.shape[1] >= G.shape[1]
reverse = False
if F.shape[1] < G.shape[1]:
    reverse = True
    F, G = G, F

QF, _ = qr(F, ip_B=ip_B)
QG, _ = qr(G, ip_B=ip_B)

# one or both matrices empty? (enough to check G here)
if G.shape[1] == 0:
    theta = numpy.ones(F.shape[1]) * numpy.pi / 2
    U = QF
    V = QG
else:
    Y, s, Z = scipy.linalg.svd(inner(QF, QG, ip_B=ip_B))
    Vcos = numpy.dot(QG, Z.T.conj())
    n_large = numpy.flatnonzero((s**2) < 0.5).shape[0]
    n_small = s.shape[0] - n_large
    theta = numpy.r_[
        numpy.arccos(s[n_small:]),  # [-i:] does not work if i == 0
        numpy.ones(F.shape[1] - G.shape[1]) * numpy.pi / 2]
    if compute_vectors:
        Ucos = numpy.dot(QF, Y)
        U = Ucos[:, n_small:]
        V = Vcos[:, n_small:]

    if n_small > 0:
        RG = Vcos[:, :n_small]
        S = RG - numpy.dot(QF, inner(QF, RG, ip_B=ip_B))
        _, R = qr(S, ip_B=ip_B)
        Y, u, Z = scipy.linalg.svd(R)
        theta = numpy.r_[
            numpy.arcsin(u[::-1][:n_small]),
            theta]
        if compute_vectors:
            RF = Ucos[:, :n_small]
            Vsin = numpy.dot(RG, Z.T.conj())
            # next line is hand-crafted since the line from the paper does
            # not seem to work.
            Usin = numpy.dot(RF, numpy.dot(
                numpy.diag(1 / s[:n_small]),
                numpy.dot(Z.T.conj(), numpy.diag(s[:n_small]))))
            U = numpy.c_[Usin, U]
            V = numpy.c_[Vsin, V]

if compute_vectors:
    if reverse:
        U, V = V, U
    return theta, U, V
else:
    return theta
<SYSTEM_TASK:> Compute spectral gap. <END_TASK> <USER_TASK:> Description: def gap(lamda, sigma, mode='individual'): """Compute spectral gap. Useful for eigenvalue/eigenvector bounds. Computes the gap :math:`\delta\geq 0` between two sets of real numbers ``lamda`` and ``sigma``. The gap can be computed in several ways and may not exist, see the ``mode`` parameter. :param lamda: a non-empty set :math:`\Lambda=\{\lambda_1,\ldots,\lambda_n\}` given as a single real number or a list or ``numpy.array`` with real numbers. :param sigma: a non-empty set :math:`\Sigma=\{\sigma_1,\ldots,\sigma_m\}`. See ``lamda``. :param mode: (optional). Defines how the gap should be computed. May be one of * ``'individual'`` (default): :math:`\delta=\min_{\substack{i\in\{1,\ldots,n\}\\\\j\in\{1,\ldots,m\}}} |\lambda_i - \sigma_j|`. With this mode, the gap is always defined. * ``'interval'``: determine the maximal :math:`\delta` such that :math:`\Sigma\subset\mathbb{R}\setminus[\min_{\lambda\in\Lambda}\lambda-\delta,\max_{\lambda\in\Lambda}\lambda+\delta]`. If the gap does not exist, ``None`` is returned. :return: :math:`\delta` or ``None``. """
# sanitize input
if numpy.isscalar(lamda):
    lamda = [lamda]
lamda = numpy.array(lamda)
if numpy.isscalar(sigma):
    sigma = [sigma]
sigma = numpy.array(sigma)

if not numpy.isreal(lamda).all() or not numpy.isreal(sigma).all():
    raise ArgumentError('complex spectra not yet implemented')

if mode == 'individual':
    return numpy.min(numpy.abs(numpy.reshape(lamda, (len(lamda), 1)) -
                               numpy.reshape(sigma, (1, len(sigma)))))
elif mode == 'interval':
    lamda_min, lamda_max = numpy.min(lamda), numpy.max(lamda)
    # determine all values in sigma < lamda_min or > lamda_max
    sigma_lo = sigma <= lamda_min
    sigma_hi = sigma >= lamda_max
    # is a sigma value in the lamda interval?
    if not numpy.all(sigma_lo + sigma_hi):
        return None
    delta = numpy.Infinity
    if numpy.any(sigma_lo):
        delta = lamda_min - numpy.max(sigma[sigma_lo])
    if numpy.any(sigma_hi):
        delta = numpy.min([delta, numpy.min(sigma[sigma_hi]) - lamda_max])
    return delta
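Worked calls for both modes, assuming gap as defined above is in scope; the expected results follow directly from the definitions in the docstring:

print(gap([1.0, 2.0], [0.0, 5.0]))                   # -> 1.0 ('individual')
print(gap([1.0, 2.0], [0.0, 5.0], mode='interval'))  # -> 1.0
print(gap([1.0, 2.0], [1.5], mode='interval'))       # -> None (1.5 lies inside [1, 2])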
<SYSTEM_TASK:> Apply Householder transformation to vector x. <END_TASK> <USER_TASK:> Description: def apply(self, x): """Apply Householder transformation to vector x. Applies the Householder transformation efficiently to the given vector. """
# make sure that x is a (N,*) matrix
if len(x.shape) != 2:
    raise ArgumentError('x is not a matrix of shape (N,*)')
if self.beta == 0:
    return x
return x - self.beta * self.v * numpy.dot(self.v.T.conj(), x)
<SYSTEM_TASK:> Build matrix representation of Householder transformation. <END_TASK> <USER_TASK:> Description: def matrix(self): """Build matrix representation of Householder transformation. Builds the matrix representation :math:`H = I - \\beta vv^*`. **Use with care!** This routine may be helpful for testing purposes but should not be used in production codes for high dimensions since the resulting matrix is dense. """
n = self.v.shape[0]
return numpy.eye(n, n) - self.beta * numpy.dot(self.v, self.v.T.conj())
<SYSTEM_TASK:> r"""Apply the projection to an array. <END_TASK> <USER_TASK:> Description: def apply(self, a, return_Ya=False): r"""Apply the projection to an array. The computation is carried out without explicitly forming the matrix corresponding to the projection (which would be an array with ``shape==(N,N)``). See also :py:meth:`_apply`. """
# is projection the zero operator?
if self.V.shape[1] == 0:
    Pa = numpy.zeros(a.shape)
    if return_Ya:
        return Pa, numpy.zeros((0, a.shape[1]))
    return Pa

if return_Ya:
    x, Ya = self._apply(a, return_Ya=return_Ya)
else:
    x = self._apply(a)

for i in range(self.iterations - 1):
    z = a - x
    w = self._apply(z)
    x = x + w

if return_Ya:
    return x, Ya
return x
<SYSTEM_TASK:> Apply the complementary projection to an array. <END_TASK> <USER_TASK:> Description: def apply_complement(self, a, return_Ya=False): """Apply the complementary projection to an array. :param a: array with ``shape==(N,m)``. :return: :math:`P_{\\mathcal{Y}^\\perp,\\mathcal{X}}a = a - P_{\\mathcal{X},\\mathcal{Y}^\\perp} a`. """
# is projection the zero operator? --> complement is identity
if self.V.shape[1] == 0:
    if return_Ya:
        return a.copy(), numpy.zeros((0, a.shape[1]))
    return a.copy()

if return_Ya:
    x, Ya = self._apply(a, return_Ya=True)
else:
    x = self._apply(a)

z = a - x
for i in range(self.iterations - 1):
    w = self._apply(z)
    z = z - w

if return_Ya:
    return z, Ya
return z
<SYSTEM_TASK:> Sets a command function as the default command. <END_TASK> <USER_TASK:> Description: def set_default_command(self, command): """Sets a command function as the default command."""
cmd_name = command.name
self.add_command(command)
self.default_cmd_name = cmd_name
<SYSTEM_TASK:> compute hash of string using given hash function <END_TASK> <USER_TASK:> Description: def compute_hash(func, string): """compute hash of string using given hash function"""
h = func()
h.update(string)
return h.hexdigest()
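A usage sketch assuming compute_hash is in scope; note that hashlib's update() requires bytes, not str:

import hashlib

digest = compute_hash(hashlib.sha256, b'hello')
print(digest)  # sha256 hex digest of b'hello'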
<SYSTEM_TASK:> This method is used to change the expire date of a group <END_TASK> <USER_TASK:> Description: def set_expire(self, y = 2999, mon = 12, d = 28, h = 23, min_ = 59, s = 59): """This method is used to change the expire date of a group - y is the year between 1 and 9999 inclusive - mon is the month between 1 and 12 - d is a day in the given month - h is a hour between 0 and 23 - min_ is a minute between 0 and 59 - s is a second between 0 and 59 The special date 2999-12-28 23:59:59 means that the group never expires. If only a uuid is given, the expire date will be set to this one. """
if type(y) is not int or type(mon) is not int or type(d) is not int or \
        type(h) is not int or type(min_) is not int or type(s) is not int:
    raise KPError("Date variables must be integers")
elif y > 9999 or y < 1 or mon > 12 or mon < 1 or d > 31 or d < 1 or \
        h > 23 or h < 0 or min_ > 59 or min_ < 0 or s > 59 or s < 0:
    raise KPError("No legal date")
elif ((mon == 1 or mon == 3 or mon == 5 or mon == 7 or mon == 8 or
       mon == 10 or mon == 12) and d > 31) or \
        ((mon == 4 or mon == 6 or mon == 9 or mon == 11) and d > 30) or \
        (mon == 2 and d > 28):
    raise KPError("Given day doesn't exist in given month")
else:
    self.expire = datetime(y, mon, d, h, min_, s)
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method creates an entry in this group. <END_TASK> <USER_TASK:> Description: def create_entry(self, title='', image=1, url='', username='', password='', comment='', y=2999, mon=12, d=28, h=23, min_=59, s=59): """This method creates an entry in this group. Compare to StdEntry for information about the arguments. One of the following arguments is needed: - title - url - username - password - comment """
return self.db.create_entry(self, title, image, url, username, password, comment, y, mon, d, h, min_, s)
<SYSTEM_TASK:> This method is used to change an entry title. <END_TASK> <USER_TASK:> Description: def set_title(self, title = None): """This method is used to change an entry title. A new title string is needed. """
if title is None or type(title) is not str:
    raise KPError("Need a new title.")
else:
    self.title = title
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method is used to set the image number. <END_TASK> <USER_TASK:> Description: def set_image(self, image = None): """This method is used to set the image number. image must be an unsigned int. """
if image is None or type(image) is not int:
    raise KPError("Need a new image number")
else:
    self.image = image
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method is used to set the url. <END_TASK> <USER_TASK:> Description: def set_url(self, url = None): """This method is used to set the url. url must be a string. """
if url is None or type(url) is not str:
    raise KPError("Need a new url")
else:
    self.url = url
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method is used to set the username. <END_TASK> <USER_TASK:> Description: def set_username(self, username = None): """This method is used to set the username. username must be a string. """
if username is None or type(username) is not str:
    raise KPError("Need a new username")
else:
    self.username = username
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method is used to set the password. <END_TASK> <USER_TASK:> Description: def set_password(self, password = None): """This method is used to set the password. password must be a string. """
if password is None or type(password) is not str:
    raise KPError("Need a new password")
else:
    self.password = password
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method is used to set the comment. <END_TASK> <USER_TASK:> Description: def set_comment(self, comment = None): """This method is used to set the comment. comment must be a string. """
if comment is None or type(comment) is not str:
    raise KPError("Need a new comment")
else:
    self.comment = comment
    self.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> This method closes the database correctly. <END_TASK> <USER_TASK:> Description: def close(self): """This method closes the database correctly."""
if self.filepath is not None:
    if path.isfile(self.filepath + '.lock'):
        remove(self.filepath + '.lock')
    self.filepath = None
    self.read_only = False
    self.lock()
    return True
else:
    raise KPError("Can't close a file that isn't opened")
<SYSTEM_TASK:> Unlock the database. <END_TASK> <USER_TASK:> Description: def unlock(self, password = None, keyfile = None, buf = None): """Unlock the database. masterkey is needed. """
if ((password is None or password == "") and
        (keyfile is None or keyfile == "")):
    raise KPError("A password/keyfile is needed")
elif ((type(password) is not str and password is not None) or
        (type(keyfile) is not str and keyfile is not None)):
    raise KPError("password/keyfile must be a string.")
if keyfile == "":
    keyfile = None
if password == "":
    password = None
self.password = password
self.keyfile = keyfile
return self.load(buf)
<SYSTEM_TASK:> This method removes a group. <END_TASK> <USER_TASK:> Description: def remove_group(self, group = None): """This method removes a group. The group needed to remove the group. group must be a v1Group. """
if group is None:
    raise KPError("Need group to remove a group")
elif type(group) is not v1Group:
    raise KPError("group must be v1Group")

children = []
entries = []
if group in self.groups:
    # Save all children and entries to delete them later
    children.extend(group.children)
    entries.extend(group.entries)
    # Finally remove group
    group.parent.children.remove(group)
    self.groups.remove(group)
else:
    raise KPError("Given group doesn't exist")

self._num_groups -= 1

for i in children:
    self.remove_group(i)
for i in entries:
    self.remove_entry(i)
return True
<SYSTEM_TASK:> Append group to a new parent. <END_TASK> <USER_TASK:> Description: def move_group(self, group = None, parent = None): """Append group to a new parent. group and parent must be v1Group-instances. """
if group is None or type(group) is not v1Group:
    raise KPError("A valid group must be given.")
elif parent is not None and type(parent) is not v1Group:
    raise KPError("parent must be a v1Group.")
elif group is parent:
    raise KPError("group and parent must not be the same group")

if parent is None:
    parent = self.root_group
if group in self.groups:
    self.groups.remove(group)
    group.parent.children.remove(group)
    group.parent = parent
    if parent.children:
        if parent.children[-1] is self.groups[-1]:
            self.groups.append(group)
        else:
            new_index = self.groups.index(parent.children[-1]) + 1
            self.groups.insert(new_index, group)
    else:
        new_index = self.groups.index(parent) + 1
        self.groups.insert(new_index, group)
    parent.children.append(group)
    if parent is self.root_group:
        group.level = 0
    else:
        group.level = parent.level + 1
    if group.children:
        self._move_group_helper(group)
    group.last_mod = datetime.now().replace(microsecond=0)
    return True
else:
    raise KPError("Didn't find given group.")
<SYSTEM_TASK:> Move group to another position in group's parent. <END_TASK> <USER_TASK:> Description: def move_group_in_parent(self, group = None, index = None): """Move group to another position in group's parent. index must be a valid index of group.parent.groups """
if group is None or index is None:
    raise KPError("group and index must be set")
elif type(group) is not v1Group or type(index) is not int:
    raise KPError("group must be a v1Group-instance and index "
                  "must be an integer.")
elif group not in self.groups:
    raise KPError("Given group doesn't exist")
elif index < 0 or index >= len(group.parent.children):
    raise KPError("index must be a valid index of group.parent.groups")
else:
    group_at_index = group.parent.children[index]
    pos_in_parent = group.parent.children.index(group)
    pos_in_groups = self.groups.index(group)
    pos_in_groups2 = self.groups.index(group_at_index)

    group.parent.children[index] = group
    group.parent.children[pos_in_parent] = group_at_index
    self.groups[pos_in_groups2] = group
    self.groups[pos_in_groups] = group_at_index
    if group.children:
        self._move_group_helper(group)
    if group_at_index.children:
        self._move_group_helper(group_at_index)
    group.last_mod = datetime.now().replace(microsecond=0)
    return True
<SYSTEM_TASK:> A helper to move the children of a group. <END_TASK> <USER_TASK:> Description: def _move_group_helper(self, group): """A helper to move the children of a group."""
for i in group.children:
    self.groups.remove(i)
    i.level = group.level + 1
    self.groups.insert(self.groups.index(group) + 1, i)
    if i.children:
        self._move_group_helper(i)
<SYSTEM_TASK:> This method creates a new entry. <END_TASK> <USER_TASK:> Description: def create_entry(self, group = None, title = "", image = 1, url = "", username = "", password = "", comment = "", y = 2999, mon = 12, d = 28, h = 23, min_ = 59, s = 59): """This method creates a new entry. The group which should hold the entry is needed. image must be an unsigned int >0, group a v1Group. It is possible to give an expire date in the following way: - y is the year between 1 and 9999 inclusive - mon is the month between 1 and 12 - d is a day in the given month - h is a hour between 0 and 23 - min_ is a minute between 0 and 59 - s is a second between 0 and 59 The special date 2999-12-28 23:59:59 means that entry expires never. """
if (type(title) is not str or type(image) is not int or image < 0 or
        type(url) is not str or type(username) is not str or
        type(password) is not str or type(comment) is not str or
        type(y) is not int or type(mon) is not int or type(d) is not int or
        type(h) is not int or type(min_) is not int or type(s) is not int or
        type(group) is not v1Group):
    raise KPError("One argument has not a valid type.")
elif group not in self.groups:
    raise KPError("Group doesn't exist.")
elif (y > 9999 or y < 1 or mon > 12 or mon < 1 or d > 31 or d < 1 or
        h > 23 or h < 0 or min_ > 59 or min_ < 0 or s > 59 or s < 0):
    raise KPError("No legal date")
elif (((mon == 1 or mon == 3 or mon == 5 or mon == 7 or mon == 8 or
        mon == 10 or mon == 12) and d > 31) or
        ((mon == 4 or mon == 6 or mon == 9 or mon == 11) and d > 30) or
        (mon == 2 and d > 28)):
    raise KPError("Given day doesn't exist in given month")

Random.atfork()
uuid = Random.get_random_bytes(16)
entry = v1Entry(group.id_, group, image, title, url,
                username, password, comment,
                datetime.now().replace(microsecond=0),
                datetime.now().replace(microsecond=0),
                datetime.now().replace(microsecond=0),
                datetime(y, mon, d, h, min_, s), uuid)
self.entries.append(entry)
group.entries.append(entry)
self._num_entries += 1
return True
<SYSTEM_TASK:> This method can remove entries. <END_TASK> <USER_TASK:> Description: def remove_entry(self, entry = None): """This method can remove entries. The v1Entry-object entry is needed. """
if entry is None or type(entry) is not v1Entry:
    raise KPError("Need an entry.")
elif entry in self.entries:
    entry.group.entries.remove(entry)
    self.entries.remove(entry)
    self._num_entries -= 1
    return True
else:
    raise KPError("Given entry doesn't exist.")
<SYSTEM_TASK:> Move an entry to another group. <END_TASK> <USER_TASK:> Description: def move_entry(self, entry = None, group = None): """Move an entry to another group. A v1Group group and a v1Entry entry are needed. """
if entry is None or group is None or type(entry) is not v1Entry or \
        type(group) is not v1Group:
    raise KPError("Need an entry and a group.")
elif entry not in self.entries:
    raise KPError("No entry found.")
elif group in self.groups:
    entry.group.entries.remove(entry)
    group.entries.append(entry)
    entry.group_id = group.id_
    entry.group = group
    return True
else:
    raise KPError("No group found.")
<SYSTEM_TASK:> Move entry to another position inside a group. <END_TASK> <USER_TASK:> Description: def move_entry_in_group(self, entry = None, index = None): """Move entry to another position inside a group. An entry and a valid index to insert the entry in the entry list of the holding group is needed. 0 means that the entry is moved to the first position 1 to the second and so on. """
if entry is None or index is None or type(entry) is not v1Entry \
        or type(index) is not int:
    raise KPError("Need an entry and an index.")
elif index < 0 or index > len(entry.group.entries) - 1:
    raise KPError("Index is not valid.")
elif entry not in self.entries:
    raise KPError("Entry not found.")

pos_in_group = entry.group.entries.index(entry)
pos_in_entries = self.entries.index(entry)
entry_at_index = entry.group.entries[index]
pos_in_entries2 = self.entries.index(entry_at_index)

entry.group.entries[index] = entry
entry.group.entries[pos_in_group] = entry_at_index
self.entries[pos_in_entries2] = entry
self.entries[pos_in_entries] = entry_at_index
return True
<SYSTEM_TASK:> This method creates the key to decrypt the database <END_TASK> <USER_TASK:> Description: def _transform_key(self, masterkey): """This method creates the key to decrypt the database"""
aes = AES.new(self._transf_randomseed, AES.MODE_ECB)

# Encrypt the created hash
for _ in range(self._key_transf_rounds):
    masterkey = aes.encrypt(masterkey)

# Finally, hash it again...
sha_obj = SHA256.new()
sha_obj.update(masterkey)
masterkey = sha_obj.digest()
# ...and hash the result together with the randomseed
sha_obj = SHA256.new()
sha_obj.update(self._final_randomseed + masterkey)
return sha_obj.digest()
<SYSTEM_TASK:> This method creates a key from a keyfile. <END_TASK> <USER_TASK:> Description: def _get_filekey(self): """This method creates a key from a keyfile."""
if not os.path.exists(self.keyfile):
    raise KPError('Keyfile does not exist.')
try:
    with open(self.keyfile, 'rb') as handler:
        handler.seek(0, os.SEEK_END)
        size = handler.tell()
        handler.seek(0, os.SEEK_SET)

        if size == 32:
            return handler.read(32)
        elif size == 64:
            try:
                return binascii.unhexlify(handler.read(64))
            except (TypeError, binascii.Error):
                handler.seek(0, os.SEEK_SET)

        # Fall back to hashing the whole file in 2048-byte chunks.
        sha = SHA256.new()
        while True:
            buf = handler.read(2048)
            sha.update(buf)
            if len(buf) < 2048:
                break
        return sha.digest()
except IOError as e:
    raise KPError('Could not read file: %s' % e)
<SYSTEM_TASK:> This method decrypts the database <END_TASK> <USER_TASK:> Description: def _cbc_decrypt(self, final_key, crypted_content): """This method decrypts the database"""
# Just decrypt the content with the created key
aes = AES.new(final_key, AES.MODE_CBC, self._enc_iv)
decrypted_content = aes.decrypt(crypted_content)
if sys.version > '3':
    padding = decrypted_content[-1]
else:
    padding = ord(decrypted_content[-1])
decrypted_content = decrypted_content[:len(decrypted_content) - padding]
return decrypted_content
<SYSTEM_TASK:> This method handles the different fields of a group <END_TASK> <USER_TASK:> Description: def _read_group_field(self, group, levels, field_type, field_size, decrypted_content): """This method handles the different fields of a group"""
if field_type == 0x0000:
    # Ignored (comment block)
    pass
elif field_type == 0x0001:
    group.id_ = struct.unpack('<I', decrypted_content[:4])[0]
elif field_type == 0x0002:
    try:
        group.title = struct.unpack(
            '<{0}s'.format(field_size - 1),
            decrypted_content[:field_size - 1])[0].decode('utf-8')
    except UnicodeDecodeError:
        group.title = struct.unpack(
            '<{0}s'.format(field_size - 1),
            decrypted_content[:field_size - 1])[0].decode('latin-1')
    decrypted_content = decrypted_content[1:]
elif field_type == 0x0003:
    group.creation = self._get_date(decrypted_content)
elif field_type == 0x0004:
    group.last_mod = self._get_date(decrypted_content)
elif field_type == 0x0005:
    group.last_access = self._get_date(decrypted_content)
elif field_type == 0x0006:
    group.expire = self._get_date(decrypted_content)
elif field_type == 0x0007:
    group.image = struct.unpack('<I', decrypted_content[:4])[0]
elif field_type == 0x0008:
    level = struct.unpack('<H', decrypted_content[:2])[0]
    group.level = level
    levels.append(level)
elif field_type == 0x0009:
    group.flags = struct.unpack('<I', decrypted_content[:4])[0]
elif field_type == 0xFFFF:
    pass
else:
    return False
return True
<SYSTEM_TASK:> This method handles the different fields of an entry <END_TASK> <USER_TASK:> Description: def _read_entry_field(self, entry, field_type, field_size, decrypted_content): """This method handles the different fields of an entry"""
if field_type == 0x0000:
    # Ignored
    pass
elif field_type == 0x0001:
    entry.uuid = decrypted_content[:16]
elif field_type == 0x0002:
    entry.group_id = struct.unpack('<I', decrypted_content[:4])[0]
elif field_type == 0x0003:
    entry.image = struct.unpack('<I', decrypted_content[:4])[0]
elif field_type == 0x0004:
    entry.title = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
    decrypted_content = decrypted_content[1:]
elif field_type == 0x0005:
    entry.url = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
    decrypted_content = decrypted_content[1:]
elif field_type == 0x0006:
    entry.username = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
    decrypted_content = decrypted_content[1:]
elif field_type == 0x0007:
    entry.password = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
elif field_type == 0x0008:
    entry.comment = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
elif field_type == 0x0009:
    entry.creation = self._get_date(decrypted_content)
elif field_type == 0x000A:
    entry.last_mod = self._get_date(decrypted_content)
elif field_type == 0x000B:
    entry.last_access = self._get_date(decrypted_content)
elif field_type == 0x000C:
    entry.expire = self._get_date(decrypted_content)
elif field_type == 0x000D:
    entry.binary_desc = struct.unpack(
        '<{0}s'.format(field_size - 1),
        decrypted_content[:field_size - 1])[0].decode('utf-8')
elif field_type == 0x000E:
    entry.binary = decrypted_content[:field_size]
elif field_type == 0xFFFF:
    pass
else:
    return False
return True
<SYSTEM_TASK:> This method is used to decode the packed dates of entries <END_TASK> <USER_TASK:> Description: def _get_date(self, decrypted_content): """This method is used to decode the packed dates of entries"""
# Just copied from original KeePassX source
date_field = struct.unpack('<5B', decrypted_content[:5])
dw1 = date_field[0]
dw2 = date_field[1]
dw3 = date_field[2]
dw4 = date_field[3]
dw5 = date_field[4]

y = (dw1 << 6) | (dw2 >> 2)
mon = ((dw2 & 0x03) << 2) | (dw3 >> 6)
d = (dw3 >> 1) & 0x1F
h = ((dw3 & 0x01) << 4) | (dw4 >> 4)
min_ = ((dw4 & 0x0F) << 2) | (dw5 >> 6)
s = dw5 & 0x3F

return datetime(y, mon, d, h, min_, s)
<SYSTEM_TASK:> This method is used to encode dates <END_TASK> <USER_TASK:> Description: def _pack_date(self, date): """This method is used to encode dates"""
# Just copied from original KeePassX source
y, mon, d, h, min_, s = date.timetuple()[:6]

dw1 = 0x0000FFFF & ((y >> 6) & 0x0000003F)
dw2 = 0x0000FFFF & (((y & 0x0000003F) << 2) | ((mon >> 2) & 0x00000003))
dw3 = 0x0000FFFF & (((mon & 0x00000003) << 6) | ((d & 0x0000001F) << 1) |
                    ((h >> 4) & 0x00000001))
dw4 = 0x0000FFFF & (((h & 0x0000000F) << 4) | ((min_ >> 2) & 0x0000000F))
dw5 = 0x0000FFFF & (((min_ & 0x00000003) << 6) | (s & 0x0000003F))

return struct.pack('<5B', dw1, dw2, dw3, dw4, dw5)
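A standalone sketch of the same 5-byte bit layout, with a hypothetical module-level helper (pack_date here is not part of the class above; it just mirrors the packing logic so it can be run in isolation):

import struct
from datetime import datetime

def pack_date(date):
    # Same layout as _pack_date above: a 12-bit year followed by month,
    # day, hour, minute and second packed across five bytes.
    y, mon, d, h, min_, s = date.timetuple()[:6]
    dw1 = (y >> 6) & 0x3F
    dw2 = ((y & 0x3F) << 2) | ((mon >> 2) & 0x03)
    dw3 = ((mon & 0x03) << 6) | ((d & 0x1F) << 1) | ((h >> 4) & 0x01)
    dw4 = ((h & 0x0F) << 4) | ((min_ >> 2) & 0x0F)
    dw5 = ((min_ & 0x03) << 6) | (s & 0x3F)
    return struct.pack('<5B', dw1, dw2, dw3, dw4, dw5)

# The "never expires" sentinel date fits in the five bytes:
packed = pack_date(datetime(2999, 12, 28, 23, 59, 59))
assert len(packed) == 5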
<SYSTEM_TASK:> This method creates a group tree <END_TASK> <USER_TASK:> Description: def _create_group_tree(self, levels): """This method creates a group tree"""
if levels[0] != 0:
    raise KPError("Invalid group tree")

for i in range(len(self.groups)):
    if levels[i] == 0:
        self.groups[i].parent = self.root_group
        self.groups[i].index = len(self.root_group.children)
        self.root_group.children.append(self.groups[i])
        continue
    j = i - 1
    while j >= 0:
        if levels[j] < levels[i]:
            if levels[i] - levels[j] != 1:
                raise KPError("Invalid group tree")
            self.groups[i].parent = self.groups[j]
            self.groups[i].index = len(self.groups[j].children)
            self.groups[i].parent.children.append(self.groups[i])
            break
        if j == 0:
            raise KPError("Invalid group tree")
        j -= 1

for e in range(len(self.entries)):
    for g in range(len(self.groups)):
        if self.entries[e].group_id == self.groups[g].id_:
            self.groups[g].entries.append(self.entries[e])
            self.entries[e].group = self.groups[g]
            # from original KeePassX-code, but what does it do?
            self.entries[e].index = 0
return True
<SYSTEM_TASK:> This method packs a group field <END_TASK> <USER_TASK:> Description: def _save_group_field(self, field_type, group): """This method packs a group field"""
if field_type == 0x0000:
    # Ignored (comment block)
    pass
elif field_type == 0x0001:
    if group.id_ is not None:
        return (4, struct.pack('<I', group.id_))
elif field_type == 0x0002:
    if group.title is not None:
        return (len(group.title.encode()) + 1,
                (group.title + '\0').encode())
elif field_type == 0x0003:
    if group.creation is not None:
        return (5, self._pack_date(group.creation))
elif field_type == 0x0004:
    if group.last_mod is not None:
        return (5, self._pack_date(group.last_mod))
elif field_type == 0x0005:
    if group.last_access is not None:
        return (5, self._pack_date(group.last_access))
elif field_type == 0x0006:
    if group.expire is not None:
        return (5, self._pack_date(group.expire))
elif field_type == 0x0007:
    if group.image is not None:
        return (4, struct.pack('<I', group.image))
elif field_type == 0x0008:
    if group.level is not None:
        return (2, struct.pack('<H', group.level))
elif field_type == 0x0009:
    if group.flags is not None:
        return (4, struct.pack('<I', group.flags))
return False
<SYSTEM_TASK:> This method packs an entry field <END_TASK> <USER_TASK:> Description: def _save_entry_field(self, field_type, entry): """This method packs an entry field"""
if field_type == 0x0000:
    # Ignored
    pass
elif field_type == 0x0001:
    if entry.uuid is not None:
        return (16, entry.uuid)
elif field_type == 0x0002:
    if entry.group_id is not None:
        return (4, struct.pack('<I', entry.group_id))
elif field_type == 0x0003:
    if entry.image is not None:
        return (4, struct.pack('<I', entry.image))
elif field_type == 0x0004:
    if entry.title is not None:
        return (len(entry.title.encode()) + 1,
                (entry.title + '\0').encode())
elif field_type == 0x0005:
    if entry.url is not None:
        return (len(entry.url.encode()) + 1,
                (entry.url + '\0').encode())
elif field_type == 0x0006:
    if entry.username is not None:
        return (len(entry.username.encode()) + 1,
                (entry.username + '\0').encode())
elif field_type == 0x0007:
    if entry.password is not None:
        return (len(entry.password.encode()) + 1,
                (entry.password + '\0').encode())
elif field_type == 0x0008:
    if entry.comment is not None:
        return (len(entry.comment.encode()) + 1,
                (entry.comment + '\0').encode())
elif field_type == 0x0009:
    if entry.creation is not None:
        return (5, self._pack_date(entry.creation))
elif field_type == 0x000A:
    if entry.last_mod is not None:
        return (5, self._pack_date(entry.last_mod))
elif field_type == 0x000B:
    if entry.last_access is not None:
        return (5, self._pack_date(entry.last_access))
elif field_type == 0x000C:
    if entry.expire is not None:
        return (5, self._pack_date(entry.expire))
elif field_type == 0x000D:
    if entry.binary_desc is not None:
        return (len(entry.binary_desc.encode()) + 1,
                (entry.binary_desc + '\0').encode())
elif field_type == 0x000E:
    if entry.binary is not None:
        return (len(entry.binary), entry.binary)
return False
<SYSTEM_TASK:> Get a secret from Custodia <END_TASK> <USER_TASK:> Description: def getsecret(self, section, option, **kwargs): """Get a secret from Custodia """
# keyword-only arguments; vars and fallback are directly passed through
raw = kwargs.get('raw', False)
value = self.get(section, option, **kwargs)
if raw:
    return value
return self.custodia_client.get_secret(value)
<SYSTEM_TASK:> Load Custodia plugin <END_TASK> <USER_TASK:> Description: def _load_plugin_class(menu, name): """Load Custodia plugin Entry points are preferred over dotted import path. """
group = 'custodia.{}'.format(menu)
eps = list(pkg_resources.iter_entry_points(group, name))
if len(eps) > 1:
    raise ValueError(
        "Multiple entry points for {} {}: {}".format(menu, name, eps))
elif len(eps) == 1:
    # backwards compatibility with old setuptools
    ep = eps[0]
    if hasattr(ep, 'resolve'):
        return ep.resolve()
    else:
        return ep.load(require=False)
elif '.' in name:
    # fall back to old-style dotted name
    module, classname = name.rsplit('.', 1)
    m = importlib.import_module(module)
    return getattr(m, classname)
else:
    raise ValueError("{}: {} not found".format(menu, name))
<SYSTEM_TASK:> Parses a simple message <END_TASK> <USER_TASK:> Description: def parse(self, msg, name): """Parses a simple message :param msg: the json-decoded value :param name: the requested name :raises UnknownMessageType: if the type is not 'simple' :raises InvalidMessage: if the message cannot be parsed or validated """
# On requests we imply 'simple' if there is no input message
if msg is None:
    return
if not isinstance(msg, string_types):
    raise InvalidMessage("The 'value' attribute is not a string")
self.name = name
self.payload = msg
self.msg_type = 'simple'
<SYSTEM_TASK:> Split a Kerberos principal name into parts <END_TASK> <USER_TASK:> Description: def krb5_unparse_principal_name(name): """Split a Kerberos principal name into parts Returns: * ('host', hostname, realm) for a host principal * (servicename, hostname, realm) for a service principal * (None, username, realm) for a user principal :param text name: Kerberos principal name :return: (service, host, realm) or (None, username, realm) """
prefix, realm = name.split(u'@')
if u'/' in prefix:
    service, host = prefix.rsplit(u'/', 1)
    return service, host, realm
else:
    return None, prefix, realm
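Expected outputs, following directly from the split logic above:

# Service principal -> (service, host, realm)
print(krb5_unparse_principal_name(u'HTTP/server.example.com@EXAMPLE.COM'))
# ('HTTP', 'server.example.com', 'EXAMPLE.COM')

# User principal -> (None, username, realm)
print(krb5_unparse_principal_name(u'alice@EXAMPLE.COM'))
# (None, 'alice', 'EXAMPLE.COM')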
<SYSTEM_TASK:> Parses the message. <END_TASK> <USER_TASK:> Description: def parse(self, msg, name): """Parses the message. We check that the message is properly formatted. :param msg: a json-encoded value containing a JWS or JWE+JWS token :raises InvalidMessage: if the message cannot be parsed or validated :returns: A verified payload """
try:
    jtok = JWT(jwt=msg)
except Exception as e:
    raise InvalidMessage('Failed to parse message: %s' % str(e))

try:
    token = jtok.token
    if isinstance(token, JWE):
        token.decrypt(self.kkstore.server_keys[KEY_USAGE_ENC])
        # If an encrypted payload is received then there must be
        # a nested signed payload to verify the provenance.
        payload = token.payload.decode('utf-8')
        token = JWS()
        token.deserialize(payload)
    elif isinstance(token, JWS):
        pass
    else:
        raise TypeError("Invalid Token type: %s" % type(jtok))

    # Retrieve client keys for later use
    self.client_keys = [
        JWK(**self._get_key(token.jose_header, KEY_USAGE_SIG)),
        JWK(**self._get_key(token.jose_header, KEY_USAGE_ENC))]

    # verify token and get payload
    token.verify(self.client_keys[KEY_USAGE_SIG])
    claims = json_decode(token.payload)
except Exception as e:
    logger.debug('Failed to validate message', exc_info=True)
    raise InvalidMessage('Failed to validate message: %s' % str(e))

check_kem_claims(claims, name)
self.name = name
self.payload = claims.get('value')
self.msg_type = 'kem'

return {'type': self.msg_type,
        'value': {'kid': self.client_keys[KEY_USAGE_ENC].key_id,
                  'claims': claims}}
<SYSTEM_TASK:> Bytecodes are the same except the magic number, so just change <END_TASK> <USER_TASK:> Description: def copy_magic_into_pyc(input_pyc, output_pyc, src_version, dest_version): """Bytecodes are the same except the magic number, so just change that"""
(version, timestamp, magic_int, co, is_pypy,
 source_size) = load_module(input_pyc)
assert version == float(src_version), (
    "Need Python %s bytecode; got bytecode for version %s" %
    (src_version, version))
magic_int = magic2int(magics[dest_version])
write_bytecode_file(output_pyc, co, magic_int)
print("Wrote %s" % output_pyc)
return
<SYSTEM_TASK:> MAKE_FUNCTION adds another const; probably MAKE_CLOSURE as well <END_TASK> <USER_TASK:> Description: def transform_32_33(inst, new_inst, i, n, offset, instructions, new_asm): """MAKE_FUNCTION adds another const; probably MAKE_CLOSURE as well """
add_size = xdis.op_size(new_inst.opcode, opcode_33)
if inst.opname in ('MAKE_FUNCTION', 'MAKE_CLOSURE'):
    # The previous instruction should be a LOAD_CONST holding the code
    # object of the function; its co_name supplies the function name.
    prev_inst = instructions[i - 1]
    assert prev_inst.opname == 'LOAD_CONST'
    assert isinstance(prev_inst.arg, int)

    # Add the function name as an additional LOAD_CONST
    load_fn_const = Instruction()
    load_fn_const.opname = 'LOAD_CONST'
    load_fn_const.opcode = opcode_33.opmap['LOAD_CONST']
    load_fn_const.line_no = None
    prev_const = new_asm.code.co_consts[prev_inst.arg]
    if hasattr(prev_const, 'co_name'):
        fn_name = new_asm.code.co_consts[prev_inst.arg].co_name
    else:
        fn_name = 'what-is-up'
    const_index = len(new_asm.code.co_consts)
    new_asm.code.co_consts = list(new_asm.code.co_consts)
    new_asm.code.co_consts.append(fn_name)
    load_fn_const.arg = const_index
    load_fn_const.offset = offset
    load_fn_const.starts_line = False
    load_fn_const.is_jump_target = False
    new_asm.code.instructions.append(load_fn_const)
    load_const_size = xdis.op_size(load_fn_const.opcode, opcode_33)
    add_size += load_const_size
    new_inst.offset = offset + add_size
    pass
return add_size
<SYSTEM_TASK:> MAKE_FUNCTION, and MAKE_CLOSURE have an additional LOAD_CONST of a name <END_TASK> <USER_TASK:> Description: def transform_33_32(inst, new_inst, i, n, offset, instructions, new_asm): """MAKE_FUNCTION, and MAKE_CLOSURE have an additional LOAD_CONST of a name that are not in Python 3.2. Remove these. """
add_size = xdis.op_size(new_inst.opcode, opcode_33)
if inst.opname in ('MAKE_FUNCTION', 'MAKE_CLOSURE'):
    # Previous instruction should be a LOAD_CONST which
    # contains the name of the function to call
    prev_inst = instructions[i - 1]
    assert prev_inst.opname == 'LOAD_CONST'
    assert isinstance(prev_inst.arg, int)
    assert len(instructions) > 2
    prev_inst2 = instructions[i - 2]
    assert prev_inst2.opname == 'LOAD_CONST'
    assert isinstance(prev_inst2.arg, int)

    # Remove the function name as an additional LOAD_CONST
    prev2_const = new_asm.code.co_consts[prev_inst.arg]
    assert hasattr(prev2_const, 'co_name')
    new_asm.code.instructions = new_asm.code.instructions[:-1]
    load_const_size = xdis.op_size(prev_inst.opcode, opcode_33)
    add_size -= load_const_size
    new_inst.offset = offset - add_size
    return -load_const_size
return 0
<SYSTEM_TASK:> Convert Python bytecode from one version to another. <END_TASK> <USER_TASK:> Description: def main(conversion_type, input_pyc, output_pyc): """Convert Python bytecode from one version to another. INPUT_PYC contains the input bytecode path name OUTPUT_PYC contains the output bytecode path name if supplied The --conversion-type option specifies what conversion to do. Note: there are a very limited set of conversions currently supported. Help out and write more!"""
shortname = osp.basename(input_pyc)
if shortname.endswith('.pyc'):
    shortname = shortname[:-4]
src_version = conversion_to_version(conversion_type, is_dest=False)
dest_version = conversion_to_version(conversion_type, is_dest=True)
if output_pyc is None:
    output_pyc = "%s-%s.pyc" % (shortname, dest_version)

if conversion_type in UPWARD_COMPATABLE:
    copy_magic_into_pyc(input_pyc, output_pyc, src_version, dest_version)
    return

temp_asm = NamedTemporaryFile('w', suffix='.pyasm', prefix=shortname,
                              delete=False)
(filename, co, version,
 timestamp, magic_int) = disassemble_file(input_pyc, temp_asm,
                                          asm_format=True)
temp_asm.close()
assert version == float(src_version), (
    "Need Python %s bytecode; got bytecode for version %s" %
    (src_version, version))
asm = asm_file(temp_asm.name)
new_asm = transform_asm(asm, conversion_type, src_version, dest_version)
os.unlink(temp_asm.name)
write_pycfile(output_pyc, new_asm)
<SYSTEM_TASK:> Generates fingerprints of the input. Either provide `str` to compute the fingerprint directly from your string, or `fpath` to compute it from the text of a file. <END_TASK> <USER_TASK:> Description: def generate(self, str=None, fpath=None): """Generates fingerprints of the input. Either provide `str` to compute the fingerprint directly from your string, or `fpath` to compute it from the text of a file. Make sure your text is decoded in `utf-8` format if you pass an input string. Args: str (Optional(str)): string whose fingerprint is to be computed. fpath (Optional(str)): absolute path of the text file whose fingerprint is to be computed. Returns: List(int): fingerprints of the input. Raises: FingerprintException: If the input string does not meet the requirements of the parameters provided for fingerprinting. """
self.prepare_storage() self.str = self.load_file(fpath) if fpath else self.sanitize(str) self.validate_config() self.generate_kgrams() self.hash_kgrams() self.generate_fingerprints() return self.fingerprints
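A minimal usage sketch; the Fingerprint class name and its constructor parameters are assumptions based on the docstring and on the configuration that validate_config() implies:

# Hypothetical construction; parameter names are illustrative.
fp = Fingerprint(kgram_len=4, window_len=5, base=101, modulo=2**32)
prints = fp.generate(str=u"adorunrunrunadorunrun")
print(prints)   # a list of integer fingerprints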
<SYSTEM_TASK:> Create Python bytecode from a Python assembly file. <END_TASK> <USER_TASK:> Description: def main(pyc_file, asm_path): """ Create Python bytecode from a Python assembly file. ASM_PATH gives the input Python assembly file. We suggest ending the file in .pyasm If --pyc-file is given, that indicates the path to write the Python bytecode. The path should end in '.pyc'. See https://github.com/rocky/python-xasm/blob/master/HOW-TO-USE.rst for how to write a Python assembler file. """
if os.stat(asm_path).st_size == 0: print("Size of assembly file %s is zero" % asm_path) sys.exit(1) asm = asm_file(asm_path) if not pyc_file and asm_path.endswith('.pyasm'): pyc_file = asm_path[:-len('.pyasm')] + '.pyc' write_pycfile(pyc_file, asm)
<SYSTEM_TASK:> Adds the standard 'exp' field, used to prevent replay attacks. <END_TASK> <USER_TASK:> Description: def expire(self, secs): """ Adds the standard 'exp' field, used to prevent replay attacks. Adds the 'exp' field to the payload. When a request is made, the field says that it should expire at now + `secs` seconds. Of course, this provides no protection unless the server reads and interprets this field. """
self.add_field('exp', lambda req: int(time.time() + secs))
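Since the docstring stresses that 'exp' offers no protection unless the server interprets it, here is a minimal, self-contained sketch of the corresponding verifier-side check; the helper name is made up:

import time

# Hypothetical server-side counterpart: reject payloads whose
# 'exp' timestamp has already passed.
def is_expired(payload):
    return int(time.time()) > payload.get('exp', 0)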
<SYSTEM_TASK:> Generate a payload for the given request. <END_TASK> <USER_TASK:> Description: def _generate(self, request): """ Generate a payload for the given request. """
payload = {} for field, gen in self._generators.items(): value = None if callable(gen): value = gen(request) else: value = gen if value: payload[field] = value return payload
<SYSTEM_TASK:> encode a URL to be safe as a filename <END_TASK> <USER_TASK:> Description: def url2fs(url): """ encode a URL to be safe as a filename """
uri, extension = posixpath.splitext(url) return safe64.dir(uri) + extension
<SYSTEM_TASK:> Return true if the map projection matches that used by VEarth, Google, OSM, etc. <END_TASK> <USER_TASK:> Description: def is_merc_projection(srs): """ Return true if the map projection matches that used by VEarth, Google, OSM, etc. Is currently necessary for zoom-level shorthand for scale-denominator. """
if srs.lower() == '+init=epsg:900913': return True # observed srs = dict([p.split('=') for p in srs.split() if '=' in p]) # expected # note, common optional modifiers like +no_defs, +over, and +wkt # are not pairs and should not prevent matching gym = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null' gym = dict([p.split('=') for p in gym.split() if '=' in p]) for p in gym: if srs.get(p, None) != gym.get(p, None): return False return True
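The normalization above means equivalent spellings of the spherical-mercator projection are accepted, and non-pair modifiers such as +over and +no_defs are ignored; a quick check:

assert is_merc_projection('+init=epsg:900913')
assert is_merc_projection('+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 '
                          '+lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m '
                          '+nadgrids=@null +over +no_defs')
assert not is_merc_projection('+init=epsg:4326')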
<SYSTEM_TASK:> Given a Map element and directories object, remove and return a complete <END_TASK> <USER_TASK:> Description: def extract_declarations(map_el, dirs, scale=1, user_styles=[]): """ Given a Map element and directories object, remove and return a complete list of style declarations from any Stylesheet elements found within. """
styles = [] # # First, look at all the stylesheets defined in the map itself. # for stylesheet in map_el.findall('Stylesheet'): map_el.remove(stylesheet) content, mss_href = fetch_embedded_or_remote_src(stylesheet, dirs) if content: styles.append((content, mss_href)) # # Second, look through the user-supplied styles for override rules. # for stylesheet in user_styles: mss_href = urljoin(dirs.source.rstrip('/')+'/', stylesheet) content = urllib.urlopen(mss_href).read().decode(DEFAULT_ENCODING) styles.append((content, mss_href)) declarations = [] for (content, mss_href) in styles: is_merc = is_merc_projection(map_el.get('srs','')) for declaration in stylesheet_declarations(content, is_merc, scale): # # Change the value of each URI relative to the location # of the containing stylesheet. We generally just have # the one instance of "dirs" around for a full parse cycle, # so it's necessary to perform this normalization here # instead of later, while mss_href is still available. # uri_value = declaration.value.value if uri_value.__class__ is uri: uri_value.address = urljoin(mss_href, uri_value.address) declarations.append(declaration) return declarations
<SYSTEM_TASK:> Given a Selector and Filter, return True if the Selector is <END_TASK> <USER_TASK:> Description: def is_applicable_selector(selector, filter): """ Given a Selector and Filter, return True if the Selector is compatible with the given Filter, and False if they contradict. """
for test in selector.allTests(): if not test.isCompatible(filter.tests): return False return True
<SYSTEM_TASK:> Given a list of declarations, create Rule elements with PolygonSymbolizers. <END_TASK> <USER_TASK:> Description: def get_polygon_rules(declarations): """ Given a list of declarations, create and return a list of Rule elements, each with a PolygonSymbolizer built from the applicable polygon-* properties. """
property_map = {'polygon-fill': 'fill', 'polygon-opacity': 'fill-opacity', 'polygon-gamma': 'gamma', 'polygon-meta-output': 'meta-output', 'polygon-meta-writer': 'meta-writer'} property_names = property_map.keys() # a place to put rules rules = [] for (filter, values) in filtered_property_declarations(declarations, property_names): color = values.has_key('polygon-fill') and values['polygon-fill'].value opacity = values.has_key('polygon-opacity') and values['polygon-opacity'].value or None gamma = values.has_key('polygon-gamma') and values['polygon-gamma'].value or None symbolizer = color and output.PolygonSymbolizer(color, opacity, gamma) if symbolizer: rules.append(make_rule(filter, symbolizer)) return rules
<SYSTEM_TASK:> Given a list of declarations, create Rule elements with RasterSymbolizers. <END_TASK> <USER_TASK:> Description: def get_raster_rules(declarations): """ Given a list of declarations, create and return a list of Rule elements, each with a RasterSymbolizer built from the applicable raster-* properties. A RasterSymbolizer is always created, even if there are no applicable declarations. """
property_map = {'raster-opacity': 'opacity', 'raster-mode': 'mode', 'raster-scaling': 'scaling' } property_names = property_map.keys() # a place to put rules rules = [] for (filter, values) in filtered_property_declarations(declarations, property_names): sym_params = {} for prop,attr in property_map.items(): sym_params[attr] = values.has_key(prop) and values[prop].value or None symbolizer = output.RasterSymbolizer(**sym_params) rules.append(make_rule(filter, symbolizer)) if not rules: # No raster-* rules were created, but we're here so we must need a symbolizer. rules.append(make_rule(Filter(), output.RasterSymbolizer())) return rules
<SYSTEM_TASK:> Locally cache a remote resource using a predictable file name <END_TASK> <USER_TASK:> Description: def locally_cache_remote_file(href, dir): """ Locally cache a remote resource using a predictable file name and awareness of modification date. Assume that files are "normal" which is to say they have filenames with extensions. """
scheme, host, remote_path, params, query, fragment = urlparse(href) assert scheme in ('http','https'), 'Scheme must be either http or https, not "%s" (for %s)' % (scheme,href) head, ext = posixpath.splitext(posixpath.basename(remote_path)) head = sub(r'[^\w\-_]', '', head) hash = md5(href).hexdigest()[:8] local_path = '%(dir)s/%(host)s-%(hash)s-%(head)s%(ext)s' % locals() headers = {} if posixpath.exists(local_path): msg('Found local file: %s' % local_path ) t = localtime(os.stat(local_path).st_mtime) headers['If-Modified-Since'] = strftime('%a, %d %b %Y %H:%M:%S %Z', t) if scheme == 'https': conn = HTTPSConnection(host, timeout=5) else: conn = HTTPConnection(host, timeout=5) if query: remote_path += '?%s' % query conn.request('GET', remote_path, headers=headers) resp = conn.getresponse() if resp.status in range(200, 210): # hurrah, it worked f = open(un_posix(local_path), 'wb') msg('Reading from remote: %s' % remote_path) f.write(resp.read()) f.close() elif resp.status in (301, 302, 303) and resp.getheader('location', False): # follow a redirect, totally untested. redirected_href = urljoin(href, resp.getheader('location')) redirected_path = locally_cache_remote_file(redirected_href, dir) os.rename(redirected_path, local_path) elif resp.status == 304: # hurrah, it's cached msg('Reading directly from local cache') pass else: raise Exception("Failed to get remote resource %s: %s" % (href, resp.status)) return local_path
<SYSTEM_TASK:> Given an image file href and a set of directories, modify the image file <END_TASK> <USER_TASK:> Description: def post_process_symbolizer_image_file(file_href, dirs): """ Given an image file href and a set of directories, modify the image file name so it's correct with respect to the output and cache directories. """
# support latest mapnik features of auto-detection # of image sizes and jpeg reading support... # http://trac.mapnik.org/ticket/508 mapnik_auto_image_support = (MAPNIK_VERSION >= 701) mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601) file_href = urljoin(dirs.source.rstrip('/')+'/', file_href) scheme, n, path, p, q, f = urlparse(file_href) if scheme in ('http','https'): scheme, path = '', locally_cache_remote_file(file_href, dirs.cache) if scheme not in ('file', '') or not systempath.exists(un_posix(path)): raise Exception("Image file needs to be a working, fetchable resource, not %s" % file_href) if not mapnik_auto_image_support and not Image: raise SystemExit('PIL (Python Imaging Library) is required for handling image data unless you are using PNG inputs and running Mapnik >=0.7.0') img = Image.open(un_posix(path)) if mapnik_requires_absolute_paths: path = posixpath.realpath(path) else: path = dirs.output_path(path) msg('reading symbol: %s' % path) image_name, ext = posixpath.splitext(path) if ext in ('.png', '.tif', '.tiff'): output_ext = ext else: output_ext = '.png' # new local file name dest_file = un_posix('%s%s' % (image_name, output_ext)) if not posixpath.exists(dest_file): img.save(dest_file,'PNG') msg('Destination file: %s' % dest_file) return dest_file, output_ext[1:], img.size[0], img.size[1]
<SYSTEM_TASK:> Given a shapefile href and a set of directories, modify the shapefile <END_TASK> <USER_TASK:> Description: def localize_shapefile(shp_href, dirs): """ Given a shapefile href and a set of directories, modify the shapefile name so it's correct with respect to the output and cache directories. """
# Mapnik versions before 0.6.1 require absolute paths to datasources.
mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601)

shp_href = urljoin(dirs.source.rstrip('/')+'/', shp_href)
scheme, host, path, p, q, f = urlparse(shp_href)

if scheme in ('http','https'):
    msg('%s | %s' % (shp_href, dirs.cache))
    scheme, path = '', locally_cache_remote_file(shp_href, dirs.cache)
else:
    host = None
    # normalize the local path, collecting the drive letter on windows
    path = to_posix(systempath.realpath(path))

if scheme not in ('file', ''):
    raise Exception("Shapefile needs to be local, not %s" % shp_href)

if mapnik_requires_absolute_paths:
    path = posixpath.realpath(path)

original = path
path = dirs.output_path(path)

if path.endswith('.zip'):
    # unzip_shapefile_into needs a path it can find
    path = posixpath.join(dirs.output, path)
    path = unzip_shapefile_into(path, dirs.cache, host)

return dirs.output_path(path)
<SYSTEM_TASK:> Handle localizing file-based datasources other than shapefiles. <END_TASK> <USER_TASK:> Description: def localize_file_datasource(file_href, dirs): """ Handle localizing file-based datasources other than shapefiles. This will only work for single-file based types. """
# Mapnik versions before 0.6.1 require absolute paths to datasources.
mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601)

file_href = urljoin(dirs.source.rstrip('/')+'/', file_href)
scheme, n, path, p, q, f = urlparse(file_href)

if scheme in ('http','https'):
    scheme, path = '', locally_cache_remote_file(file_href, dirs.cache)

if scheme not in ('file', ''):
    raise Exception("Datasource file needs to be a working, fetchable resource, not %s" % file_href)

if mapnik_requires_absolute_paths:
    return posixpath.realpath(path)
else:
    return dirs.output_path(path)
<SYSTEM_TASK:> Return a point guaranteed to fall within this range, hopefully near the middle. <END_TASK> <USER_TASK:> Description: def midpoint(self): """ Return a point guaranteed to fall within this range, hopefully near the middle. """
minpoint = self.leftedge if self.leftop is gt: minpoint += 1 maxpoint = self.rightedge if self.rightop is lt: maxpoint -= 1 if minpoint is None: return maxpoint elif maxpoint is None: return minpoint else: return (minpoint + maxpoint) / 2
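A worked example of the endpoint adjustment, assuming a Range holding leftop/leftedge/rightop/rightedge as the attribute names above suggest (the constructor signature is an assumption):

# Hypothetical: the open interval 5 < x < 10.
r = Range(gt, 5, lt, 10)
# strict bounds shrink to the integer endpoints 6 and 9, so the
# midpoint is (6 + 9) / 2 = 7 under Python 2 integer division.
assert r.midpoint() == 7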
<SYSTEM_TASK:> Return true if this range has any room in it. <END_TASK> <USER_TASK:> Description: def isOpen(self): """ Return true if this range has any room in it. """
if self.leftedge and self.rightedge and self.leftedge > self.rightedge: return False if self.leftedge == self.rightedge: if self.leftop is gt or self.rightop is lt: return False return True
<SYSTEM_TASK:> Convert this range to a Filter with tests on a given property. <END_TASK> <USER_TASK:> Description: def toFilter(self, property): """ Convert this range to a Filter with tests on a given property. """
if self.leftedge == self.rightedge and self.leftop is ge and self.rightop is le: # equivalent to == return Filter(style.SelectorAttributeTest(property, '=', self.leftedge)) try: return Filter(style.SelectorAttributeTest(property, opstr[self.leftop], self.leftedge), style.SelectorAttributeTest(property, opstr[self.rightop], self.rightedge)) except KeyError: try: return Filter(style.SelectorAttributeTest(property, opstr[self.rightop], self.rightedge)) except KeyError: try: return Filter(style.SelectorAttributeTest(property, opstr[self.leftop], self.leftedge)) except KeyError: return Filter()
<SYSTEM_TASK:> Return true if this filter is not trivially false, i.e. not self-contradictory. <END_TASK> <USER_TASK:> Description: def isOpen(self): """ Return true if this filter is not trivially false, i.e. not self-contradictory. """
equals = {} nequals = {} for test in self.tests: if test.op == '=': if equals.has_key(test.property) and test.value != equals[test.property]: # we've already stated that this arg must equal something else return False if nequals.has_key(test.property) and test.value in nequals[test.property]: # we've already stated that this arg must not equal its current value return False equals[test.property] = test.value if test.op == '!=': if equals.has_key(test.property) and test.value == equals[test.property]: # we've already stated that this arg must equal its current value return False if not nequals.has_key(test.property): nequals[test.property] = set() nequals[test.property].add(test.value) return True
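A sketch of the contradiction this catches, reusing the SelectorAttributeTest(property, op, value) form seen in toFilter() above:

f = Filter(style.SelectorAttributeTest('kind', '=', 'park'),
           style.SelectorAttributeTest('kind', '!=', 'park'))
assert not f.isOpen()   # 'kind' cannot both equal and differ from 'park'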
<SYSTEM_TASK:> Return a new Filter that's equal to this one, <END_TASK> <USER_TASK:> Description: def minusExtras(self): """ Return a new Filter that's equal to this one, without extra terms that don't add meaning. """
assert self.isOpen() trimmed = self.clone() equals = {} for test in trimmed.tests: if test.op == '=': equals[test.property] = test.value extras = [] for (i, test) in enumerate(trimmed.tests): if test.op == '!=' and equals.has_key(test.property) and equals[test.property] != test.value: extras.append(i) while extras: trimmed.tests.pop(extras.pop()) return trimmed
<SYSTEM_TASK:> Assign the value 'preference' to the diagonal entries <END_TASK> <USER_TASK:> Description: def add_preference(hdf5_file, preference): """Assign the value 'preference' to the diagonal entries of the matrix of similarities stored in the HDF5 data structure at 'hdf5_file'. """
Worker.hdf5_lock.acquire() with tables.open_file(hdf5_file, 'r+') as fileh: S = fileh.root.aff_prop_group.similarities diag_ind = np.diag_indices(S.nrows) S[diag_ind] = preference Worker.hdf5_lock.release()
<SYSTEM_TASK:> This procedure organizes the addition of small fluctuations on top of <END_TASK> <USER_TASK:> Description: def add_fluctuations(hdf5_file, N_columns, N_processes): """This procedure organizes the addition of small fluctuations on top of a matrix of similarities at 'hdf5_file' across 'N_processes' different processes. Each of those processes is an instance of the class 'Fluctuations_worker' defined elsewhere in this module. """
random_state = np.random.RandomState(0) slice_queue = multiprocessing.JoinableQueue() pid_list = [] for i in range(N_processes): worker = Fluctuations_worker(hdf5_file, '/aff_prop_group/similarities', random_state, N_columns, slice_queue) worker.daemon = True worker.start() pid_list.append(worker.pid) for rows_slice in chunk_generator(N_columns, 4 * N_processes): slice_queue.put(rows_slice) slice_queue.join() slice_queue.close() terminate_processes(pid_list) gc.collect()
<SYSTEM_TASK:> Organize the computation and update of the responsibility matrix <END_TASK> <USER_TASK:> Description: def compute_responsibilities(hdf5_file, N_columns, damping, N_processes): """Organize the computation and update of the responsibility matrix for Affinity Propagation clustering with 'damping' as the eponymous damping parameter. Each of the processes concurrently involved in this task is an instance of the class 'Responsibilities_worker' defined above. """
slice_queue = multiprocessing.JoinableQueue() pid_list = [] for i in range(N_processes): worker = Responsibilities_worker(hdf5_file, '/aff_prop_group', N_columns, damping, slice_queue) worker.daemon = True worker.start() pid_list.append(worker.pid) for rows_slice in chunk_generator(N_columns, 8 * N_processes): slice_queue.put(rows_slice) slice_queue.join() slice_queue.close() terminate_processes(pid_list)
<SYSTEM_TASK:> Convert a shared multiprocessing array to a numpy array. <END_TASK> <USER_TASK:> Description: def to_numpy_array(multiprocessing_array, shape, dtype): """Convert a shared multiprocessing array to a numpy array. No data copying involved. """
return np.frombuffer(multiprocessing_array.get_obj(), dtype = dtype).reshape(shape)
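A self-contained check that the returned view really shares memory with the multiprocessing array rather than copying it:

import multiprocessing
from ctypes import c_double
import numpy as np

shared = multiprocessing.Array(c_double, 6, lock=True)
view = to_numpy_array(shared, (2, 3), np.float64)
view[1, 2] = 42.0                    # writes through to the shared buffer
assert shared.get_obj()[5] == 42.0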
<SYSTEM_TASK:> Parallel computation of the sums across the rows of a two-dimensional array <END_TASK> <USER_TASK:> Description: def compute_rows_sum(hdf5_file, path, N_columns, N_processes, method = 'Pool'): """Parallel computation of the sums across the rows of a two-dimensional array accessible at the node specified by 'path' in the 'hdf5_file' hierarchical data format. The 'method' parameter selects either chunked in-process summation ('Ordinary') or a multiprocessing pool ('Pool'). """
assert isinstance(method, str), "parameter 'method' must consist in a string of characters" assert method in ('Ordinary', 'Pool'), "parameter 'method' must be set to either of 'Ordinary' or 'Pool'" if method == 'Ordinary': rows_sum = np.zeros(N_columns, dtype = float) chunk_size = get_chunk_size(N_columns, 2) with Worker.hdf5_lock: with tables.open_file(hdf5_file, 'r+') as fileh: hdf5_array = fileh.get_node(path) N_rows = hdf5_array.nrows assert N_columns == N_rows for i in range(0, N_columns, chunk_size): slc = slice(i, min(i+chunk_size, N_columns)) tmp = hdf5_array[:, slc] rows_sum[slc] = tmp[:].sum(axis = 0) else: rows_sum_array = multiprocessing.Array(c_double, N_columns, lock = True) chunk_size = get_chunk_size(N_columns, 2 * N_processes) numpy_args = rows_sum_array, N_columns, np.float64 with closing(multiprocessing.Pool(N_processes, initializer = rows_sum_init, initargs = (hdf5_file, path, rows_sum_array.get_lock()) + numpy_args)) as pool: pool.map_async(multiprocessing_get_sum, chunk_generator(N_columns, 2 * N_processes), chunk_size) pool.close() pool.join() rows_sum = to_numpy_array(*numpy_args) gc.collect() return rows_sum
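A plain-numpy sketch of the chunked accumulation in the 'Ordinary' branch, without the HDF5 plumbing; summing column slices over axis 0 yields the row sums here because the similarity matrix is square and, in this sketch, symmetric:

import numpy as np

S = np.arange(16, dtype=float).reshape(4, 4)
S = S + S.T                            # symmetric similarity matrix
N, chunk_size = 4, 2
rows_sum = np.zeros(N)
for i in range(0, N, chunk_size):
    slc = slice(i, min(i + chunk_size, N))
    rows_sum[slc] = S[:, slc].sum(axis=0)   # one chunk of columns at a time
assert np.allclose(rows_sum, S.sum(axis=1))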
<SYSTEM_TASK:> If the estimated number of clusters has not changed for 'convergence_iter' <END_TASK> <USER_TASK:> Description: def check_convergence(hdf5_file, iteration, convergence_iter, max_iter): """If the estimated number of clusters has not changed for 'convergence_iter' consecutive iterations in a total of 'max_iter' rounds of message-passing, the procedure herewith returns 'True'. Otherwise, returns 'False'. Parameter 'iteration' identifies the run of message-passing that has just completed. """
Worker.hdf5_lock.acquire() with tables.open_file(hdf5_file, 'r+') as fileh: A = fileh.root.aff_prop_group.availabilities R = fileh.root.aff_prop_group.responsibilities P = fileh.root.aff_prop_group.parallel_updates N = A.nrows diag_ind = np.diag_indices(N) E = (A[diag_ind] + R[diag_ind]) > 0 P[:, iteration % convergence_iter] = E e_mat = P[:] K = E.sum(axis = 0) Worker.hdf5_lock.release() if iteration >= convergence_iter: se = e_mat.sum(axis = 1) unconverged = (np.sum((se == convergence_iter) + (se == 0)) != N) if (not unconverged and (K > 0)) or (iteration == max_iter): return True return False
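The window test itself, extracted into a self-contained numpy sketch: each row of the sliding window records one point's exemplar status over the last convergence_iter iterations, and convergence requires every row to be all ones or all zeros:

import numpy as np

convergence_iter, N = 3, 4
e_mat = np.array([[1, 1, 1],    # stable exemplar
                  [0, 0, 0],    # stably not an exemplar
                  [1, 1, 1],
                  [1, 1, 1]])
se = e_mat.sum(axis=1)
unconverged = np.sum((se == convergence_iter) + (se == 0)) != N
K = e_mat[:, -1].sum()          # current number of exemplars
assert not unconverged and K > 0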
<SYSTEM_TASK:> One of the tasks to be performed by a pool of subprocesses, as the first <END_TASK> <USER_TASK:> Description: def cluster_labels_A(hdf5_file, c, lock, I, rows_slice): """One of the tasks to be performed by a pool of subprocesses, as the first step in identifying the cluster labels and indices of the cluster centers for Affinity Propagation clustering. """
with Worker.hdf5_lock: with tables.open_file(hdf5_file, 'r+') as fileh: S = fileh.root.aff_prop_group.similarities s = S[rows_slice, ...] s = np.argmax(s[:, I], axis = 1) with lock: c[rows_slice] = s[:] del s
<SYSTEM_TASK:> Second task to be performed by a pool of subprocesses before <END_TASK> <USER_TASK:> Description: def cluster_labels_B(hdf5_file, s_reduced, lock, I, ii, iix, rows_slice): """Second task to be performed by a pool of subprocesses before the cluster labels and cluster center indices can be identified. """
with Worker.hdf5_lock: with tables.open_file(hdf5_file, 'r+') as fileh: S = fileh.root.aff_prop_group.similarities s = S[rows_slice, ...] s = s[:, ii] s = s[iix[rows_slice]] with lock: s_reduced += s[:].sum(axis = 0) del s
<SYSTEM_TASK:> Write in tab-separated files the vectors of cluster identities and <END_TASK> <USER_TASK:> Description: def output_clusters(labels, cluster_centers_indices): """Write in tab-separated files the vectors of cluster identities and of indices of cluster centers. """
here = os.getcwd() try: output_directory = os.path.join(here, 'concurrent_AP_output') os.makedirs(output_directory) except OSError: if not os.path.isdir(output_directory): print("ERROR: concurrent_AP: output_clusters: cannot create a directory " "for storage of the results of Affinity Propagation clustering " "in your current working directory") sys.exit(1) if any(np.isnan(labels)): fmt = '%.1f' else: fmt = '%d' with open(os.path.join(output_directory, 'labels.tsv'), 'w') as fh: np.savetxt(fh, labels, fmt = fmt, delimiter = '\t') if cluster_centers_indices is not None: with open(os.path.join(output_directory, 'cluster_centers_indices.tsv'), 'w') as fh: np.savetxt(fh, cluster_centers_indices, fmt = '%.1f', delimiter = '\t')
<SYSTEM_TASK:> Get blockchain information, aggregated data as well as data for the <END_TASK> <USER_TASK:> Description: def get_coin_snapshot(fsym, tsym): """ Get blockchain information, aggregated data as well as data for the individual exchanges available for the specified currency pair. Args: fsym: FROM symbol. tsym: TO symbol. Returns: The function returns a dictionary containing blockchain as well as trading information from the different exchanges where the specified currency pair is available. {'AggregatedData': dict, 'Algorithm': ..., 'BlockNumber': ..., 'BlockReward': ..., 'Exchanges': [dict1, dict2, ...], 'NetHashesPerSecond': ..., 'ProofType': ..., 'TotalCoinsMined': ...} dict = {'FLAGS': ..., 'FROMSYMBOL': ..., 'HIGH24HOUR': ..., 'LASTMARKET': ..., 'LASTTRADEID': ..., 'LASTUPDATE': ..., 'LASTVOLUME': ..., 'LASTVOLUMETO': ..., 'LOW24HOUR': ..., 'MARKET': ..., 'OPEN24HOUR': ..., 'PRICE': ..., 'TOSYMBOL': ..., 'TYPE': ..., 'VOLUME24HOUR': ..., 'VOLUME24HOURTO': ...} """
# load data url = build_url('coinsnapshot', fsym=fsym, tsym=tsym) data = load_data(url)['Data'] return data
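A minimal usage sketch (this hits the live API, so it needs network access); the keys follow the layout documented above:

snapshot = get_coin_snapshot('BTC', 'USD')
print(snapshot['AggregatedData']['PRICE'])
print(len(snapshot['Exchanges']), 'exchanges list this pair')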
<SYSTEM_TASK:> Given an id and a list of classes, return True if this selector would match. <END_TASK> <USER_TASK:> Description: def matches(self, tag, id, classes): """ Given an id and a list of classes, return True if this selector would match. """
element = self.elements[0]
unmatched_ids = [name[1:] for name in element.names if name.startswith('#')]
unmatched_classes = [name[1:] for name in element.names if name.startswith('.')]
unmatched_tags = [name for name in element.names if name != '*' and not name.startswith('#') and not name.startswith('.')]

if tag and tag in unmatched_tags:
    unmatched_tags.remove(tag)

if id and id in unmatched_ids:
    unmatched_ids.remove(id)

for class_ in classes:
    if class_ in unmatched_classes:
        unmatched_classes.remove(class_)

if unmatched_tags or unmatched_ids or unmatched_classes:
    return False
else:
    return True
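A hypothetical call, following the (tag, id, classes) parameters above and assuming sel was parsed from something like "polygon#park.big"; every name in the selector must be accounted for:

assert sel.matches('polygon', 'park', ['big', 'green'])
assert not sel.matches('line', 'park', ['big'])   # tag 'polygon' unmatched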
<SYSTEM_TASK:> Return a new Selector with scale denominators scaled by a number. <END_TASK> <USER_TASK:> Description: def scaledBy(self, scale): """ Return a new Selector with scale denominators scaled by a number. """
scaled = deepcopy(self) for test in scaled.elements[0].tests: if type(test.value) in (int, float): if test.property == 'scale-denominator': test.value /= scale elif test.property == 'zoom': test.value += log(scale)/log(2) return scaled
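The arithmetic in play, worked for scale = 2: scale-denominator tests halve, while zoom tests rise by one level, since log(2)/log(2) = 1:

from math import log

scale = 2
scale_denominator = 500000
zoom = 10

scale_denominator /= scale          # 500000 -> 250000
zoom += log(scale) / log(2)         # 10 -> 11.0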
<SYSTEM_TASK:> Return a new Value scaled by a given number for ints and floats. <END_TASK> <USER_TASK:> Description: def scaledBy(self, scale): """ Return a new Value scaled by a given number for ints and floats. """
scaled = deepcopy(self) if type(scaled.value) in (int, float): scaled.value *= scale elif isinstance(scaled.value, numbers): scaled.value.values = tuple(v * scale for v in scaled.value.values) return scaled
<SYSTEM_TASK:> Get all the mining equipment information available. <END_TASK> <USER_TASK:> Description: def get_mining_equipment(): """Get all the mining equipment information available. Returns: This function returns two major dictionaries. The first one contains information about the coins for which mining equipment data is available. coin_data: {symbol1: {'BlockNumber': ..., 'BlockReward': ..., 'BlockRewardReduction': ..., 'BlockTime': ..., 'DifficultyAdjustment': ..., 'NetHashesPerSecond': ..., 'PreviousTotalCoinsMined': ..., 'PriceUSD': ..., 'Symbol': ..., 'TotalCoinsMined': ...}, symbol2: {...}, ...} The other one contains all the available mining equipment. mining_data: {id1: {'AffiliateURL': ..., 'Algorithm': ..., 'Company': ..., 'Cost': ..., 'CurrenciesAvailable': ..., 'CurrenciesAvailableLogo': ..., 'CurrenciesAvailableName': ..., 'Currency': ..., 'EquipmentType': ..., 'HashesPerSecond': ..., 'Id': ..., 'LogoUrl': ..., 'Name': ..., 'ParentId': ..., 'PowerConsumption': ..., 'Recommended': ..., 'Sponsored': ..., 'Url': ...}, id2: {...}, ...} """
# load data url = build_url('miningequipment') data = load_data(url) coin_data = data['CoinData'] mining_data = data['MiningData'] return coin_data, mining_data
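A minimal usage sketch (requires network access to the API behind build_url):

coin_data, mining_data = get_mining_equipment()
print(len(mining_data), 'pieces of equipment across', len(coin_data), 'coins')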
<SYSTEM_TASK:> Given an input layers file and an output path, write the compiled <END_TASK> <USER_TASK:> Description: def main(src_file, dest_file, **kwargs): """ Given an input layers file and an output path, write the compiled Mapnik XML to dest_file and save any encountered external image files relative to its directory. """
mmap = mapnik.Map(1, 1) # allow [zoom] filters to work mmap.srs = '+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null' load_kwargs = dict([(k, v) for (k, v) in kwargs.items() if k in ('cache_dir', 'scale', 'verbose', 'datasources_cfg', 'user_styles')]) cascadenik.load_map(mmap, src_file, dirname(realpath(dest_file)), **load_kwargs) (handle, tmp_file) = tempfile.mkstemp(suffix='.xml', prefix='cascadenik-mapnik-') os.close(handle) mapnik.save_map(mmap, tmp_file) if kwargs.get('pretty'): doc = ElementTree.fromstring(open(tmp_file, 'rb').read()) cascadenik._compile.indent(doc) f = open(tmp_file, 'wb') ElementTree.ElementTree(doc).write(f) f.close() # manually unlinking seems to be required on windows if os.path.exists(dest_file): os.unlink(dest_file) os.chmod(tmp_file, 0666^os.umask(0)) shutil.move(tmp_file, dest_file) return 0