prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>spectral.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Algorithms for spectral clustering"""
# Author: Gael Varoquaux [email protected]
# Brian Cheung
# Wei LI <[email protected]>
# License: BSD 3 clause
import warnings
import numpy as np
from ..base import BaseEstimator, ClusterMixin
from ..utils import check_random_state, as_float_array
from ..utils.validation import check_array
from ..utils.extmath import norm
from ..metrics.pairwise import pairwise_kernels
from ..neighbors import kneighbors_graph
from ..manifold import spectral_embedding
from .k_means_ import k_means
def discretize(vectors, copy=True, max_svd_restarts=30, n_iter_max=20,
random_state=None):
"""Search for a partition matrix (clustering) which is closest to the
eigenvector embedding.
Parameters
----------
vectors : array-like, shape: (n_samples, n_clusters)
The embedding space of the samples.
copy : boolean, optional, default: True
Whether to copy vectors, or perform in-place normalization.
    max_svd_restarts : int, optional, default: 30
        Maximum number of attempts to restart SVD if convergence fails.
    n_iter_max : int, optional, default: 20
        Maximum number of iterations to attempt in rotation and partition
        matrix search if machine precision convergence is not reached.
random_state : int, RandomState instance or None, optional, default: None
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Returns
-------
labels : array of integers, shape: n_samples
The labels of the clusters.
References
----------
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
Notes
-----
The eigenvector embedding is used to iteratively search for the
closest discrete partition. First, the eigenvector embedding is
normalized to the space of partition matrices. An optimal discrete
partition matrix closest to this normalized embedding multiplied by
an initial rotation is calculated. Fixing this discrete partition
matrix, an optimal rotation matrix is calculated. These two
calculations are performed until convergence. The discrete partition
matrix is returned as the clustering solution. Used in spectral
clustering, this method tends to be faster and more robust to random
initialization than k-means.
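    Examples
    --------
    A minimal usage sketch (illustrative; in spectral clustering the
    embedding would normally come from ``spectral_embedding``)::

        import numpy as np
        # two well-separated groups in a 2-D embedding space
        embedding = np.array([[1., 0.]] * 5 + [[0., 1.]] * 5)
        labels = discretize(embedding, random_state=0)
        # expected: one cluster for the first five samples and another
        # for the last five (cluster ids may be swapped)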
"""
from scipy.sparse import csc_matrix
from scipy.linalg import LinAlgError
random_state = check_random_state(random_state)
vectors = as_float_array(vectors, copy=copy)
eps = np.finfo(float).eps
n_samples, n_components = vectors.shape
# Normalize the eigenvectors to an equal length of a vector of ones.
# Reorient the eigenvectors to point in the negative direction with respect
# to the first element. This may have to do with constraining the
# eigenvectors to lie in a specific quadrant to make the discretization
# search easier.
norm_ones = np.sqrt(n_samples)
for i in range(vectors.shape[1]):
vectors[:, i] = (vectors[:, i] / norm(vectors[:, i])) \
* norm_ones
if vectors[0, i] != 0:
vectors[:, i] = -1 * vectors[:, i] * np.sign(vectors[0, i])
# Normalize the rows of the eigenvectors. Samples should lie on the unit
# hypersphere centered at the origin. This transforms the samples in the
# embedding space to the space of partition matrices.
vectors = vectors / np.sqrt((vectors ** 2).sum(axis=1))[:, np.newaxis]
svd_restarts = 0
has_converged = False
# If there is an exception we try to randomize and rerun SVD again
# do this max_svd_restarts times.
while (svd_restarts < max_svd_restarts) and not has_converged:
# Initialize first column of rotation matrix with a row of the
# eigenvectors
rotation = np.zeros((n_components, n_components))
rotation[:, 0] = vectors[random_state.randint(n_samples), :].T
# To initialize the rest of the rotation matrix, find the rows
# of the eigenvectors that are as orthogonal to each other as
# possible
c = np.zeros(n_samples)
for j in range(1, n_components):
# Accumulate c to ensure row is as orthogonal as possible to
# previous picks as well as current one
c += np.abs(np.dot(vectors, rotation[:, j - 1]))
rotation[:, j] = vectors[c.argmin(), :].T
last_objective_value = 0.0
n_iter = 0
while not has_converged:
n_iter += 1
t_discrete = np.dot(vectors, rotation)
labels = t_discrete.argmax(axis=1)
vectors_discrete = csc_matrix(
(np.ones(len(labels)), (np.arange(0, n_samples), labels)),
shape=(n_samples, n_components))
t_svd = vectors_discrete.T * vectors
            try:
                U, S, Vh = np.linalg.svd(t_svd)
            except LinAlgError:
                # Count this failed attempt; the outer loop re-randomizes
                # the rotation and retries up to max_svd_restarts times.
                svd_restarts += 1
                print("SVD did not converge, randomizing and trying again")
                break
ncut_value = 2.0 * (n_samples - S.sum())
if ((abs(ncut_value - last_objective_value) < eps) or
(n_iter > n_iter_max)):
has_converged = True
else:
# otherwise calculate rotation and continue
last_objective_value = ncut_value
rotation = np.dot(Vh.T, U.T)
if not has_converged:
raise LinAlgError('SVD did not converge')
return labels
def spectral_clustering(affinity, n_clusters=8, n_components=None,
eigen_solver=None, random_state=None, n_init=10,
eigen_tol=0.0, assign_labels='kmeans'):
"""Apply clustering to a projection to the normalized laplacian.
In practice Spectral Clustering is very useful when the structure of
the individual clusters is highly non-convex or more generally when
a measure of the center and spread of the cluster is not a suitable
    description of the complete cluster. For instance, when clusters are
    nested circles in the 2D plane.
If affinity is the adjacency matrix of a graph, this method can be
used to find normalized graph cuts.
Read more in the :ref:`User Guide <spectral_clustering>`.
Parameters
-----------
affinity : array-like or sparse matrix, shape: (n_samples, n_samples)
The affinity matrix describing the relationship of the samples to
embed. **Must be symmetric**.
Possible examples:
- adjacency matrix of a graph,
- heat kernel of the pairwise distance matrix of the samples,
- symmetric k-nearest neighbours connectivity matrix of the samples.
n_clusters : integer, optional
Number of clusters to extract.
n_components : integer, optional, default is n_clusters
Number of eigen vectors to use for the spectral embedding
eigen_solver : {None, 'arpack', 'lobpcg', or 'amg'}
The eigenvalue decomposition strategy to use. AMG requires pyamg
to be installed. It can be faster on very large, sparse problems,
but may also lead to instabilities
random_state : int, RandomState instance or None, optional, default: None
A pseudo random number generator used for the initialization of the
lobpcg eigen vectors decomposition when eigen_solver == 'amg' and by
the K-Means initialization. If int, random_state is the seed used by
the random number generator; If RandomState instance, random_state is
the random number generator; If None, the random number generator is
the RandomState instance used by `np.random`.
n_init : int, optional, default: 10
        Number of times the k-means algorithm will be run with different
centroid seeds. The final results will be the best output of
n_init consecutive runs in terms of inertia.
eigen_tol : float, optional, default: 0.0
Stopping criterion for eigendecomposition of the Laplacian matrix
when using arpack eigen_solver.
assign_labels : {'kmeans', 'discretize'}, default: 'kmeans'
The strategy to use to assign labels in the embedding
space. There are two ways to assign labels after the laplacian
embedding. k-means can be applied and is a popular choice. But it can
also be sensitive to initialization. Discretization is another
approach which is less sensitive to random initialization. See
the 'Multiclass spectral clustering' paper referenced below for
more details on the discretization approach.
Returns
-------
labels : array of integers, shape: n_samples
The labels of the clusters.
References
----------
- Normalized cuts and image segmentation, 2000
Jianbo Shi, Jitendra Malik
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.160.2324
- A Tutorial on Spectral Clustering, 2007
Ulrike von Luxburg
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.165.9323
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
Notes
------
    The graph should contain only one connected component; otherwise
    the results make little sense.
This algorithm solves the normalized cut for k=2: it is a
normalized spectral clustering.
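    Examples
    --------
    A hedged sketch using a hand-built RBF affinity (the data and the
    ``gamma`` value are illustrative assumptions, not recommendations)::

        import numpy as np
        from sklearn.metrics.pairwise import rbf_kernel
        X = np.array([[0., 0.], [0.1, 0.], [5., 5.], [5.1, 5.]])
        affinity = rbf_kernel(X, gamma=1.0)  # symmetric similarity matrix
        labels = spectral_clustering(affinity, n_clusters=2, random_state=0)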
"""
if assign_labels not in ('kmeans', 'discretize'):
raise ValueError("The 'assign_labels' parameter should be "
"'kmeans' or 'discretize', but '%s' was given"
% assign_labels)
random_state = check_random_state(random_state)
n_components = n_clusters if n_components is None else n_components
maps = spectral_embedding(affinity, n_components=n_components,
eigen_solver=eigen_solver,
random_state=random_state,
eigen_tol=eigen_tol, drop_first=False)
if assign_labels == 'kmeans':
_, labels, _ = k_means(maps, n_clusters, random_state=random_state,
n_init=n_init)
else:
labels = discretize(maps, random_state=random_state)
return labels
class SpectralClustering(BaseEstimator, ClusterMixin):
"""Apply clustering to a projection to the normalized laplacian.
In practice Spectral Clustering is very useful when the structure of
the individual clusters is highly non-convex or more generally when
a measure of the center and spread of the cluster is not a suitable
    description of the complete cluster. For instance, when clusters are
    nested circles in the 2D plane.
If affinity is the adjacency matrix of a graph, this method can be
used to find normalized graph cuts.
    When calling ``fit``, an affinity matrix is constructed using either a
    kernel function such as the Gaussian (aka RBF) kernel of the euclidean
    distance ``d(X, X)``::
np.exp(-gamma * d(X,X) ** 2)
or a k-nearest neighbors connectivity matrix.
Alternatively, using ``precomputed``, a user-provided affinity
matrix can be used.
Read more in the :ref:`User Guide <spectral_clustering>`.
Parameters
-----------
n_clusters : integer, optional
The dimension of the projection subspace.
affinity : string, array-like or callable, default 'rbf'
If a string, this may be one of 'nearest_neighbors', 'precomputed',
'rbf' or one of the kernels supported by
`sklearn.metrics.pairwise_kernels`.<|fim▁hole|> by the clustering algorithm.
gamma : float, default=1.0
Kernel coefficient for rbf, poly, sigmoid, laplacian and chi2 kernels.
Ignored for ``affinity='nearest_neighbors'``.
degree : float, default=3
Degree of the polynomial kernel. Ignored by other kernels.
coef0 : float, default=1
Zero coefficient for polynomial and sigmoid kernels.
Ignored by other kernels.
n_neighbors : integer
Number of neighbors to use when constructing the affinity matrix using
the nearest neighbors method. Ignored for ``affinity='rbf'``.
eigen_solver : {None, 'arpack', 'lobpcg', or 'amg'}
The eigenvalue decomposition strategy to use. AMG requires pyamg
to be installed. It can be faster on very large, sparse problems,
but may also lead to instabilities
random_state : int, RandomState instance or None, optional, default: None
A pseudo random number generator used for the initialization of the
lobpcg eigen vectors decomposition when eigen_solver == 'amg' and by
the K-Means initialization. If int, random_state is the seed used by
the random number generator; If RandomState instance, random_state is
the random number generator; If None, the random number generator is
the RandomState instance used by `np.random`.
n_init : int, optional, default: 10
        Number of times the k-means algorithm will be run with different
centroid seeds. The final results will be the best output of
n_init consecutive runs in terms of inertia.
eigen_tol : float, optional, default: 0.0
Stopping criterion for eigendecomposition of the Laplacian matrix
when using arpack eigen_solver.
assign_labels : {'kmeans', 'discretize'}, default: 'kmeans'
The strategy to use to assign labels in the embedding
space. There are two ways to assign labels after the laplacian
embedding. k-means can be applied and is a popular choice. But it can
also be sensitive to initialization. Discretization is another approach
which is less sensitive to random initialization.
kernel_params : dictionary of string to any, optional
Parameters (keyword arguments) and values for kernel passed as
callable object. Ignored by other kernels.
n_jobs : int, optional (default = 1)
The number of parallel jobs to run.
If ``-1``, then the number of jobs is set to the number of CPU cores.
Attributes
----------
affinity_matrix_ : array-like, shape (n_samples, n_samples)
        Affinity matrix used for clustering. Available only after calling
        ``fit``.
    labels_ : array, shape (n_samples,)
        Labels of each point.
Notes
-----
If you have an affinity matrix, such as a distance matrix,
for which 0 means identical elements, and high values means
very dissimilar elements, it can be transformed in a
similarity matrix that is well suited for the algorithm by
applying the Gaussian (RBF, heat) kernel::
np.exp(- dist_matrix ** 2 / (2. * delta ** 2))
Where ``delta`` is a free parameter representing the width of the Gaussian
kernel.
Another alternative is to take a symmetric version of the k
nearest neighbors connectivity matrix of the points.
If the pyamg package is installed, it is used: this greatly
speeds up computation.
References
----------
- Normalized cuts and image segmentation, 2000
Jianbo Shi, Jitendra Malik
http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.160.2324
- A Tutorial on Spectral Clustering, 2007
Ulrike von Luxburg
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.165.9323
- Multiclass spectral clustering, 2003
Stella X. Yu, Jianbo Shi
http://www1.icsi.berkeley.edu/~stellayu/publication/doc/2003kwayICCV.pdf
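    Examples
    --------
    An illustrative sketch (hyper-parameter values are assumptions, not
    tuned recommendations)::

        import numpy as np
        from sklearn.cluster import SpectralClustering
        X = np.array([[0., 0.], [0.1, 0.], [5., 5.], [5.1, 5.]])
        model = SpectralClustering(n_clusters=2, affinity='rbf',
                                   gamma=1.0, random_state=0)
        labels = model.fit(X).labels_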
"""
def __init__(self, n_clusters=8, eigen_solver=None, random_state=None,
n_init=10, gamma=1., affinity='rbf', n_neighbors=10,
eigen_tol=0.0, assign_labels='kmeans', degree=3, coef0=1,
kernel_params=None, n_jobs=1):
self.n_clusters = n_clusters
self.eigen_solver = eigen_solver
self.random_state = random_state
self.n_init = n_init
self.gamma = gamma
self.affinity = affinity
self.n_neighbors = n_neighbors
self.eigen_tol = eigen_tol
self.assign_labels = assign_labels
self.degree = degree
self.coef0 = coef0
self.kernel_params = kernel_params
self.n_jobs = n_jobs
def fit(self, X, y=None):
"""Creates an affinity matrix for X using the selected affinity,
then applies spectral clustering to this affinity matrix.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
OR, if affinity==`precomputed`, a precomputed affinity
matrix of shape (n_samples, n_samples)
"""
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'],
dtype=np.float64)
if X.shape[0] == X.shape[1] and self.affinity != "precomputed":
warnings.warn("The spectral clustering API has changed. ``fit``"
"now constructs an affinity matrix from data. To use"
" a custom affinity matrix, "
"set ``affinity=precomputed``.")
if self.affinity == 'nearest_neighbors':
            connectivity = kneighbors_graph(X, n_neighbors=self.n_neighbors,
                                            include_self=True,
                                            n_jobs=self.n_jobs)
self.affinity_matrix_ = 0.5 * (connectivity + connectivity.T)
elif self.affinity == 'precomputed':
self.affinity_matrix_ = X
else:
params = self.kernel_params
if params is None:
params = {}
if not callable(self.affinity):
params['gamma'] = self.gamma
params['degree'] = self.degree
params['coef0'] = self.coef0
self.affinity_matrix_ = pairwise_kernels(X, metric=self.affinity,
filter_params=True,
**params)
random_state = check_random_state(self.random_state)
self.labels_ = spectral_clustering(self.affinity_matrix_,
n_clusters=self.n_clusters,
eigen_solver=self.eigen_solver,
random_state=random_state,
n_init=self.n_init,
eigen_tol=self.eigen_tol,
assign_labels=self.assign_labels)
return self
@property
def _pairwise(self):
return self.affinity == "precomputed"<|fim▁end|>
|
Only kernels that produce similarity scores (non-negative values that
increase with similarity) should be used. This property is not checked
|
<|file_name|>mq.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
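# Fanout exchange: every queue bound to 'clock_output' receives a copy of
# each published message, so several clients can react to the same alarm
# events independently.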
channel.exchange_declare(exchange='clock_output', type='fanout')
result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue
channel.queue_bind(exchange='clock_output', queue=queue_name)
print(' [*] Waiting for messages. To exit press CTRL+C')
def select_callback():
print("select message sent")
channel.basic_publish(exchange='clock_output', routing_key='', body='ALARM_STOP')
channel.basic_publish(exchange='clock_output', routing_key='', body='ALARM_CANCEL')
def callback(ch, method, properties, body):
print("message received: {0}".format(body))
if body == "ALARM_START":
items = ("It's sunny today", "Meeting at 2pm")
lcd_scroller = LCDLinearScroll(items, select_callback=select_callback)
lcd_scroller.display_message("Scroll through\nmessages")
#lcd_scroller.setup_scroll_events()
channel.basic_consume(callback, queue=queue_name, no_ack=True)
channel.start_consuming()<|fim▁end|>
|
import pika
import pickle
from display import LCDLinearScroll
|
<|file_name|>posix.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! POSIX file path handling
use c_str::{CString, ToCStr};
use clone::Clone;
use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
use hash;
use io::Writer;
use iter::{DoubleEndedIteratorExt, AdditiveIterator, Extend};
use iter::{Iterator, IteratorExt, Map};
use option::{Option, None, Some};
use kinds::Sized;
use str::{FromStr, Str};
use str;
use slice::{CloneSliceAllocPrelude, Splits, AsSlice, VectorVector,
PartialEqSlicePrelude, SlicePrelude};
use vec::Vec;
use super::{BytesContainer, GenericPath, GenericPathUnsafe};
/// Iterator that yields successive components of a Path as &[u8]
pub type Components<'a> = Splits<'a, u8>;
/// Iterator that yields successive components of a Path as Option<&str>
pub type StrComponents<'a> = Map<'a, &'a [u8], Option<&'a str>,
Components<'a>>;
/// Represents a POSIX file path
#[deriving(Clone)]
pub struct Path {
repr: Vec<u8>, // assumed to never be empty or contain NULs
sepidx: Option<uint> // index of the final separator in repr
}
/// The standard path separator character
pub const SEP: char = '/';
/// The standard path separator byte
pub const SEP_BYTE: u8 = SEP as u8;
/// Returns whether the given byte is a path separator
#[inline]
pub fn is_sep_byte(u: &u8) -> bool {
*u as char == SEP
}
/// Returns whether the given char is a path separator
#[inline]
pub fn is_sep(c: char) -> bool {
c == SEP
}
impl PartialEq for Path {
#[inline]
fn eq(&self, other: &Path) -> bool {
self.repr == other.repr
}
}
impl Eq for Path {}
impl PartialOrd for Path {
fn partial_cmp(&self, other: &Path) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for Path {
fn cmp(&self, other: &Path) -> Ordering {
self.repr.cmp(&other.repr)
}
}
impl FromStr for Path {
fn from_str(s: &str) -> Option<Path> {
Path::new_opt(s)
}
}
// FIXME (#12938): Until DST lands, we cannot decompose &str into & and str, so
// we cannot usefully take ToCStr arguments by reference (without forcing an
// additional & around &str). So we are instead temporarily adding an instance
// for &Path, so that we can take ToCStr as owned. When DST lands, the &Path
// instance should be removed, and arguments bound by ToCStr should be passed by
// reference.
impl ToCStr for Path {
#[inline]
fn to_c_str(&self) -> CString {
// The Path impl guarantees no internal NUL
unsafe { self.to_c_str_unchecked() }
}
#[inline]
unsafe fn to_c_str_unchecked(&self) -> CString {
self.as_vec().to_c_str_unchecked()
}
}
impl<S: hash::Writer> hash::Hash<S> for Path {
#[inline]
fn hash(&self, state: &mut S) {
self.repr.hash(state)
}
}
impl BytesContainer for Path {
#[inline]
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
self.as_vec()
}
}
impl GenericPathUnsafe for Path {
unsafe fn new_unchecked<T: BytesContainer>(path: T) -> Path {
let path = Path::normalize(path.container_as_bytes());
assert!(!path.is_empty());
let idx = path.as_slice().rposition_elem(&SEP_BYTE);
Path{ repr: path, sepidx: idx }
}
unsafe fn set_filename_unchecked<T: BytesContainer>(&mut self, filename: T) {
let filename = filename.container_as_bytes();
match self.sepidx {
None if b".." == self.repr.as_slice() => {
let mut v = Vec::with_capacity(3 + filename.len());
v.push_all(dot_dot_static);
v.push(SEP_BYTE);
v.push_all(filename);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
}
None => {
self.repr = Path::normalize(filename);
}
Some(idx) if self.repr[idx+1..] == b".." => {
let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len());
v.push_all(self.repr.as_slice());
v.push(SEP_BYTE);
v.push_all(filename);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
}
Some(idx) => {
let mut v = Vec::with_capacity(idx + 1 + filename.len());
v.push_all(self.repr[..idx+1]);
v.push_all(filename);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
}
}
self.sepidx = self.repr.as_slice().rposition_elem(&SEP_BYTE);
}
unsafe fn push_unchecked<T: BytesContainer>(&mut self, path: T) {
let path = path.container_as_bytes();
if !path.is_empty() {
if path[0] == SEP_BYTE {
self.repr = Path::normalize(path);
} else {
let mut v = Vec::with_capacity(self.repr.len() + path.len() + 1);
v.push_all(self.repr.as_slice());
v.push(SEP_BYTE);
v.push_all(path);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
}
self.sepidx = self.repr.as_slice().rposition_elem(&SEP_BYTE);
}
}
}
impl GenericPath for Path {
#[inline]
fn as_vec<'a>(&'a self) -> &'a [u8] {
self.repr.as_slice()
}
fn into_vec(self) -> Vec<u8> {
self.repr
}
fn dirname<'a>(&'a self) -> &'a [u8] {
match self.sepidx {
None if b".." == self.repr.as_slice() => self.repr.as_slice(),
None => dot_static,
Some(0) => self.repr[..1],
Some(idx) if self.repr[idx+1..] == b".." => self.repr.as_slice(),
Some(idx) => self.repr[..idx]
}
}
fn filename<'a>(&'a self) -> Option<&'a [u8]> {
match self.sepidx {
None if b"." == self.repr.as_slice() ||
b".." == self.repr.as_slice() => None,
None => Some(self.repr.as_slice()),
Some(idx) if self.repr[idx+1..] == b".." => None,
Some(0) if self.repr[1..].is_empty() => None,
Some(idx) => Some(self.repr[idx+1..])
}
}
fn pop(&mut self) -> bool {
match self.sepidx {
None if b"." == self.repr.as_slice() => false,
None => {
self.repr = vec![b'.'];
self.sepidx = None;
true
}
Some(0) if b"/" == self.repr.as_slice() => false,
Some(idx) => {
if idx == 0 {
self.repr.truncate(idx+1);
} else {
self.repr.truncate(idx);
}
self.sepidx = self.repr.as_slice().rposition_elem(&SEP_BYTE);
true
}
}<|fim▁hole|> Some(Path::new("/"))
} else {
None
}
}
#[inline]
fn is_absolute(&self) -> bool {
self.repr[0] == SEP_BYTE
}
fn is_ancestor_of(&self, other: &Path) -> bool {
if self.is_absolute() != other.is_absolute() {
false
} else {
let mut ita = self.components();
let mut itb = other.components();
if b"." == self.repr.as_slice() {
return match itb.next() {
None => true,
Some(b) => b != b".."
};
}
loop {
match (ita.next(), itb.next()) {
(None, _) => break,
(Some(a), Some(b)) if a == b => { continue },
(Some(a), _) if a == b".." => {
// if ita contains only .. components, it's an ancestor
return ita.all(|x| x == b"..");
}
_ => return false
}
}
true
}
}
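    /// Returns the relative path from `base` to `self`, if one exists
    /// (illustrative: "/a/b/c" relative from "/a/b" yields "c").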
fn path_relative_from(&self, base: &Path) -> Option<Path> {
if self.is_absolute() != base.is_absolute() {
if self.is_absolute() {
Some(self.clone())
} else {
None
}
} else {
let mut ita = self.components();
let mut itb = base.components();
let mut comps = vec![];
loop {
match (ita.next(), itb.next()) {
(None, None) => break,
(Some(a), None) => {
comps.push(a);
comps.extend(ita.by_ref());
break;
}
(None, _) => comps.push(dot_dot_static),
(Some(a), Some(b)) if comps.is_empty() && a == b => (),
(Some(a), Some(b)) if b == b"." => comps.push(a),
(Some(_), Some(b)) if b == b".." => return None,
(Some(a), Some(_)) => {
comps.push(dot_dot_static);
for _ in itb {
comps.push(dot_dot_static);
}
comps.push(a);
comps.extend(ita.by_ref());
break;
}
}
}
Some(Path::new(comps.as_slice().connect_vec(&SEP_BYTE)))
}
}
fn ends_with_path(&self, child: &Path) -> bool {
if !child.is_relative() { return false; }
let mut selfit = self.components().rev();
let mut childit = child.components().rev();
loop {
match (selfit.next(), childit.next()) {
(Some(a), Some(b)) => if a != b { return false; },
(Some(_), None) => break,
(None, Some(_)) => return false,
(None, None) => break
}
}
true
}
}
impl Path {
/// Returns a new Path from a byte vector or string
///
/// # Panics
///
/// Panics the task if the vector contains a NUL.
#[inline]
pub fn new<T: BytesContainer>(path: T) -> Path {
GenericPath::new(path)
}
/// Returns a new Path from a byte vector or string, if possible
#[inline]
pub fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
GenericPath::new_opt(path)
}
/// Returns a normalized byte vector representation of a path, by removing all empty
/// components, and unnecessary . and .. components.
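    /// (Illustrative: b"a/./b//c" normalizes to b"a/b/c".)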
fn normalize<Sized? V: AsSlice<u8>>(v: &V) -> Vec<u8> {
// borrowck is being very picky
let val = {
let is_abs = !v.as_slice().is_empty() && v.as_slice()[0] == SEP_BYTE;
let v_ = if is_abs { v.as_slice()[1..] } else { v.as_slice() };
let comps = normalize_helper(v_, is_abs);
match comps {
None => None,
Some(comps) => {
if is_abs && comps.is_empty() {
Some(vec![SEP_BYTE])
} else {
let n = if is_abs { comps.len() } else { comps.len() - 1} +
comps.iter().map(|v| v.len()).sum();
let mut v = Vec::with_capacity(n);
let mut it = comps.into_iter();
if !is_abs {
match it.next() {
None => (),
Some(comp) => v.push_all(comp)
}
}
for comp in it {
v.push(SEP_BYTE);
v.push_all(comp);
}
Some(v)
}
}
}
};
match val {
None => v.as_slice().to_vec(),
Some(val) => val
}
}
/// Returns an iterator that yields each component of the path in turn.
/// Does not distinguish between absolute and relative paths, e.g.
/// /a/b/c and a/b/c yield the same set of components.
/// A path of "/" yields no components. A path of "." yields one component.
pub fn components<'a>(&'a self) -> Components<'a> {
let v = if self.repr[0] == SEP_BYTE {
self.repr[1..]
} else { self.repr.as_slice() };
let mut ret = v.split(is_sep_byte);
if v.is_empty() {
// consume the empty "" component
ret.next();
}
ret
}
/// Returns an iterator that yields each component of the path as Option<&str>.
/// See components() for details.
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
self.components().map(str::from_utf8)
}
}
// None result means the byte vector didn't need normalizing
fn normalize_helper<'a>(v: &'a [u8], is_abs: bool) -> Option<Vec<&'a [u8]>> {
if is_abs && v.as_slice().is_empty() {
return None;
}
let mut comps: Vec<&'a [u8]> = vec![];
let mut n_up = 0u;
let mut changed = false;
for comp in v.split(is_sep_byte) {
if comp.is_empty() { changed = true }
else if comp == b"." { changed = true }
else if comp == b".." {
if is_abs && comps.is_empty() { changed = true }
else if comps.len() == n_up { comps.push(dot_dot_static); n_up += 1 }
else { comps.pop().unwrap(); changed = true }
} else { comps.push(comp) }
}
if changed {
if comps.is_empty() && !is_abs {
if v == b"." {
return None;
}
comps.push(dot_static);
}
Some(comps)
} else {
None
}
}
#[allow(non_upper_case_globals)]
static dot_static: &'static [u8] = b".";
#[allow(non_upper_case_globals)]
static dot_dot_static: &'static [u8] = b"..";
#[cfg(test)]
mod tests {
use prelude::*;
use super::*;
use mem;
use str;
use str::StrPrelude;
macro_rules! t(
(s: $path:expr, $exp:expr) => (
{
let path = $path;
assert!(path.as_str() == Some($exp));
}
);
(v: $path:expr, $exp:expr) => (
{
let path = $path;
assert!(path.as_vec() == $exp);
}
)
)
#[test]
fn test_paths() {
let empty: &[u8] = &[];
t!(v: Path::new(empty), b".");
t!(v: Path::new(b"/"), b"/");
t!(v: Path::new(b"a/b/c"), b"a/b/c");
t!(v: Path::new(b"a/b/c\xFF"), b"a/b/c\xFF");
t!(v: Path::new(b"\xFF/../foo\x80"), b"foo\x80");
let p = Path::new(b"a/b/c\xFF");
assert!(p.as_str() == None);
t!(s: Path::new(""), ".");
t!(s: Path::new("/"), "/");
t!(s: Path::new("hi"), "hi");
t!(s: Path::new("hi/"), "hi");
t!(s: Path::new("/lib"), "/lib");
t!(s: Path::new("/lib/"), "/lib");
t!(s: Path::new("hi/there"), "hi/there");
t!(s: Path::new("hi/there.txt"), "hi/there.txt");
t!(s: Path::new("hi/there/"), "hi/there");
t!(s: Path::new("hi/../there"), "there");
t!(s: Path::new("../hi/there"), "../hi/there");
t!(s: Path::new("/../hi/there"), "/hi/there");
t!(s: Path::new("foo/.."), ".");
t!(s: Path::new("/foo/.."), "/");
t!(s: Path::new("/foo/../.."), "/");
t!(s: Path::new("/foo/../../bar"), "/bar");
t!(s: Path::new("/./hi/./there/."), "/hi/there");
t!(s: Path::new("/./hi/./there/./.."), "/hi");
t!(s: Path::new("foo/../.."), "..");
t!(s: Path::new("foo/../../.."), "../..");
t!(s: Path::new("foo/../../bar"), "../bar");
assert_eq!(Path::new(b"foo/bar").into_vec().as_slice(), b"foo/bar");
assert_eq!(Path::new(b"/foo/../../bar").into_vec().as_slice(),
b"/bar");
let p = Path::new(b"foo/bar\x80");
assert!(p.as_str() == None);
}
#[test]
fn test_opt_paths() {
assert!(Path::new_opt(b"foo/bar\0") == None);
t!(v: Path::new_opt(b"foo/bar").unwrap(), b"foo/bar");
assert!(Path::new_opt("foo/bar\0") == None);
t!(s: Path::new_opt("foo/bar").unwrap(), "foo/bar");
}
#[test]
fn test_null_byte() {
use task;
let result = task::try(proc() {
Path::new(b"foo/bar\0")
});
assert!(result.is_err());
let result = task::try(proc() {
Path::new("test").set_filename(b"f\0o")
});
assert!(result.is_err());
let result = task::try(proc() {
Path::new("test").push(b"f\0o");
});
assert!(result.is_err());
}
#[test]
fn test_display_str() {
macro_rules! t(
($path:expr, $disp:ident, $exp:expr) => (
{
let path = Path::new($path);
assert!(path.$disp().to_string().as_slice() == $exp);
}
)
)
t!("foo", display, "foo");
t!(b"foo\x80", display, "foo\uFFFD");
t!(b"foo\xFFbar", display, "foo\uFFFDbar");
t!(b"foo\xFF/bar", filename_display, "bar");
t!(b"foo/\xFFbar", filename_display, "\uFFFDbar");
t!(b"/", filename_display, "");
macro_rules! t(
($path:expr, $exp:expr) => (
{
let path = Path::new($path);
let mo = path.display().as_cow();
assert!(mo.as_slice() == $exp);
}
);
($path:expr, $exp:expr, filename) => (
{
let path = Path::new($path);
let mo = path.filename_display().as_cow();
assert!(mo.as_slice() == $exp);
}
)
)
t!("foo", "foo");
t!(b"foo\x80", "foo\uFFFD");
t!(b"foo\xFFbar", "foo\uFFFDbar");
t!(b"foo\xFF/bar", "bar", filename);
t!(b"foo/\xFFbar", "\uFFFDbar", filename);
t!(b"/", "", filename);
}
#[test]
fn test_display() {
macro_rules! t(
($path:expr, $exp:expr, $expf:expr) => (
{
let path = Path::new($path);
let f = format!("{}", path.display());
assert!(f.as_slice() == $exp);
let f = format!("{}", path.filename_display());
assert!(f.as_slice() == $expf);
}
)
)
t!(b"foo", "foo", "foo");
t!(b"foo/bar", "foo/bar", "bar");
t!(b"/", "/", "");
t!(b"foo\xFF", "foo\uFFFD", "foo\uFFFD");
t!(b"foo\xFF/bar", "foo\uFFFD/bar", "bar");
t!(b"foo/\xFFbar", "foo/\uFFFDbar", "\uFFFDbar");
t!(b"\xFFfoo/bar\xFF", "\uFFFDfoo/bar\uFFFD", "bar\uFFFD");
}
#[test]
fn test_components() {
macro_rules! t(
(s: $path:expr, $op:ident, $exp:expr) => (
{
unsafe {
let path = Path::new($path);
assert!(path.$op() == mem::transmute(($exp).as_bytes()));
}
}
);
(s: $path:expr, $op:ident, $exp:expr, opt) => (
{
let path = Path::new($path);
let left = path.$op().map(|x| str::from_utf8(x).unwrap());
assert!(left == $exp);
}
);
(v: $path:expr, $op:ident, $exp:expr) => (
{
unsafe {
let arg = $path;
let path = Path::new(arg);
assert!(path.$op() == mem::transmute($exp));
}
}
);
)
t!(v: b"a/b/c", filename, Some(b"c"));
t!(v: b"a/b/c\xFF", filename, Some(b"c\xFF"));
t!(v: b"a/b\xFF/c", filename, Some(b"c"));
t!(s: "a/b/c", filename, Some("c"), opt);
t!(s: "/a/b/c", filename, Some("c"), opt);
t!(s: "a", filename, Some("a"), opt);
t!(s: "/a", filename, Some("a"), opt);
t!(s: ".", filename, None, opt);
t!(s: "/", filename, None, opt);
t!(s: "..", filename, None, opt);
t!(s: "../..", filename, None, opt);
t!(v: b"a/b/c", dirname, b"a/b");
t!(v: b"a/b/c\xFF", dirname, b"a/b");
t!(v: b"a/b\xFF/c", dirname, b"a/b\xFF");
t!(s: "a/b/c", dirname, "a/b");
t!(s: "/a/b/c", dirname, "/a/b");
t!(s: "a", dirname, ".");
t!(s: "/a", dirname, "/");
t!(s: ".", dirname, ".");
t!(s: "/", dirname, "/");
t!(s: "..", dirname, "..");
t!(s: "../..", dirname, "../..");
t!(v: b"hi/there.txt", filestem, Some(b"there"));
t!(v: b"hi/there\x80.txt", filestem, Some(b"there\x80"));
t!(v: b"hi/there.t\x80xt", filestem, Some(b"there"));
t!(s: "hi/there.txt", filestem, Some("there"), opt);
t!(s: "hi/there", filestem, Some("there"), opt);
t!(s: "there.txt", filestem, Some("there"), opt);
t!(s: "there", filestem, Some("there"), opt);
t!(s: ".", filestem, None, opt);
t!(s: "/", filestem, None, opt);
t!(s: "foo/.bar", filestem, Some(".bar"), opt);
t!(s: ".bar", filestem, Some(".bar"), opt);
t!(s: "..bar", filestem, Some("."), opt);
t!(s: "hi/there..txt", filestem, Some("there."), opt);
t!(s: "..", filestem, None, opt);
t!(s: "../..", filestem, None, opt);
t!(v: b"hi/there.txt", extension, Some(b"txt"));
t!(v: b"hi/there\x80.txt", extension, Some(b"txt"));
t!(v: b"hi/there.t\x80xt", extension, Some(b"t\x80xt"));
let no: Option<&'static [u8]> = None;
t!(v: b"hi/there", extension, no);
t!(v: b"hi/there\x80", extension, no);
t!(s: "hi/there.txt", extension, Some("txt"), opt);
t!(s: "hi/there", extension, None, opt);
t!(s: "there.txt", extension, Some("txt"), opt);
t!(s: "there", extension, None, opt);
t!(s: ".", extension, None, opt);
t!(s: "/", extension, None, opt);
t!(s: "foo/.bar", extension, None, opt);
t!(s: ".bar", extension, None, opt);
t!(s: "..bar", extension, Some("bar"), opt);
t!(s: "hi/there..txt", extension, Some("txt"), opt);
t!(s: "..", extension, None, opt);
t!(s: "../..", extension, None, opt);
}
#[test]
fn test_push() {
macro_rules! t(
(s: $path:expr, $join:expr) => (
{
let path = $path;
let join = $join;
let mut p1 = Path::new(path);
let p2 = p1.clone();
p1.push(join);
assert!(p1 == p2.join(join));
}
)
)
t!(s: "a/b/c", "..");
t!(s: "/a/b/c", "d");
t!(s: "a/b", "c/d");
t!(s: "a/b", "/c/d");
}
#[test]
fn test_push_path() {
macro_rules! t(
(s: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
let push = Path::new($push);
p.push(&push);
assert!(p.as_str() == Some($exp));
}
)
)
t!(s: "a/b/c", "d", "a/b/c/d");
t!(s: "/a/b/c", "d", "/a/b/c/d");
t!(s: "a/b", "c/d", "a/b/c/d");
t!(s: "a/b", "/c/d", "/c/d");
t!(s: "a/b", ".", "a/b");
t!(s: "a/b", "../c", "a/c");
}
#[test]
fn test_push_many() {
macro_rules! t(
(s: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many(&$push);
assert!(p.as_str() == Some($exp));
}
);
(v: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many(&$push);
assert!(p.as_vec() == $exp);
}
)
)
t!(s: "a/b/c", ["d", "e"], "a/b/c/d/e");
t!(s: "a/b/c", ["d", "/e"], "/e");
t!(s: "a/b/c", ["d", "/e", "f"], "/e/f");
t!(s: "a/b/c", ["d".to_string(), "e".to_string()], "a/b/c/d/e");
t!(v: b"a/b/c", [b"d", b"e"], b"a/b/c/d/e");
t!(v: b"a/b/c", [b"d", b"/e", b"f"], b"/e/f");
t!(v: b"a/b/c", [b"d".to_vec(), b"e".to_vec()], b"a/b/c/d/e");
}
#[test]
fn test_pop() {
macro_rules! t(
(s: $path:expr, $left:expr, $right:expr) => (
{
let mut p = Path::new($path);
let result = p.pop();
assert!(p.as_str() == Some($left));
assert!(result == $right);
}
);
(b: $path:expr, $left:expr, $right:expr) => (
{
let mut p = Path::new($path);
let result = p.pop();
assert!(p.as_vec() == $left);
assert!(result == $right);
}
)
)
t!(b: b"a/b/c", b"a/b", true);
t!(b: b"a", b".", true);
t!(b: b".", b".", false);
t!(b: b"/a", b"/", true);
t!(b: b"/", b"/", false);
t!(b: b"a/b/c\x80", b"a/b", true);
t!(b: b"a/b\x80/c", b"a/b\x80", true);
t!(b: b"\xFF", b".", true);
t!(b: b"/\xFF", b"/", true);
t!(s: "a/b/c", "a/b", true);
t!(s: "a", ".", true);
t!(s: ".", ".", false);
t!(s: "/a", "/", true);
t!(s: "/", "/", false);
}
#[test]
fn test_root_path() {
assert!(Path::new(b"a/b/c").root_path() == None);
assert!(Path::new(b"/a/b/c").root_path() == Some(Path::new("/")));
}
#[test]
fn test_join() {
t!(v: Path::new(b"a/b/c").join(b".."), b"a/b");
t!(v: Path::new(b"/a/b/c").join(b"d"), b"/a/b/c/d");
t!(v: Path::new(b"a/\x80/c").join(b"\xFF"), b"a/\x80/c/\xFF");
t!(s: Path::new("a/b/c").join(".."), "a/b");
t!(s: Path::new("/a/b/c").join("d"), "/a/b/c/d");
t!(s: Path::new("a/b").join("c/d"), "a/b/c/d");
t!(s: Path::new("a/b").join("/c/d"), "/c/d");
t!(s: Path::new(".").join("a/b"), "a/b");
t!(s: Path::new("/").join("a/b"), "/a/b");
}
#[test]
fn test_join_path() {
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let join = Path::new($join);
let res = path.join(&join);
assert!(res.as_str() == Some($exp));
}
)
)
t!(s: "a/b/c", "..", "a/b");
t!(s: "/a/b/c", "d", "/a/b/c/d");
t!(s: "a/b", "c/d", "a/b/c/d");
t!(s: "a/b", "/c/d", "/c/d");
t!(s: ".", "a/b", "a/b");
t!(s: "/", "a/b", "/a/b");
}
#[test]
fn test_join_many() {
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many(&$join);
assert!(res.as_str() == Some($exp));
}
);
(v: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many(&$join);
assert!(res.as_vec() == $exp);
}
)
)
t!(s: "a/b/c", ["d", "e"], "a/b/c/d/e");
t!(s: "a/b/c", ["..", "d"], "a/b/d");
t!(s: "a/b/c", ["d", "/e", "f"], "/e/f");
t!(s: "a/b/c", ["d".to_string(), "e".to_string()], "a/b/c/d/e");
t!(v: b"a/b/c", [b"d", b"e"], b"a/b/c/d/e");
t!(v: b"a/b/c", [b"d".to_vec(), b"e".to_vec()], b"a/b/c/d/e");
}
#[test]
fn test_with_helpers() {
let empty: &[u8] = &[];
t!(v: Path::new(b"a/b/c").with_filename(b"d"), b"a/b/d");
t!(v: Path::new(b"a/b/c\xFF").with_filename(b"\x80"), b"a/b/\x80");
t!(v: Path::new(b"/\xFF/foo").with_filename(b"\xCD"),
b"/\xFF/\xCD");
t!(s: Path::new("a/b/c").with_filename("d"), "a/b/d");
t!(s: Path::new(".").with_filename("foo"), "foo");
t!(s: Path::new("/a/b/c").with_filename("d"), "/a/b/d");
t!(s: Path::new("/").with_filename("foo"), "/foo");
t!(s: Path::new("/a").with_filename("foo"), "/foo");
t!(s: Path::new("foo").with_filename("bar"), "bar");
t!(s: Path::new("/").with_filename("foo/"), "/foo");
t!(s: Path::new("/a").with_filename("foo/"), "/foo");
t!(s: Path::new("a/b/c").with_filename(""), "a/b");
t!(s: Path::new("a/b/c").with_filename("."), "a/b");
t!(s: Path::new("a/b/c").with_filename(".."), "a");
t!(s: Path::new("/a").with_filename(""), "/");
t!(s: Path::new("foo").with_filename(""), ".");
t!(s: Path::new("a/b/c").with_filename("d/e"), "a/b/d/e");
t!(s: Path::new("a/b/c").with_filename("/d"), "a/b/d");
t!(s: Path::new("..").with_filename("foo"), "../foo");
t!(s: Path::new("../..").with_filename("foo"), "../../foo");
t!(s: Path::new("..").with_filename(""), "..");
t!(s: Path::new("../..").with_filename(""), "../..");
t!(v: Path::new(b"hi/there\x80.txt").with_extension(b"exe"),
b"hi/there\x80.exe");
t!(v: Path::new(b"hi/there.txt\x80").with_extension(b"\xFF"),
b"hi/there.\xFF");
t!(v: Path::new(b"hi/there\x80").with_extension(b"\xFF"),
b"hi/there\x80.\xFF");
t!(v: Path::new(b"hi/there.\xFF").with_extension(empty), b"hi/there");
t!(s: Path::new("hi/there.txt").with_extension("exe"), "hi/there.exe");
t!(s: Path::new("hi/there.txt").with_extension(""), "hi/there");
t!(s: Path::new("hi/there.txt").with_extension("."), "hi/there..");
t!(s: Path::new("hi/there.txt").with_extension(".."), "hi/there...");
t!(s: Path::new("hi/there").with_extension("txt"), "hi/there.txt");
t!(s: Path::new("hi/there").with_extension("."), "hi/there..");
t!(s: Path::new("hi/there").with_extension(".."), "hi/there...");
t!(s: Path::new("hi/there.").with_extension("txt"), "hi/there.txt");
t!(s: Path::new("hi/.foo").with_extension("txt"), "hi/.foo.txt");
t!(s: Path::new("hi/there.txt").with_extension(".foo"), "hi/there..foo");
t!(s: Path::new("/").with_extension("txt"), "/");
t!(s: Path::new("/").with_extension("."), "/");
t!(s: Path::new("/").with_extension(".."), "/");
t!(s: Path::new(".").with_extension("txt"), ".");
}
#[test]
fn test_setters() {
macro_rules! t(
(s: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert!(p1 == p2.$with(arg));
}
);
(v: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert!(p1 == p2.$with(arg));
}
)
)
t!(v: b"a/b/c", set_filename, with_filename, b"d");
t!(v: b"/", set_filename, with_filename, b"foo");
t!(v: b"\x80", set_filename, with_filename, b"\xFF");
t!(s: "a/b/c", set_filename, with_filename, "d");
t!(s: "/", set_filename, with_filename, "foo");
t!(s: ".", set_filename, with_filename, "foo");
t!(s: "a/b", set_filename, with_filename, "");
t!(s: "a", set_filename, with_filename, "");
t!(v: b"hi/there.txt", set_extension, with_extension, b"exe");
t!(v: b"hi/there.t\x80xt", set_extension, with_extension, b"exe\xFF");
t!(s: "hi/there.txt", set_extension, with_extension, "exe");
t!(s: "hi/there.", set_extension, with_extension, "txt");
t!(s: "hi/there", set_extension, with_extension, "txt");
t!(s: "hi/there.txt", set_extension, with_extension, "");
t!(s: "hi/there", set_extension, with_extension, "");
t!(s: ".", set_extension, with_extension, "txt");
}
#[test]
fn test_getters() {
macro_rules! t(
(s: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
unsafe {
let path = $path;
let filename = $filename;
assert!(path.filename_str() == filename,
"{}.filename_str(): Expected `{}`, found {}",
path.as_str().unwrap(), filename, path.filename_str());
let dirname = $dirname;
assert!(path.dirname_str() == dirname,
"`{}`.dirname_str(): Expected `{}`, found `{}`",
path.as_str().unwrap(), dirname, path.dirname_str());
let filestem = $filestem;
assert!(path.filestem_str() == filestem,
"`{}`.filestem_str(): Expected `{}`, found `{}`",
path.as_str().unwrap(), filestem, path.filestem_str());
let ext = $ext;
assert!(path.extension_str() == mem::transmute(ext),
"`{}`.extension_str(): Expected `{}`, found `{}`",
path.as_str().unwrap(), ext, path.extension_str());
}
}
);
(v: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
unsafe {
let path = $path;
assert!(path.filename() == mem::transmute($filename));
assert!(path.dirname() == mem::transmute($dirname));
assert!(path.filestem() == mem::transmute($filestem));
assert!(path.extension() == mem::transmute($ext));
}
}
)
)
let no: Option<&'static str> = None;
t!(v: Path::new(b"a/b/c"), Some(b"c"), b"a/b", Some(b"c"), no);
t!(v: Path::new(b"a/b/\xFF"), Some(b"\xFF"), b"a/b", Some(b"\xFF"), no);
t!(v: Path::new(b"hi/there.\xFF"), Some(b"there.\xFF"), b"hi",
Some(b"there"), Some(b"\xFF"));
t!(s: Path::new("a/b/c"), Some("c"), Some("a/b"), Some("c"), no);
t!(s: Path::new("."), None, Some("."), None, no);
t!(s: Path::new("/"), None, Some("/"), None, no);
t!(s: Path::new(".."), None, Some(".."), None, no);
t!(s: Path::new("../.."), None, Some("../.."), None, no);
t!(s: Path::new("hi/there.txt"), Some("there.txt"), Some("hi"),
Some("there"), Some("txt"));
t!(s: Path::new("hi/there"), Some("there"), Some("hi"), Some("there"), no);
t!(s: Path::new("hi/there."), Some("there."), Some("hi"),
Some("there"), Some(""));
t!(s: Path::new("hi/.there"), Some(".there"), Some("hi"), Some(".there"), no);
t!(s: Path::new("hi/..there"), Some("..there"), Some("hi"),
Some("."), Some("there"));
t!(s: Path::new(b"a/b/\xFF"), None, Some("a/b"), None, no);
t!(s: Path::new(b"a/b/\xFF.txt"), None, Some("a/b"), None, Some("txt"));
t!(s: Path::new(b"a/b/c.\x80"), None, Some("a/b"), Some("c"), no);
t!(s: Path::new(b"\xFF/b"), Some("b"), None, Some("b"), no);
}
#[test]
fn test_dir_path() {
t!(v: Path::new(b"hi/there\x80").dir_path(), b"hi");
t!(v: Path::new(b"hi\xFF/there").dir_path(), b"hi\xFF");
t!(s: Path::new("hi/there").dir_path(), "hi");
t!(s: Path::new("hi").dir_path(), ".");
t!(s: Path::new("/hi").dir_path(), "/");
t!(s: Path::new("/").dir_path(), "/");
t!(s: Path::new("..").dir_path(), "..");
t!(s: Path::new("../..").dir_path(), "../..");
}
#[test]
fn test_is_absolute() {
macro_rules! t(
(s: $path:expr, $abs:expr, $rel:expr) => (
{
let path = Path::new($path);
assert_eq!(path.is_absolute(), $abs);
assert_eq!(path.is_relative(), $rel);
}
)
)
t!(s: "a/b/c", false, true);
t!(s: "/a/b/c", true, false);
t!(s: "a", false, true);
t!(s: "/a", true, false);
t!(s: ".", false, true);
t!(s: "/", true, false);
t!(s: "..", false, true);
t!(s: "../..", false, true);
}
#[test]
fn test_is_ancestor_of() {
macro_rules! t(
(s: $path:expr, $dest:expr, $exp:expr) => (
{
let path = Path::new($path);
let dest = Path::new($dest);
assert_eq!(path.is_ancestor_of(&dest), $exp);
}
)
)
t!(s: "a/b/c", "a/b/c/d", true);
t!(s: "a/b/c", "a/b/c", true);
t!(s: "a/b/c", "a/b", false);
t!(s: "/a/b/c", "/a/b/c", true);
t!(s: "/a/b", "/a/b/c", true);
t!(s: "/a/b/c/d", "/a/b/c", false);
t!(s: "/a/b", "a/b/c", false);
t!(s: "a/b", "/a/b/c", false);
t!(s: "a/b/c", "a/b/d", false);
t!(s: "../a/b/c", "a/b/c", false);
t!(s: "a/b/c", "../a/b/c", false);
t!(s: "a/b/c", "a/b/cd", false);
t!(s: "a/b/cd", "a/b/c", false);
t!(s: "../a/b", "../a/b/c", true);
t!(s: ".", "a/b", true);
t!(s: ".", ".", true);
t!(s: "/", "/", true);
t!(s: "/", "/a/b", true);
t!(s: "..", "a/b", true);
t!(s: "../..", "a/b", true);
}
#[test]
fn test_ends_with_path() {
macro_rules! t(
(s: $path:expr, $child:expr, $exp:expr) => (
{
let path = Path::new($path);
let child = Path::new($child);
assert_eq!(path.ends_with_path(&child), $exp);
}
);
(v: $path:expr, $child:expr, $exp:expr) => (
{
let path = Path::new($path);
let child = Path::new($child);
assert_eq!(path.ends_with_path(&child), $exp);
}
)
)
t!(s: "a/b/c", "c", true);
t!(s: "a/b/c", "d", false);
t!(s: "foo/bar/quux", "bar", false);
t!(s: "foo/bar/quux", "barquux", false);
t!(s: "a/b/c", "b/c", true);
t!(s: "a/b/c", "a/b/c", true);
t!(s: "a/b/c", "foo/a/b/c", false);
t!(s: "/a/b/c", "a/b/c", true);
t!(s: "/a/b/c", "/a/b/c", false); // child must be relative
t!(s: "/a/b/c", "foo/a/b/c", false);
t!(s: "a/b/c", "", false);
t!(s: "", "", true);
t!(s: "/a/b/c", "d/e/f", false);
t!(s: "a/b/c", "a/b", false);
t!(s: "a/b/c", "b", false);
t!(v: b"a/b/c", b"b/c", true);
t!(v: b"a/b/\xFF", b"\xFF", true);
t!(v: b"a/b/\xFF", b"b/\xFF", true);
}
#[test]
fn test_path_relative_from() {
macro_rules! t(
(s: $path:expr, $other:expr, $exp:expr) => (
{
let path = Path::new($path);
let other = Path::new($other);
let res = path.path_relative_from(&other);
assert_eq!(res.as_ref().and_then(|x| x.as_str()), $exp);
}
)
)
t!(s: "a/b/c", "a/b", Some("c"));
t!(s: "a/b/c", "a/b/d", Some("../c"));
t!(s: "a/b/c", "a/b/c/d", Some(".."));
t!(s: "a/b/c", "a/b/c", Some("."));
t!(s: "a/b/c", "a/b/c/d/e", Some("../.."));
t!(s: "a/b/c", "a/d/e", Some("../../b/c"));
t!(s: "a/b/c", "d/e/f", Some("../../../a/b/c"));
t!(s: "a/b/c", "/a/b/c", None);
t!(s: "/a/b/c", "a/b/c", Some("/a/b/c"));
t!(s: "/a/b/c", "/a/b/c/d", Some(".."));
t!(s: "/a/b/c", "/a/b", Some("c"));
t!(s: "/a/b/c", "/a/b/c/d/e", Some("../.."));
t!(s: "/a/b/c", "/a/d/e", Some("../../b/c"));
t!(s: "/a/b/c", "/d/e/f", Some("../../../a/b/c"));
t!(s: "hi/there.txt", "hi/there", Some("../there.txt"));
t!(s: ".", "a", Some(".."));
t!(s: ".", "a/b", Some("../.."));
t!(s: ".", ".", Some("."));
t!(s: "a", ".", Some("a"));
t!(s: "a/b", ".", Some("a/b"));
t!(s: "..", ".", Some(".."));
t!(s: "a/b/c", "a/b/c", Some("."));
t!(s: "/a/b/c", "/a/b/c", Some("."));
t!(s: "/", "/", Some("."));
t!(s: "/", ".", Some("/"));
t!(s: "../../a", "b", Some("../../../a"));
t!(s: "a", "../../b", None);
t!(s: "../../a", "../../b", Some("../a"));
t!(s: "../../a", "../../a/b", Some(".."));
t!(s: "../../a/b", "../../a", Some("b"));
}
#[test]
fn test_components_iter() {
macro_rules! t(
(s: $path:expr, $exp:expr) => (
{
let path = Path::new($path);
let comps = path.components().collect::<Vec<&[u8]>>();
let exp: &[&str] = &$exp;
let exps = exp.iter().map(|x| x.as_bytes()).collect::<Vec<&[u8]>>();
assert!(comps == exps, "components: Expected {}, found {}",
comps, exps);
let comps = path.components().rev().collect::<Vec<&[u8]>>();
let exps = exps.into_iter().rev().collect::<Vec<&[u8]>>();
assert!(comps == exps, "rev_components: Expected {}, found {}",
comps, exps);
}
);
(b: $arg:expr, [$($exp:expr),*]) => (
{
let path = Path::new($arg);
let comps = path.components().collect::<Vec<&[u8]>>();
let exp: &[&[u8]] = &[$($exp),*];
assert_eq!(comps.as_slice(), exp);
let comps = path.components().rev().collect::<Vec<&[u8]>>();
let exp = exp.iter().rev().map(|&x|x).collect::<Vec<&[u8]>>();
assert_eq!(comps, exp)
}
)
)
t!(b: b"a/b/c", [b"a", b"b", b"c"]);
t!(b: b"/\xFF/a/\x80", [b"\xFF", b"a", b"\x80"]);
t!(b: b"../../foo\xCDbar", [b"..", b"..", b"foo\xCDbar"]);
t!(s: "a/b/c", ["a", "b", "c"]);
t!(s: "a/b/d", ["a", "b", "d"]);
t!(s: "a/b/cd", ["a", "b", "cd"]);
t!(s: "/a/b/c", ["a", "b", "c"]);
t!(s: "a", ["a"]);
t!(s: "/a", ["a"]);
t!(s: "/", []);
t!(s: ".", ["."]);
t!(s: "..", [".."]);
t!(s: "../..", ["..", ".."]);
t!(s: "../../foo", ["..", "..", "foo"]);
}
#[test]
fn test_str_components() {
macro_rules! t(
(b: $arg:expr, $exp:expr) => (
{
let path = Path::new($arg);
let comps = path.str_components().collect::<Vec<Option<&str>>>();
let exp: &[Option<&str>] = &$exp;
assert_eq!(comps.as_slice(), exp);
let comps = path.str_components().rev().collect::<Vec<Option<&str>>>();
let exp = exp.iter().rev().map(|&x|x).collect::<Vec<Option<&str>>>();
assert_eq!(comps, exp);
}
)
)
t!(b: b"a/b/c", [Some("a"), Some("b"), Some("c")]);
t!(b: b"/\xFF/a/\x80", [None, Some("a"), None]);
t!(b: b"../../foo\xCDbar", [Some(".."), Some(".."), None]);
// str_components is a wrapper around components, so no need to do
// the full set of tests
}
}
#[cfg(test)]
mod bench {
extern crate test;
use self::test::Bencher;
use super::*;
use prelude::*;
#[bench]
fn join_home_dir(b: &mut Bencher) {
let posix_path = Path::new("/");
b.iter(|| {
posix_path.join("home");
});
}
#[bench]
fn join_abs_path_home_dir(b: &mut Bencher) {
let posix_path = Path::new("/");
b.iter(|| {
posix_path.join("/home");
});
}
#[bench]
fn join_many_home_dir(b: &mut Bencher) {
let posix_path = Path::new("/");
b.iter(|| {
posix_path.join_many(&["home"]);
});
}
#[bench]
fn join_many_abs_path_home_dir(b: &mut Bencher) {
let posix_path = Path::new("/");
b.iter(|| {
posix_path.join_many(&["/home"]);
});
}
#[bench]
fn push_home_dir(b: &mut Bencher) {
let mut posix_path = Path::new("/");
b.iter(|| {
posix_path.push("home");
});
}
#[bench]
fn push_abs_path_home_dir(b: &mut Bencher) {
let mut posix_path = Path::new("/");
b.iter(|| {
posix_path.push("/home");
});
}
#[bench]
fn push_many_home_dir(b: &mut Bencher) {
let mut posix_path = Path::new("/");
b.iter(|| {
posix_path.push_many(&["home"]);
});
}
#[bench]
fn push_many_abs_path_home_dir(b: &mut Bencher) {
let mut posix_path = Path::new("/");
b.iter(|| {
posix_path.push_many(&["/home"]);
});
}
#[bench]
fn ends_with_path_home_dir(b: &mut Bencher) {
let posix_home_path = Path::new("/home");
b.iter(|| {
posix_home_path.ends_with_path(&Path::new("home"));
});
}
#[bench]
fn ends_with_path_missmatch_jome_home(b: &mut Bencher) {
let posix_home_path = Path::new("/home");
b.iter(|| {
posix_home_path.ends_with_path(&Path::new("jome"));
});
}
#[bench]
fn is_ancestor_of_path_with_10_dirs(b: &mut Bencher) {
let path = Path::new("/home/1/2/3/4/5/6/7/8/9");
let mut sub = path.clone();
sub.pop();
b.iter(|| {
path.is_ancestor_of(&sub);
});
}
#[bench]
fn path_relative_from_forward(b: &mut Bencher) {
let path = Path::new("/a/b/c");
let mut other = path.clone();
other.pop();
b.iter(|| {
path.path_relative_from(&other);
});
}
#[bench]
fn path_relative_from_same_level(b: &mut Bencher) {
let path = Path::new("/a/b/c");
let mut other = path.clone();
other.pop();
other.push("d");
b.iter(|| {
path.path_relative_from(&other);
});
}
#[bench]
fn path_relative_from_backward(b: &mut Bencher) {
let path = Path::new("/a/b");
let mut other = path.clone();
other.push("c");
b.iter(|| {
path.path_relative_from(&other);
});
}
}<|fim▁end|>
|
}
fn root_path(&self) -> Option<Path> {
if self.is_absolute() {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from mo_dots import wrap
from mo_logs import strings
from pyLibrary.aws import s3
def _key2etl(key):
"""
CONVERT S3 KEY TO ETL HEADER
S3 NAMING CONVENTION: a.b.c WHERE EACH IS A STEP IN THE ETL PROCESS
HOW TO DEAL WITH a->b AS AGGREGATION? b:a.c? b->c is agg: a.c:b
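    ILLUSTRATIVE EXAMPLE (ASSUMING NO EXTENSION TO STRIP): "a.b.c" BECOMES
    {"id": "c", "type": "join", "source": {"id": "b", "type": "join", "source": {"id": "a"}}}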
"""
key = s3.strip_extension(key)
tokens = []
s = 0
i = strings.find(key, [":", "."])
while i < len(key):
tokens.append(key[s:i])
tokens.append(key[i])
s = i + 1
i = strings.find(key, [":", "."], s)
tokens.append(key[s:i])
_reverse_aggs(tokens)
# tokens.reverse()
source = {
"id": format_id(tokens[0])
}
for i in range(2, len(tokens), 2):
source = {
"id": format_id(tokens[i]),
"source": source,
"type": "join" if tokens[i - 1] == "." else "agg"
}
return wrap(source)
def _reverse_aggs(seq):
# SHOW AGGREGATION IN REVERSE ORDER (ASSUME ONLY ONE)
for i in range(1, len(seq), 2):<|fim▁hole|>
def format_id(value):
"""
:param value:
:return: int() IF POSSIBLE
"""
try:
return int(value)
except Exception:
return unicode(value)
def lt(l, r):
"""
    :param l: left key
    :param r: right key
    :return: True if l < r; if either key is None the comparison
             returns True
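    Example (illustrative): lt((1, 2), (1, 3)) is True and
    lt((1, 3), (1, 2)) is False.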
"""
if r is None or l is None:
return True
for ll, rr in zip(l, r):
if ll < rr:
return True
elif ll > rr:
return False
return False<|fim▁end|>
|
if seq[i] == ":":
seq[i - 1], seq[i + 1] = seq[i + 1], seq[i - 1]
|
<|file_name|>DropdownFilter.tsx<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2017 Cloudbase Solutions SRL
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import React from 'react'
import { observer } from 'mobx-react'
import styled from 'styled-components'
import autobind from 'autobind-decorator'
import SearchInput from '../SearchInput'
import Palette from '../../styleUtils/Palette'
import filterImage from './images/filter'
const border = '1px solid rgba(216, 219, 226, 0.4)'
const Wrapper = styled.div<any>`
position: relative;
margin-top: -1px;
`
const Button = styled.div<any>`
width: 16px;
height: 16px;
cursor: pointer;
display: flex;
justify-content: center;
align-items: center;
`
const List = styled.div<any>`
position: absolute;
top: 24px;
right: -7px;
z-index: 9999;
padding: 8px;
background: ${Palette.grayscale[1]};
border-radius: 4px;
border: ${border};
box-shadow: 0 0 4px 0 rgba(32, 34, 52, 0.13);
`
const Tip = styled.div<any>`
position: absolute;
top: -6px;
right: 8px;
width: 10px;
height: 10px;
background: ${Palette.grayscale[1]};
border-top: ${border};
border-left: ${border};
border-bottom: 1px solid transparent;
border-right: 1px solid transparent;
transform: rotate(45deg);
`
const ListItems = styled.div<any>`
width: 199px;
height: 32px;
`
type Props = {
searchPlaceholder?: string,
searchValue?: string,
onSearchChange?: (value: string) => void,
}
type State = {
showDropdownList: boolean
}
@observer
class DropdownFilter extends React.Component<Props, State> {
static defaultProps = {
searchPlaceholder: 'Filter',
}
state: State = {
showDropdownList: false,
}
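  // Tracks whether the most recent mousedown started inside the dropdown,
  // so the window-level mousedown handler doesn't close the list while the
  // user is interacting with it.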
itemMouseDown: boolean | undefined
componentDidMount() {
window.addEventListener('mousedown', this.handlePageClick, false)
}
componentWillUnmount() {
window.removeEventListener('mousedown', this.handlePageClick, false)
}
@autobind
handlePageClick() {
if (!this.itemMouseDown) {
this.setState({ showDropdownList: false })
}
}
handleButtonClick() {
this.setState(prevState => ({ showDropdownList: !prevState.showDropdownList }))
}
handleCloseClick() {
this.setState({ showDropdownList: false })
}
renderList() {
if (!this.state.showDropdownList) {
return null
}
return (
<List
onMouseDown={() => { this.itemMouseDown = true }}
onMouseUp={() => { this.itemMouseDown = false }}
data-test-id="dropdownFilter-list"
>
<Tip />
<ListItems>
<SearchInput
width="100%"
alwaysOpen
placeholder={this.props.searchPlaceholder}
value={this.props.searchValue}
onChange={this.props.onSearchChange}
useFilterIcon
focusOnMount
disablePrimary
onCloseClick={() => { this.handleCloseClick() }}
/>
</ListItems>
</List>
)
}
renderButton() {
return (
<Button
data-test-id="dropdownFilter-button"
onMouseDown={() => { this.itemMouseDown = true }}
onMouseUp={() => { this.itemMouseDown = false }}
onClick={() => { this.handleButtonClick() }}
dangerouslySetInnerHTML={{
__html:
filterImage(this.props.searchValue ? Palette.primary : Palette.grayscale[5]),
}}
/>
)
}
render() {
return (
<Wrapper>
{this.renderButton()}
{this.renderList()}
</Wrapper><|fim▁hole|>export default DropdownFilter<|fim▁end|>
|
)
}
}
|
<|file_name|>contextualTypeWithUnionTypeMembers.js<|end_file_name|><|fim▁begin|>//// [contextualTypeWithUnionTypeMembers.ts]
//When used as a contextual type, a union type U has those members that are present in any of
// its constituent types, with types that are unions of the respective members in the constituent types.
interface I1<T> {
commonMethodType(a: string): string;
commonPropertyType: string;
commonMethodWithTypeParameter(a: T): T;
methodOnlyInI1(a: string): string;
propertyOnlyInI1: string;
}
interface I2<T> {
commonMethodType(a: string): string;
commonPropertyType: string;
commonMethodWithTypeParameter(a: T): T;
methodOnlyInI2(a: string): string;
propertyOnlyInI2: string;
}
// Let S be the set of types in U that has a property P.
// If S is not empty, U has a property P of a union type of the types of P from each type in S.
var i1: I1<number>;
var i2: I2<number>;
var i1Ori2: I1<number> | I2<number> = i1;
var i1Ori2: I1<number> | I2<number> = i2;
var i1Ori2: I1<number> | I2<number> = { // Like i1
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI1: a => a,
propertyOnlyInI1: "Hello",
};
var i1Ori2: I1<number> | I2<number> = { // Like i2
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI2: a => a,
propertyOnlyInI2: "Hello",
};
var i1Ori2: I1<number> | I2<number> = { // Like i1 and i2 both
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI1: a => a,
propertyOnlyInI1: "Hello",
methodOnlyInI2: a => a,
propertyOnlyInI2: "Hello",
};
var arrayI1OrI2: Array<I1<number> | I2<number>> = [i1, i2, { // Like i1
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI1: a => a,
propertyOnlyInI1: "Hello",
},
{ // Like i2
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI2: a => a,
propertyOnlyInI2: "Hello",
}, { // Like i1 and i2 both
commonPropertyType: "hello",
commonMethodType: a=> a,
commonMethodWithTypeParameter: a => a,
methodOnlyInI1: a => a,
propertyOnlyInI1: "Hello",
methodOnlyInI2: a => a,
propertyOnlyInI2: "Hello",
}];
interface I11 {
commonMethodDifferentReturnType(a: string, b: number): string;
commonPropertyDifferentType: string;
}
interface I21 {
commonMethodDifferentReturnType(a: string, b: number): number;
commonPropertyDifferentType: number;
}
var i11: I11;
var i21: I21;
var i11Ori21: I11 | I21 = i11;
var i11Ori21: I11 | I21 = i21;
var i11Ori21: I11 | I21 = {
// Like i1
commonMethodDifferentReturnType: (a, b) => {
var z = a.charAt(b);
return z;
},
commonPropertyDifferentType: "hello",
};
var i11Ori21: I11 | I21 = {
// Like i2
commonMethodDifferentReturnType: (a, b) => {
var z = a.charCodeAt(b);
return z;
},
commonPropertyDifferentType: 10,
};
var arrayOrI11OrI21: Array<I11 | I21> = [i11, i21, i11 || i21, {
// Like i1
commonMethodDifferentReturnType: (a, b) => {
var z = a.charAt(b);
return z;
},
commonPropertyDifferentType: "hello",
}, {
// Like i2
commonMethodDifferentReturnType: (a, b) => {
var z = a.charCodeAt(b);
return z;
},
commonPropertyDifferentType: 10,
}];
//// [contextualTypeWithUnionTypeMembers.js]
// Let S be the set of types in U that has a property P.
// If S is not empty, U has a property P of a union type of the types of P from each type in S.
var i1;
var i2;
var i1Ori2 = i1;
var i1Ori2 = i2;
var i1Ori2 = {
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI1: function (a) { return a; },
propertyOnlyInI1: "Hello"
};
var i1Ori2 = {
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI2: function (a) { return a; },
propertyOnlyInI2: "Hello"
};
var i1Ori2 = {
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI1: function (a) { return a; },
propertyOnlyInI1: "Hello",
methodOnlyInI2: function (a) { return a; },
propertyOnlyInI2: "Hello"
};
var arrayI1OrI2 = [i1, i2, {
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI1: function (a) { return a; },
propertyOnlyInI1: "Hello"
},
{
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI2: function (a) { return a; },
propertyOnlyInI2: "Hello"
}, {
commonPropertyType: "hello",
commonMethodType: function (a) { return a; },
commonMethodWithTypeParameter: function (a) { return a; },
methodOnlyInI1: function (a) { return a; },
propertyOnlyInI1: "Hello",
methodOnlyInI2: function (a) { return a; },
propertyOnlyInI2: "Hello"
}];
var i11;
var i21;
var i11Ori21 = i11;
var i11Ori21 = i21;
var i11Ori21 = {
// Like i1
commonMethodDifferentReturnType: function (a, b) {
var z = a.charAt(b);
return z;
},
commonPropertyDifferentType: "hello"
};
var i11Ori21 = {
// Like i2
commonMethodDifferentReturnType: function (a, b) {
var z = a.charCodeAt(b);<|fim▁hole|>var arrayOrI11OrI21 = [i11, i21, i11 || i21, {
// Like i1
commonMethodDifferentReturnType: function (a, b) {
var z = a.charAt(b);
return z;
},
commonPropertyDifferentType: "hello"
}, {
// Like i2
commonMethodDifferentReturnType: function (a, b) {
var z = a.charCodeAt(b);
return z;
},
commonPropertyDifferentType: 10
}];<|fim▁end|>
|
return z;
},
commonPropertyDifferentType: 10
};
|
<|file_name|>spin-debug.js<|end_file_name|><|fim▁begin|>define("gallery/spin/2.0.0/spin-debug", [], function(require, exports, module) {
/**
* Copyright (c) 2011-2014 Felix Gnass
* Licensed under the MIT license
*/
(function(root, factory) {
/* CommonJS */
if (typeof exports == "object") module.exports = factory(); else if (typeof define == "function" && define.amd) define(factory); else root.Spinner = factory();
})(this, function() {
"use strict";
var prefixes = [ "webkit", "Moz", "ms", "O" ], animations = {}, useCssAnimations;
/* Whether to use CSS animations or setTimeout */
/**
* Utility function to create elements. If no tag name is given,
* a DIV is created. Optionally properties can be passed.
*/
function createEl(tag, prop) {
var el = document.createElement(tag || "div"), n;
for (n in prop) el[n] = prop[n];
return el;
}
/**
* Appends children and returns the parent.
*/
function ins(parent) {
for (var i = 1, n = arguments.length; i < n; i++) parent.appendChild(arguments[i]);
return parent;
}
/**
* Insert a new stylesheet to hold the @keyframe or VML rules.
*/
var sheet = function() {
var el = createEl("style", {
type: "text/css"
});
ins(document.getElementsByTagName("head")[0], el);
return el.sheet || el.styleSheet;
}();
/**
* Creates an opacity keyframe animation rule and returns its name.
* Since most mobile Webkits have timing issues with animation-delay,
* we create separate rules for each line/segment.
*/
function addAnimation(alpha, trail, i, lines) {
var name = [ "opacity", trail, ~~(alpha * 100), i, lines ].join("-"), start = .01 + i / lines * 100, z = Math.max(1 - (1 - alpha) / trail * (100 - start), alpha), prefix = useCssAnimations.substring(0, useCssAnimations.indexOf("Animation")).toLowerCase(), pre = prefix && "-" + prefix + "-" || "";
if (!animations[name]) {
sheet.insertRule("@" + pre + "keyframes " + name + "{" + "0%{opacity:" + z + "}" + start + "%{opacity:" + alpha + "}" + (start + .01) + "%{opacity:1}" + (start + trail) % 100 + "%{opacity:" + alpha + "}" + "100%{opacity:" + z + "}" + "}", sheet.cssRules.length);
animations[name] = 1;
}
return name;
}
/**
* Tries various vendor prefixes and returns the first supported property.
*/
function vendor(el, prop) {
var s = el.style, pp, i;
prop = prop.charAt(0).toUpperCase() + prop.slice(1);
for (i = 0; i < prefixes.length; i++) {
pp = prefixes[i] + prop;
if (s[pp] !== undefined) return pp;
}
if (s[prop] !== undefined) return prop;
}
/**
* Sets multiple style properties at once.
*/
function css(el, prop) {
for (var n in prop) el.style[vendor(el, n) || n] = prop[n];
return el;
}
/**
* Fills in default values.
*/
function merge(obj) {
for (var i = 1; i < arguments.length; i++) {
var def = arguments[i];
for (var n in def) if (obj[n] === undefined) obj[n] = def[n];
}
return obj;
}
/**
* Returns the absolute page-offset of the given element.
*/
function pos(el) {
var o = {
x: el.offsetLeft,
y: el.offsetTop
};
while (el = el.offsetParent) o.x += el.offsetLeft, o.y += el.offsetTop;
return o;
}
/**
* Returns the line color from the given string or array.
*/
function getColor(color, idx) {
return typeof color == "string" ? color : color[idx % color.length];
}
// Built-in defaults
var defaults = {
lines: 12, // The number of lines to draw
length: 7, // The length of each line
width: 5, // The line thickness
radius: 10, // The radius of the inner circle
rotate: 0, // Rotation offset
corners: 1, // Roundness (0..1)
color: "#000", // #rgb or #rrggbb
direction: 1, // 1: clockwise, -1: counterclockwise
speed: 1, // Rounds per second
trail: 100, // Afterglow percentage
opacity: 1 / 4, // Opacity of the lines
fps: 20, // Frames per second when using setTimeout()
zIndex: 2e9, // Use a high z-index by default
className: "spinner", // CSS class to assign to the element
top: "50%", // center vertically
left: "50%", // center horizontally
position: "absolute"
};
/** The constructor */
function Spinner(o) {
this.opts = merge(o || {}, Spinner.defaults, defaults);
}
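// Usage sketch (illustration only; assumes an existing DOM node to attach to):
// var spinner = new Spinner({ lines: 12, color: "#000" }).spin(document.getElementById("content"));
// ... later ...
// spinner.stop();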
// Global defaults that override the built-ins:
Spinner.defaults = {};
merge(Spinner.prototype, {
/**
* Adds the spinner to the given target element. If this instance is already
* spinning, it is automatically removed from its previous target by calling
* stop() internally.
*/
spin: function(target) {
this.stop();
var self = this, o = self.opts, el = self.el = css(createEl(0, {
className: o.className
}), {
position: o.position,
width: 0,
zIndex: o.zIndex
}), mid = o.radius + o.length + o.width;
if (target) {
target.insertBefore(el, target.firstChild || null);
css(el, {
left: o.left,
top: o.top
});
}
el.setAttribute("role", "progressbar");
self.lines(el, self.opts);
if (!useCssAnimations) {
// No CSS animation support, use setTimeout() instead
var i = 0, start = (o.lines - 1) * (1 - o.direction) / 2, alpha, fps = o.fps, f = fps / o.speed, ostep = (1 - o.opacity) / (f * o.trail / 100), astep = f / o.lines;
(function anim() {
i++;
for (var j = 0; j < o.lines; j++) {
alpha = Math.max(1 - (i + (o.lines - j) * astep) % f * ostep, o.opacity);
self.opacity(el, j * o.direction + start, alpha, o);
}
self.timeout = self.el && setTimeout(anim, ~~(1e3 / fps));
})();
}
return self;
},
/**
* Stops and removes the Spinner.
*/
stop: function() {
var el = this.el;
if (el) {
clearTimeout(this.timeout);
if (el.parentNode) el.parentNode.removeChild(el);
this.el = undefined;
}
return this;
},
/**
* Internal method that draws the individual lines. Will be overwritten
* in VML fallback mode below.
*/
lines: function(el, o) {
var i = 0, start = (o.lines - 1) * (1 - o.direction) / 2, seg;
function fill(color, shadow) {
return css(createEl(), {
position: "absolute",
width: o.length + o.width + "px",
height: o.width + "px",
background: color,
boxShadow: shadow,
transformOrigin: "left",
transform: "rotate(" + ~~(360 / o.lines * i + o.rotate) + "deg) translate(" + o.radius + "px" + ",0)",
borderRadius: (o.corners * o.width >> 1) + "px"
});
}
for (;i < o.lines; i++) {
seg = css(createEl(), {
position: "absolute",
top: 1 + ~(o.width / 2) + "px",
transform: o.hwaccel ? "translate3d(0,0,0)" : "",
opacity: o.opacity,
animation: useCssAnimations && addAnimation(o.opacity, o.trail, start + i * o.direction, o.lines) + " " + 1 / o.speed + "s linear infinite"
});
if (o.shadow) ins(seg, css(fill("#000", "0 0 4px " + "#000"), {
top: 2 + "px"
}));
ins(el, ins(seg, fill(getColor(o.color, i), "0 0 1px rgba(0,0,0,.1)")));
}
return el;
},
/**
* Internal method that adjusts the opacity of a single line.
* Will be overwritten in VML fallback mode below.
*/
opacity: function(el, i, val) {
if (i < el.childNodes.length) el.childNodes[i].style.opacity = val;
}
});
function initVML() {
/* Utility function to create a VML tag */
function vml(tag, attr) {
return createEl("<" + tag + ' xmlns="urn:schemas-microsoft.com:vml" class="spin-vml">', attr);
}
// No CSS transforms but VML support, add a CSS rule for VML elements:
sheet.addRule(".spin-vml", "behavior:url(#default#VML)");
Spinner.prototype.lines = function(el, o) {
var r = o.length + o.width, s = 2 * r;
function grp() {
return css(vml("group", {
coordsize: s + " " + s,
coordorigin: -r + " " + -r
}), {
width: s,
height: s
});
}
var margin = -(o.width + o.length) * 2 + "px", g = css(grp(), {
position: "absolute",
top: margin,
left: margin
}), i;
function seg(i, dx, filter) {
ins(g, ins(css(grp(), {
rotation: 360 / o.lines * i + "deg",
left: ~~dx
}), ins(css(vml("roundrect", {
arcsize: o.corners
}), {
width: r,
height: o.width,
left: o.radius,
top: -o.width >> 1,
filter: filter
}), vml("fill", {
color: getColor(o.color, i),
opacity: o.opacity
}), vml("stroke", {
opacity: 0
}))));
}
if (o.shadow) for (i = 1; i <= o.lines; i++) seg(i, -2, "progid:DXImageTransform.Microsoft.Blur(pixelradius=2,makeshadow=1,shadowopacity=.3)");
for (i = 1; i <= o.lines; i++) seg(i);
return ins(el, g);
};
Spinner.prototype.opacity = function(el, i, val, o) {
var c = el.firstChild;
o = o.shadow && o.lines || 0;
if (c && i + o < c.childNodes.length) {
c = c.childNodes[i + o];
c = c && c.firstChild;
c = c && c.firstChild;
if (c) c.opacity = val;
}
};
}
var probe = css(createEl("group"), {
behavior: "url(#default#VML)"<|fim▁hole|> if (!vendor(probe, "transform") && probe.adj) initVML(); else useCssAnimations = vendor(probe, "animation");
return Spinner;
});
});<|fim▁end|>
|
});
|
<|file_name|>elementat.ts<|end_file_name|><|fim▁begin|>import { Enumerable } from "../enumerable_";
import { wrapInThunk } from "../common/wrap";
import { OperatorR } from "../common/types";
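// Returns the element at the given zero-based index, or defaultValue when the index is negative or past the end of the sequence.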
function _elementat<T>(source: Iterable<T>, index: number, defaultValue?: T): T | undefined {
if (index >= 0) {
var i = 0;
for (var item of source) {
if (i++ === index) {
return item;
}
}
}
return defaultValue;
}
export function elementat<T>(source: Iterable<T>, index: number, defaultValue: T): T;
export function elementat<T>(source: Iterable<T>, index: number): T | undefined;
export function elementat<T>(index: number, defaultValue: T): OperatorR<T, T>;<|fim▁hole|>}
declare module '../enumerable_' {
interface Enumerable<T> {
elementat(index: number, defaultValue: T): T;
elementat(index: number): T | undefined;
}
}
Enumerable.prototype.elementat = function <T>(this: Enumerable<T>, index: number, defaultValue?: T): T | undefined {
return _elementat<T>(this, index, defaultValue);
};<|fim▁end|>
|
export function elementat<T>(index: number): OperatorR<T, T | undefined>;
export function elementat() {
return wrapInThunk(arguments, _elementat);
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import Storyview from './Story'
export default {<|fim▁hole|> component: Storyview
}<|fim▁end|>
|
path: '/stories/:storyid',
|
<|file_name|>vfmaddsub213ps.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
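// Encoding tests for VFMADDSUB213PS: each case assembles one instruction form and asserts the expected byte sequence for the given operand size.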
fn vfmaddsub213ps_1() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM7)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 89, 166, 255], OperandSize::Dword)
}
fn vfmaddsub213ps_2() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM4)), operand3: Some(IndirectScaledIndexed(EBX, EAX, Two, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 89, 166, 28, 67], OperandSize::Dword)
}
fn vfmaddsub213ps_3() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 105, 166, 226], OperandSize::Qword)
}
fn vfmaddsub213ps_4() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectScaledIndexed(RDX, RDX, Eight, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 81, 166, 20, 210], OperandSize::Qword)
}
fn vfmaddsub213ps_5() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM7)), operand3: Some(Direct(YMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 69, 166, 235], OperandSize::Dword)
}
fn vfmaddsub213ps_6() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM2)), operand3: Some(IndirectScaledIndexed(EAX, EDI, Two, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 109, 166, 12, 120], OperandSize::Dword)
}
fn vfmaddsub213ps_7() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM6)), operand3: Some(Direct(YMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 77, 166, 251], OperandSize::Qword)
}
fn vfmaddsub213ps_8() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM7)), operand3: Some(Indirect(RDI, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 69, 166, 63], OperandSize::Qword)
}
fn vfmaddsub213ps_9() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 242, 69, 140, 166, 196], OperandSize::Dword)
}
fn vfmaddsub213ps_10() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectScaledDisplaced(EAX, Four, 300828003, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 242, 85, 140, 166, 52, 133, 99, 69, 238, 17], OperandSize::Dword)
}
fn vfmaddsub213ps_11() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM7)), operand3: Some(IndirectScaledIndexed(EDI, ECX, Two, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: Some(BroadcastMode::Broadcast1To4) }, &[98, 242, 69, 159, 166, 52, 79], OperandSize::Dword)
}
fn vfmaddsub213ps_12() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM20)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM22)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 162, 125, 142, 166, 230], OperandSize::Qword)
}
fn vfmaddsub213ps_13() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM11)), operand2: Some(Direct(XMM4)), operand3: Some(IndirectDisplaced(RCX, 921723653, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 114, 93, 139, 166, 153, 5, 99, 240, 54], OperandSize::Qword)
}
fn vfmaddsub213ps_14() {<|fim▁hole|> run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(XMM8)), operand2: Some(Direct(XMM30)), operand3: Some(IndirectScaledIndexedDisplaced(RCX, RCX, Two, 1613619070, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: Some(BroadcastMode::Broadcast1To4) }, &[98, 114, 13, 146, 166, 132, 73, 126, 223, 45, 96], OperandSize::Qword)
}
fn vfmaddsub213ps_15() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM3)), operand3: Some(Direct(YMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 101, 171, 166, 206], OperandSize::Dword)
}
fn vfmaddsub213ps_16() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM7)), operand3: Some(Indirect(EDI, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 69, 171, 166, 63], OperandSize::Dword)
}
fn vfmaddsub213ps_17() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM7)), operand3: Some(IndirectScaledDisplaced(ESI, Two, 1060845969, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: Some(BroadcastMode::Broadcast1To8) }, &[98, 242, 69, 185, 166, 36, 117, 145, 57, 59, 63], OperandSize::Dword)
}
fn vfmaddsub213ps_18() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM15)), operand2: Some(Direct(YMM17)), operand3: Some(Direct(YMM27)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 18, 117, 167, 166, 251], OperandSize::Qword)
}
fn vfmaddsub213ps_19() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM24)), operand3: Some(IndirectDisplaced(RCX, 1156191814, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 242, 61, 165, 166, 129, 70, 22, 234, 68], OperandSize::Qword)
}
fn vfmaddsub213ps_20() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(YMM28)), operand2: Some(Direct(YMM19)), operand3: Some(Indirect(RDX, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: Some(BroadcastMode::Broadcast1To8) }, &[98, 98, 101, 178, 166, 34], OperandSize::Qword)
}
fn vfmaddsub213ps_21() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM4)), operand2: Some(Direct(ZMM7)), operand3: Some(Direct(ZMM1)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Nearest), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 242, 69, 154, 166, 225], OperandSize::Dword)
}
fn vfmaddsub213ps_22() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM4)), operand3: Some(IndirectDisplaced(EAX, 198146833, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 242, 93, 207, 166, 136, 17, 123, 207, 11], OperandSize::Dword)
}
fn vfmaddsub213ps_23() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM5)), operand3: Some(IndirectDisplaced(EBX, 1921046755, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: Some(BroadcastMode::Broadcast1To16) }, &[98, 242, 85, 220, 166, 131, 227, 216, 128, 114], OperandSize::Dword)
}
fn vfmaddsub213ps_24() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM31)), operand3: Some(Direct(ZMM25)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 146, 5, 213, 166, 193], OperandSize::Qword)
}
fn vfmaddsub213ps_25() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM25)), operand2: Some(Direct(ZMM17)), operand3: Some(IndirectScaledDisplaced(RSI, Four, 1299732925, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 98, 117, 197, 166, 12, 181, 189, 89, 120, 77], OperandSize::Qword)
}
fn vfmaddsub213ps_26() {
run_test(&Instruction { mnemonic: Mnemonic::VFMADDSUB213PS, operand1: Some(Direct(ZMM31)), operand2: Some(Direct(ZMM25)), operand3: Some(IndirectScaledIndexed(RCX, RDX, Two, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: Some(BroadcastMode::Broadcast1To16) }, &[98, 98, 53, 213, 166, 60, 81], OperandSize::Qword)
}<|fim▁end|>
| |
<|file_name|>extensionManagement.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { localize } from 'vs/nls';
import { TPromise } from 'vs/base/common/winjs.base';
import Event from 'vs/base/common/event';
import { IPager } from 'vs/base/common/paging';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
export const EXTENSION_IDENTIFIER_PATTERN = '^[a-z0-9A-Z][a-z0-9\-A-Z]*\\.[a-z0-9A-Z][a-z0-9\-A-Z]*$';
export const EXTENSION_IDENTIFIER_REGEX = new RegExp(EXTENSION_IDENTIFIER_PATTERN);
export interface ICommand {
command: string;
title: string;
category?: string;
}
export interface IConfigurationProperty {
description: string;
type: string | string[];
default?: any;
}
export interface IConfiguration {
properties: { [key: string]: IConfigurationProperty; };
}
export interface IDebugger {
label?: string;
type: string;
runtime: string;
}
export interface IGrammar {
language: string;
}
export interface IJSONValidation {
fileMatch: string;
}
export interface IKeyBinding {
command: string;
key: string;
when?: string;
mac?: string;
linux?: string;
win?: string;
}
export interface ILanguage {
id: string;
extensions: string[];
aliases: string[];
}
export interface IMenu {
command: string;
alt?: string;
when?: string;
group?: string;
}
export interface ISnippet {
language: string;
}
export interface ITheme {
label: string;
}
export interface ITreeExplorer {
treeExplorerNodeProviderId: string;
treeLabel: string;
icon: string;
}
export interface IExtensionContributions {
commands?: ICommand[];
configuration?: IConfiguration;
debuggers?: IDebugger[];
grammars?: IGrammar[];
jsonValidation?: IJSONValidation[];
keybindings?: IKeyBinding[];
languages?: ILanguage[];
menus?: { [context: string]: IMenu[] };
snippets?: ISnippet[];
themes?: ITheme[];
explorer?: ITreeExplorer;
}
export interface IExtensionManifest {
name: string;
publisher: string;
version: string;
engines: { vscode: string };
displayName?: string;
description?: string;
main?: string;
icon?: string;
categories?: string[];
activationEvents?: string[];
extensionDependencies?: string[];
contributes?: IExtensionContributions;
}
export interface IExtensionIdentity {
name: string;
publisher: string;
}
export interface IGalleryExtensionProperties {
dependencies?: string[];
engine?: string;
}
export interface IGalleryExtensionAsset {
uri: string;
fallbackUri: string;
}
export interface IGalleryExtensionAssets {
manifest: IGalleryExtensionAsset;
readme: IGalleryExtensionAsset;
changelog: IGalleryExtensionAsset;
download: IGalleryExtensionAsset;
icon: IGalleryExtensionAsset;
license: IGalleryExtensionAsset;
}
export interface IGalleryExtension {
id: string;
name: string;
version: string;
date: string;
displayName: string;
publisherId: string;
publisher: string;
publisherDisplayName: string;
description: string;
installCount: number;
rating: number;
ratingCount: number;
assets: IGalleryExtensionAssets;
properties: IGalleryExtensionProperties;
}
export interface IGalleryMetadata {
id: string;
publisherId: string;
publisherDisplayName: string;
}
export enum LocalExtensionType {
System,
User
}
export interface ILocalExtension {
type: LocalExtensionType;
id: string;
manifest: IExtensionManifest;
metadata: IGalleryMetadata;
path: string;
readmeUrl: string;
changelogUrl: string;
}
export const IExtensionManagementService = createDecorator<IExtensionManagementService>('extensionManagementService');
export const IExtensionGalleryService = createDecorator<IExtensionGalleryService>('extensionGalleryService');
export enum SortBy {
NoneOrRelevance = 0,
LastUpdatedDate = 1,
Title = 2,
PublisherName = 3,
InstallCount = 4,
PublishedDate = 5,
AverageRating = 6
}
export enum SortOrder {
Default = 0,
Ascending = 1,
Descending = 2
}
export interface IQueryOptions {
text?: string;
ids?: string[];
names?: string[];
pageSize?: number;
sortBy?: SortBy;
sortOrder?: SortOrder;
}
export interface IExtensionGalleryService {
_serviceBrand: any;
isEnabled(): boolean;
getRequestHeaders(): TPromise<{ [key: string]: string; }>;
query(options?: IQueryOptions): TPromise<IPager<IGalleryExtension>>;
download(extension: IGalleryExtension): TPromise<string>;
getReadme(extension: IGalleryExtension): TPromise<string>;
getManifest(extension: IGalleryExtension): TPromise<IExtensionManifest>;
getChangelog(extension: IGalleryExtension): TPromise<string>;
loadCompatibleVersion(extension: IGalleryExtension): TPromise<IGalleryExtension>;
getAllDependencies(extension: IGalleryExtension): TPromise<IGalleryExtension[]>;
}
export interface InstallExtensionEvent {
id: string;
zipPath?: string;
gallery?: IGalleryExtension;
}
export interface DidInstallExtensionEvent {
id: string;
zipPath?: string;
gallery?: IGalleryExtension;
local?: ILocalExtension;
error?: Error;
}
export interface DidUninstallExtensionEvent {
id: string;
error?: Error;
}
export interface IExtensionManagementService {
_serviceBrand: any;
onInstallExtension: Event<InstallExtensionEvent>;
onDidInstallExtension: Event<DidInstallExtensionEvent>;
onUninstallExtension: Event<string>;
onDidUninstallExtension: Event<DidUninstallExtensionEvent>;
install(zipPath: string): TPromise<void>;
installFromGallery(extension: IGalleryExtension, promptToInstallDependencies?: boolean): TPromise<void>;
uninstall(extension: ILocalExtension): TPromise<void>;
getInstalled(type?: LocalExtensionType): TPromise<ILocalExtension[]>;
}
export const IExtensionEnablementService = createDecorator<IExtensionEnablementService>('extensionEnablementService');
// TODO: @sandy: Merge this into IExtensionManagementService when we have a storage service available in Shared process
export interface IExtensionEnablementService {
_serviceBrand: any;
/**
* Event to listen on for extension enablement changes
*/
onEnablementChanged: Event<string>;
/**
* Returns all globally disabled extension identifiers.
* Returns an empty array if none exist.
*/
getGloballyDisabledExtensions(): string[];
/**
* Returns all workspace disabled extension identifiers.
* Returns an empty array if none exist or workspace does not exist.
*/
getWorkspaceDisabledExtensions(): string[];
/**
* Returns `true` if the given extension can be enabled by calling `setEnablement`, otherwise `false`.
*/
canEnable(identifier: string): boolean;
/**
* Enable or disable the given extension.
* if `workspace` is `true` then enablement is done for workspace, otherwise globally.
*
* Returns a promise that resolves to a boolean value.
* If it resolves to `true`, a restart is required for the change to take effect.
*
* Throws error if enablement is requested for workspace and there is no workspace
*/
setEnablement(identifier: string, enable: boolean, workspace?: boolean): TPromise<boolean>;
}
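// Hypothetical usage sketch (the accessor pattern and extension id are assumptions, not part of this file):
// const enablement = accessor.get(IExtensionEnablementService);
// enablement.setEnablement('publisher.extension', false, true /* workspace */).then(requiresRestart => {
// 	if (requiresRestart) { /* prompt the user to restart */ }
// });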
export const IExtensionTipsService = createDecorator<IExtensionTipsService>('extensionTipsService');
export interface IExtensionTipsService {
_serviceBrand: any;
getRecommendations(): string[];
getWorkspaceRecommendations(): TPromise<string[]>;<|fim▁hole|>export const ExtensionsChannelId = 'extensions';
export const PreferencesLabel = localize('preferences', "Preferences");<|fim▁end|>
|
getKeymapRecommendations(): string[];
}
export const ExtensionsLabel = localize('extensions', "Extensions");
|
<|file_name|>PsyInfo.java<|end_file_name|><|fim▁begin|>/* -*-mode:java; c-basic-offset:2; indent-tabs-mode:nil -*- */
/* JOrbis
* Copyright (C) 2000 ymnk, JCraft,Inc.
*
* Written by: 2000 ymnk<[email protected]>
*
* Many thanks to
* Monty <[email protected]> and
* The XIPHOPHORUS Company http://www.xiph.org/ .
* JOrbis has been based on their awesome works, Vorbis codec.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License
* as published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package com.jcraft.jorbis;
// psychoacoustic setup
class PsyInfo{
int athp;
int decayp;
int smoothp;
int noisefitp;
int noisefit_subblock;
float noisefit_threshdB;
float ath_att;
int tonemaskp;
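// per-band tone masking attenuation curves (125 Hz .. 8 kHz), five entries each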
float[] toneatt_125Hz=new float[5];
float[] toneatt_250Hz=new float[5];
float[] toneatt_500Hz=new float[5];
float[] toneatt_1000Hz=new float[5];
float[] toneatt_2000Hz=new float[5];
float[] toneatt_4000Hz=new float[5];
float[] toneatt_8000Hz=new float[5];
int peakattp;
float[] peakatt_125Hz=new float[5];
float[] peakatt_250Hz=new float[5];
float[] peakatt_500Hz=new float[5];
float[] peakatt_1000Hz=new float[5];
float[] peakatt_2000Hz=new float[5];
float[] peakatt_4000Hz=new float[5];
float[] peakatt_8000Hz=new float[5];
<|fim▁hole|> float[] noiseatt_1000Hz=new float[5];
float[] noiseatt_2000Hz=new float[5];
float[] noiseatt_4000Hz=new float[5];
float[] noiseatt_8000Hz=new float[5];
float max_curve_dB;
float attack_coeff;
float decay_coeff;
void free(){
}
}<|fim▁end|>
|
int noisemaskp;
float[] noiseatt_125Hz=new float[5];
float[] noiseatt_250Hz=new float[5];
float[] noiseatt_500Hz=new float[5];
|
<|file_name|>test_crossdomain.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest.common import custom_matchers
from tempest.common import utils
from tempest.lib import decorators
class CrossdomainTest(base.BaseObjectTest):
@classmethod
def resource_setup(cls):
super(CrossdomainTest, cls).resource_setup()
cls.xml_start = '<?xml version="1.0"?>\n' \
'<!DOCTYPE cross-domain-policy SYSTEM ' \
'"http://www.adobe.com/xml/dtds/cross-domain-policy.' \
'dtd" >\n<cross-domain-policy>\n'
cls.xml_end = "</cross-domain-policy>"
def setUp(self):<|fim▁hole|> def test_get_crossdomain_policy(self):
url = self.account_client._get_base_version_url() + "crossdomain.xml"
resp, body = self.account_client.raw_request(url, "GET")
self.account_client._error_checker(resp, body)
body = body.decode()
self.assertTrue(body.startswith(self.xml_start) and
body.endswith(self.xml_end))
# The target of the request is not any Swift resource. Therefore, the
# existence of response header is checked without a custom matcher.
self.assertIn('content-length', resp)
self.assertIn('content-type', resp)
self.assertIn('x-trans-id', resp)
self.assertIn('date', resp)
# Check only the format of common headers with custom matcher
self.assertThat(resp, custom_matchers.AreAllWellFormatted())<|fim▁end|>
|
super(CrossdomainTest, self).setUp()
@decorators.idempotent_id('d1b8b031-b622-4010-82f9-ff78a9e915c7')
@utils.requires_ext(extension='crossdomain', service='object')
|
<|file_name|>analyzer_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analysis
import (
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"sort"
"strconv"
"testing"
"github.com/stretchr/testify/require"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
"github.com/stretchr/testify/assert"
)
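// schemeNames flattens grouped security requirements into a sorted list of scheme names for order-independent comparison.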
func schemeNames(schemes [][]SecurityRequirement) []string {
var names []string
for _, scheme := range schemes {
for _, v := range scheme {
names = append(names, v.Name)
}
}
sort.Strings(names)
return names
}
func makeFixturepec(pi, pi2 spec.PathItem, formatParam *spec.Parameter) *spec.Swagger {
return &spec.Swagger{
SwaggerProps: spec.SwaggerProps{
Consumes: []string{"application/json"},
Produces: []string{"application/json"},
Security: []map[string][]string{
{"apikey": nil},
},
SecurityDefinitions: map[string]*spec.SecurityScheme{
"basic": spec.BasicAuth(),
"apiKey": spec.APIKeyAuth("api_key", "query"),
"oauth2": spec.OAuth2AccessToken("http://authorize.com", "http://token.com"),
},
Parameters: map[string]spec.Parameter{"format": *formatParam},
Paths: &spec.Paths{
Paths: map[string]spec.PathItem{
"/": pi,
"/items": pi2,
},
},
},
}
}
func TestAnalyzer(t *testing.T) {
formatParam := spec.QueryParam("format").Typed("string", "")
limitParam := spec.QueryParam("limit").Typed("integer", "int32")
limitParam.Extensions = spec.Extensions(map[string]interface{}{})
limitParam.Extensions.Add("go-name", "Limit")
skipParam := spec.QueryParam("skip").Typed("integer", "int32")
pi := spec.PathItem{}
pi.Parameters = []spec.Parameter{*limitParam}
op := &spec.Operation{}
op.Consumes = []string{"application/x-yaml"}
op.Produces = []string{"application/x-yaml"}
op.Security = []map[string][]string{
{"oauth2": {}},
{"basic": nil},
}
op.ID = "someOperation"
op.Parameters = []spec.Parameter{*skipParam}
pi.Get = op
pi2 := spec.PathItem{}
pi2.Parameters = []spec.Parameter{*limitParam}
op2 := &spec.Operation{}
op2.ID = "anotherOperation"
op2.Parameters = []spec.Parameter{*skipParam}
pi2.Get = op2
spec := makeFixturepec(pi, pi2, formatParam)
analyzer := New(spec)
assert.Len(t, analyzer.consumes, 2)
assert.Len(t, analyzer.produces, 2)
assert.Len(t, analyzer.operations, 1)
assert.Equal(t, analyzer.operations["GET"]["/"], spec.Paths.Paths["/"].Get)
expected := []string{"application/x-yaml"}
sort.Strings(expected)
consumes := analyzer.ConsumesFor(spec.Paths.Paths["/"].Get)
sort.Strings(consumes)
assert.Equal(t, expected, consumes)
produces := analyzer.ProducesFor(spec.Paths.Paths["/"].Get)
sort.Strings(produces)
assert.Equal(t, expected, produces)
expected = []string{"application/json"}
sort.Strings(expected)
consumes = analyzer.ConsumesFor(spec.Paths.Paths["/items"].Get)
sort.Strings(consumes)
assert.Equal(t, expected, consumes)
produces = analyzer.ProducesFor(spec.Paths.Paths["/items"].Get)
sort.Strings(produces)
assert.Equal(t, expected, produces)
expectedSchemes := [][]SecurityRequirement{
{
{Name: "oauth2", Scopes: []string{}},
{Name: "basic", Scopes: nil},
},
}
schemes := analyzer.SecurityRequirementsFor(spec.Paths.Paths["/"].Get)
assert.Equal(t, schemeNames(expectedSchemes), schemeNames(schemes))
securityDefinitions := analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
assert.Equal(t, *spec.SecurityDefinitions["basic"], securityDefinitions["basic"])
assert.Equal(t, *spec.SecurityDefinitions["oauth2"], securityDefinitions["oauth2"])
parameters := analyzer.ParamsFor("GET", "/")
assert.Len(t, parameters, 2)
operations := analyzer.OperationIDs()
assert.Len(t, operations, 2)
producers := analyzer.RequiredProduces()
assert.Len(t, producers, 2)
consumers := analyzer.RequiredConsumes()
assert.Len(t, consumers, 2)
authSchemes := analyzer.RequiredSecuritySchemes()
assert.Len(t, authSchemes, 3)
ops := analyzer.Operations()
assert.Len(t, ops, 1)
assert.Len(t, ops["GET"], 2)
op, ok := analyzer.OperationFor("get", "/")
assert.True(t, ok)
assert.NotNil(t, op)
op, ok = analyzer.OperationFor("delete", "/")
assert.False(t, ok)
assert.Nil(t, op)
// check for duplicates in sec. requirements for operation
pi.Get.Security = []map[string][]string{
{"oauth2": {}},
{"basic": nil},
{"basic": nil},
}
spec = makeFixturepec(pi, pi2, formatParam)
analyzer = New(spec)
securityDefinitions = analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
assert.Len(t, securityDefinitions, 2)
assert.Equal(t, *spec.SecurityDefinitions["basic"], securityDefinitions["basic"])
assert.Equal(t, *spec.SecurityDefinitions["oauth2"], securityDefinitions["oauth2"])
// check for empty (optional) in sec. requirements for operation
pi.Get.Security = []map[string][]string{
{"oauth2": {}},
{"": nil},
{"basic": nil},
}
spec = makeFixturepec(pi, pi2, formatParam)
analyzer = New(spec)
securityDefinitions = analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
assert.Len(t, securityDefinitions, 2)
assert.Equal(t, *spec.SecurityDefinitions["basic"], securityDefinitions["basic"])
assert.Equal(t, *spec.SecurityDefinitions["oauth2"], securityDefinitions["oauth2"])
}
func TestDefinitionAnalysis(t *testing.T) {
doc, err := loadSpec(filepath.Join("fixtures", "definitions.yml"))
if assert.NoError(t, err) {
analyzer := New(doc)
definitions := analyzer.allSchemas
// parameters
assertSchemaRefExists(t, definitions, "#/parameters/someParam/schema")
assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/parameters/1/schema")
assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/parameters/1/schema")
// responses
assertSchemaRefExists(t, definitions, "#/responses/someResponse/schema")
assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/default/schema")
assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
// definitions
assertSchemaRefExists(t, definitions, "#/definitions/tag")
assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/id")
assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/value")
assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category")
assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/id")
assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/value")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps/additionalProperties")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/0")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/1")
assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/additionalItems")
assertSchemaRefExists(t, definitions, "#/definitions/withNot")
assertSchemaRefExists(t, definitions, "#/definitions/withNot/not")
assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf")
assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/0")
assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/1")
assertSchemaRefExists(t, definitions, "#/definitions/withAllOf")
assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/0")
assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/1")
assertSchemaRefExists(t, definitions, "#/definitions/withOneOf/oneOf/0")
assertSchemaRefExists(t, definitions, "#/definitions/withOneOf/oneOf/1")
allOfs := analyzer.allOfs
assert.Len(t, allOfs, 1)
assert.Contains(t, allOfs, "#/definitions/withAllOf")
}
}
func loadSpec(path string) (*spec.Swagger, error) {
spec.PathLoader = func(path string) (json.RawMessage, error) {
ext := filepath.Ext(path)
if ext == ".yml" || ext == ".yaml" {
return swag.YAMLDoc(path)
}
data, err := swag.LoadFromFileOrHTTP(path)
if err != nil {
return nil, err
}
return json.RawMessage(data), nil
}
data, err := swag.YAMLDoc(path)
if err != nil {
return nil, err
}
var sw spec.Swagger
if err := json.Unmarshal(data, &sw); err != nil {
return nil, err
}
return &sw, nil
}
func TestReferenceAnalysis(t *testing.T) {
doc, err := loadSpec(filepath.Join("fixtures", "references.yml"))
if assert.NoError(t, err) {
an := New(doc)
definitions := an.references
// parameters
assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/parameters/0")
assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0")
// path items
assertRefExists(t, definitions.pathItems, "#/paths/~1other~1place")
// responses
assertRefExists(t, definitions.responses, "#/paths/~1some~1where~1{id}/get/responses/404")
// definitions
assertRefExists(t, definitions.schemas, "#/responses/notFound/schema")
assertRefExists(t, definitions.schemas, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
assertRefExists(t, definitions.schemas, "#/definitions/tag/properties/audit")
// items
// Supported non-swagger 2.0 constructs ($ref in simple schema items)
assertRefExists(t, definitions.allRefs, "#/paths/~1some~1where~1{id}/get/parameters/1/items")
assertRefExists(t, definitions.allRefs, "#/paths/~1some~1where~1{id}/get/parameters/2/items")
assertRefExists(t, definitions.allRefs,
"#/paths/~1some~1where~1{id}/get/responses/default/headers/x-array-header/items")
assert.Lenf(t, an.AllItemsReferences(), 3, "Expected 3 items references in this spec")
assertRefExists(t, definitions.parameterItems, "#/paths/~1some~1where~1{id}/get/parameters/1/items")
assertRefExists(t, definitions.parameterItems, "#/paths/~1some~1where~1{id}/get/parameters/2/items")
assertRefExists(t, definitions.headerItems,
"#/paths/~1some~1where~1{id}/get/responses/default/headers/x-array-header/items")
}
}
func assertRefExists(t testing.TB, data map[string]spec.Ref, key string) bool {
if _, ok := data[key]; !ok {
return assert.Fail(t, fmt.Sprintf("expected %q to exist in the ref bag", key))
}
return true
}
func assertSchemaRefExists(t testing.TB, data map[string]SchemaRef, key string) bool {
if _, ok := data[key]; !ok {
return assert.Fail(t, fmt.Sprintf("expected %q to exist in schema ref bag", key))
}
return true
}
func TestPatternAnalysis(t *testing.T) {
doc, err := loadSpec(filepath.Join("fixtures", "patterns.yml"))
if assert.NoError(t, err) {
an := New(doc)
pt := an.patterns
// parameters
assertPattern(t, pt.parameters, "#/parameters/idParam", "a[A-Za-Z0-9]+")
assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/parameters/1", "b[A-Za-z0-9]+")
assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0", "[abc][0-9]+")
// responses
assertPattern(t, pt.headers, "#/responses/notFound/headers/ContentLength", "[0-9]+")
assertPattern(t, pt.headers,
"#/paths/~1some~1where~1{id}/get/responses/200/headers/X-Request-Id", "d[A-Za-z0-9]+")
// definitions
assertPattern(t, pt.schemas,
"#/paths/~1other~1place/post/parameters/0/schema/properties/value", "e[A-Za-z0-9]+")
assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/responses/200/schema/properties/data", "[0-9]+[abd]")
assertPattern(t, pt.schemas, "#/definitions/named", "f[A-Za-z0-9]+")
assertPattern(t, pt.schemas, "#/definitions/tag/properties/value", "g[A-Za-z0-9]+")
// items<|fim▁hole|>
// patternProperties (beyond Swagger 2.0)
_, ok := an.spec.Definitions["withPatternProperties"]
assert.True(t, ok)
_, ok = an.allSchemas["#/definitions/withPatternProperties/patternProperties/^prop[0-9]+$"]
assert.True(t, ok)
}
}
func assertPattern(t testing.TB, data map[string]string, key, pattern string) bool {
if assert.Contains(t, data, key) {
return assert.Equal(t, pattern, data[key])
}
return false
}
func panickerParamsAsMap() {
s := prepareTestParamsInvalid("fixture-342.yaml")
if s == nil {
return
}
m := make(map[string]spec.Parameter)
if pi, ok := s.spec.Paths.Paths["/fixture"]; ok {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, nil)
}
}
func panickerParamsAsMap2() {
s := prepareTestParamsInvalid("fixture-342-2.yaml")
if s == nil {
return
}
m := make(map[string]spec.Parameter)
if pi, ok := s.spec.Paths.Paths["/fixture"]; ok {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, nil)
}
}
func panickerParamsAsMap3() {
s := prepareTestParamsInvalid("fixture-342-3.yaml")
if s == nil {
return
}
m := make(map[string]spec.Parameter)
if pi, ok := s.spec.Paths.Paths["/fixture"]; ok {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, nil)
}
}
func TestAnalyzer_paramsAsMap(Pt *testing.T) {
s := prepareTestParamsValid()
if assert.NotNil(Pt, s) {
m := make(map[string]spec.Parameter)
pi, ok := s.spec.Paths.Paths["/items"]
if assert.True(Pt, ok) {
s.paramsAsMap(pi.Parameters, m, nil)
assert.Len(Pt, m, 1)
p, ok := m["query#Limit"]
assert.True(Pt, ok)
assert.Equal(Pt, p.Name, "limit")
}
}
// An invalid spec, but passes this step (errors are figured out at a higher level)
s = prepareTestParamsInvalid("fixture-1289-param.yaml")
if assert.NotNil(Pt, s) {
m := make(map[string]spec.Parameter)
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, nil)
assert.Len(Pt, m, 1)
p, ok := m["body#DespicableMe"]
assert.True(Pt, ok)
assert.Equal(Pt, p.Name, "despicableMe")
}
}
}
func TestAnalyzer_paramsAsMapWithCallback(Pt *testing.T) {
s := prepareTestParamsInvalid("fixture-342.yaml")
if assert.NotNil(Pt, s) {
// No bail out callback
m := make(map[string]spec.Parameter)
e := []string{}
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, func(param spec.Parameter, err error) bool {
//Pt.Logf("ERROR on %+v : %v", param, err)
e = append(e, err.Error())
return true // Continue
})
}
assert.Contains(Pt, e, `resolved reference is not a parameter: "#/definitions/sample_info/properties/sid"`)
assert.Contains(Pt, e, `invalid reference: "#/definitions/sample_info/properties/sids"`)
// bail out callback
m = make(map[string]spec.Parameter)
e = []string{}
pi, ok = s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, func(param spec.Parameter, err error) bool {
//Pt.Logf("ERROR on %+v : %v", param, err)
e = append(e, err.Error())
return false // Bail out
})
}
// We got one then bail out
assert.Len(Pt, e, 1)
}
// Bail out after ref failure: exercising another path
s = prepareTestParamsInvalid("fixture-342-2.yaml")
if assert.NotNil(Pt, s) {
// bail out callback
m := make(map[string]spec.Parameter)
e := []string{}
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, func(param spec.Parameter, err error) bool {
//Pt.Logf("ERROR on %+v : %v", param, err)
e = append(e, err.Error())
return false // Bail out
})
}
// We got one then bail out
assert.Len(Pt, e, 1)
}
// Bail out after ref failure: exercising another path
s = prepareTestParamsInvalid("fixture-342-3.yaml")
if assert.NotNil(Pt, s) {
// bail out callback
m := make(map[string]spec.Parameter)
e := []string{}
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.paramsAsMap(pi.Parameters, m, func(param spec.Parameter, err error) bool {
//Pt.Logf("ERROR on %+v : %v", param, err)
e = append(e, err.Error())
return false // Bail out
})
}
// We got one then bail out
assert.Len(Pt, e, 1)
}
}
func TestAnalyzer_paramsAsMap_Panic(Pt *testing.T) {
assert.Panics(Pt, panickerParamsAsMap)
// Specifically on invalid resolved type
assert.Panics(Pt, panickerParamsAsMap2)
// Specifically on invalid ref
assert.Panics(Pt, panickerParamsAsMap3)
}
func TestAnalyzer_SafeParamsFor(Pt *testing.T) {
s := prepareTestParamsInvalid("fixture-342.yaml")
if assert.NotNil(Pt, s) {
e := []string{}
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
for range s.SafeParamsFor("Get", "/fixture", func(param spec.Parameter, err error) bool {
e = append(e, err.Error())
return true // Continue
}) {
assert.Fail(Pt, "There should be no safe parameter in this testcase")
}
}
assert.Contains(Pt, e, `resolved reference is not a parameter: "#/definitions/sample_info/properties/sid"`)
assert.Contains(Pt, e, `invalid reference: "#/definitions/sample_info/properties/sids"`)
}
}
func panickerParamsFor() {
s := prepareTestParamsInvalid("fixture-342.yaml")
pi, ok := s.spec.Paths.Paths["/fixture"]
if ok {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
s.ParamsFor("Get", "/fixture")
}
}
func TestAnalyzer_ParamsFor(Pt *testing.T) {
// Valid example
s := prepareTestParamsValid()
if assert.NotNil(Pt, s) {
params := s.ParamsFor("Get", "/items")
assert.True(Pt, len(params) > 0)
}
// Invalid example
assert.Panics(Pt, panickerParamsFor)
}
func TestAnalyzer_SafeParametersFor(Pt *testing.T) {
s := prepareTestParamsInvalid("fixture-342.yaml")
if assert.NotNil(Pt, s) {
e := []string{}
pi, ok := s.spec.Paths.Paths["/fixture"]
if assert.True(Pt, ok) {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
for range s.SafeParametersFor("fixtureOp", func(param spec.Parameter, err error) bool {
e = append(e, err.Error())
return true // Continue
}) {
assert.Fail(Pt, "There should be no safe parameter in this testcase")
}
}
assert.Contains(Pt, e, `resolved reference is not a parameter: "#/definitions/sample_info/properties/sid"`)
assert.Contains(Pt, e, `invalid reference: "#/definitions/sample_info/properties/sids"`)
}
}
func panickerParametersFor() {
s := prepareTestParamsInvalid("fixture-342.yaml")
if s == nil {
return
}
pi, ok := s.spec.Paths.Paths["/fixture"]
if ok {
pi.Parameters = pi.PathItemProps.Get.OperationProps.Parameters
//func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
s.ParametersFor("fixtureOp")
}
}
func TestAnalyzer_ParametersFor(Pt *testing.T) {
// Valid example
s := prepareTestParamsValid()
params := s.ParamsFor("Get", "/items")
assert.True(Pt, len(params) > 0)
// Invalid example
assert.Panics(Pt, panickerParametersFor)
}
func prepareTestParamsValid() *Spec {
formatParam := spec.QueryParam("format").Typed("string", "")
limitParam := spec.QueryParam("limit").Typed("integer", "int32")
limitParam.Extensions = spec.Extensions(map[string]interface{}{})
limitParam.Extensions.Add("go-name", "Limit")
skipParam := spec.QueryParam("skip").Typed("integer", "int32")
pi := spec.PathItem{}
pi.Parameters = []spec.Parameter{*limitParam}
op := &spec.Operation{}
op.Consumes = []string{"application/x-yaml"}
op.Produces = []string{"application/x-yaml"}
op.Security = []map[string][]string{
{"oauth2": {}},
{"basic": nil},
}
op.ID = "someOperation"
op.Parameters = []spec.Parameter{*skipParam}
pi.Get = op
pi2 := spec.PathItem{}
pi2.Parameters = []spec.Parameter{*limitParam}
op2 := &spec.Operation{}
op2.ID = "anotherOperation"
op2.Parameters = []spec.Parameter{*skipParam}
pi2.Get = op2
spec := makeFixturepec(pi, pi2, formatParam)
analyzer := New(spec)
return analyzer
}
func prepareTestParamsInvalid(fixture string) *Spec {
cwd, _ := os.Getwd()
bp := filepath.Join(cwd, "fixtures", fixture)
spec, err := loadSpec(bp)
if err != nil {
log.Printf("Warning: fixture %s could not be loaded: %v", fixture, err)
return nil
}
analyzer := New(spec)
return analyzer
}
func TestSecurityDefinitionsFor(t *testing.T) {
spec := prepareTestParamsAuth()
pi1 := spec.spec.Paths.Paths["/"].Get
pi2 := spec.spec.Paths.Paths["/items"].Get
defs1 := spec.SecurityDefinitionsFor(pi1)
require.Contains(t, defs1, "oauth2")
require.Contains(t, defs1, "basic")
require.NotContains(t, defs1, "apiKey")
defs2 := spec.SecurityDefinitionsFor(pi2)
require.Contains(t, defs2, "oauth2")
require.Contains(t, defs2, "basic")
require.Contains(t, defs2, "apiKey")
}
func TestSecurityRequirements(t *testing.T) {
spec := prepareTestParamsAuth()
pi1 := spec.spec.Paths.Paths["/"].Get
pi2 := spec.spec.Paths.Paths["/items"].Get
scopes := []string{"the-scope"}
reqs1 := spec.SecurityRequirementsFor(pi1)
require.Len(t, reqs1, 2)
require.Len(t, reqs1[0], 1)
require.Equal(t, reqs1[0][0].Name, "oauth2")
require.Equal(t, reqs1[0][0].Scopes, scopes)
require.Len(t, reqs1[1], 1)
require.Equal(t, reqs1[1][0].Name, "basic")
require.Empty(t, reqs1[1][0].Scopes)
reqs2 := spec.SecurityRequirementsFor(pi2)
require.Len(t, reqs2, 3)
require.Len(t, reqs2[0], 1)
require.Equal(t, reqs2[0][0].Name, "oauth2")
require.Equal(t, reqs2[0][0].Scopes, scopes)
require.Len(t, reqs2[1], 1)
require.Empty(t, reqs2[1][0].Name)
require.Empty(t, reqs2[1][0].Scopes)
require.Len(t, reqs2[2], 2)
//
//require.Equal(t, reqs2[2][0].Name, "basic")
require.Contains(t, reqs2[2], SecurityRequirement{Name: "basic", Scopes: []string{}})
require.Empty(t, reqs2[2][0].Scopes)
//require.Equal(t, reqs2[2][1].Name, "apiKey")
require.Contains(t, reqs2[2], SecurityRequirement{Name: "apiKey", Scopes: []string{}})
require.Empty(t, reqs2[2][1].Scopes)
}
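// Note: the SecurityRequirement literals above pin down the shape this
// package reports: a scheme name plus its required scopes. Contains is used
// for reqs2[2] (rather than indexing) presumably because the relative order
// of the two schemes declared in one requirement group is not guaranteed.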
func TestSecurityRequirementsDefinitions(t *testing.T) {
spec := prepareTestParamsAuth()
pi1 := spec.spec.Paths.Paths["/"].Get
pi2 := spec.spec.Paths.Paths["/items"].Get
reqs1 := spec.SecurityRequirementsFor(pi1)
defs11 := spec.SecurityDefinitionsForRequirements(reqs1[0])
require.Contains(t, defs11, "oauth2")
defs12 := spec.SecurityDefinitionsForRequirements(reqs1[1])
require.Contains(t, defs12, "basic")
require.NotContains(t, defs12, "apiKey")
reqs2 := spec.SecurityRequirementsFor(pi2)
defs21 := spec.SecurityDefinitionsForRequirements(reqs2[0])
require.Len(t, defs21, 1)
require.Contains(t, defs21, "oauth2")
require.NotContains(t, defs21, "basic")
require.NotContains(t, defs21, "apiKey")
defs22 := spec.SecurityDefinitionsForRequirements(reqs2[1])
require.NotNil(t, defs22)
require.Empty(t, defs22)
defs23 := spec.SecurityDefinitionsForRequirements(reqs2[2])
require.Len(t, defs23, 2)
require.NotContains(t, defs23, "oauth2")
require.Contains(t, defs23, "basic")
require.Contains(t, defs23, "apiKey")
}
func prepareTestParamsAuth() *Spec {
formatParam := spec.QueryParam("format").Typed("string", "")
limitParam := spec.QueryParam("limit").Typed("integer", "int32")
limitParam.Extensions = spec.Extensions(map[string]interface{}{})
limitParam.Extensions.Add("go-name", "Limit")
skipParam := spec.QueryParam("skip").Typed("integer", "int32")
pi := spec.PathItem{}
pi.Parameters = []spec.Parameter{*limitParam}
op := &spec.Operation{}
op.Consumes = []string{"application/x-yaml"}
op.Produces = []string{"application/x-yaml"}
op.Security = []map[string][]string{
{"oauth2": {"the-scope"}},
{"basic": nil},
}
op.ID = "someOperation"
op.Parameters = []spec.Parameter{*skipParam}
pi.Get = op
pi2 := spec.PathItem{}
pi2.Parameters = []spec.Parameter{*limitParam}
op2 := &spec.Operation{}
op2.ID = "anotherOperation"
op2.Security = []map[string][]string{
{"oauth2": {"the-scope"}},
{},
{
"basic": {},
"apiKey": {},
},
}
op2.Parameters = []spec.Parameter{*skipParam}
pi2.Get = op2
oauth := spec.OAuth2AccessToken("http://authorize.com", "http://token.com")
oauth.AddScope("the-scope", "the scope gives access to ...")
spec := &spec.Swagger{
SwaggerProps: spec.SwaggerProps{
Consumes: []string{"application/json"},
Produces: []string{"application/json"},
Security: []map[string][]string{
{"apikey": nil},
},
SecurityDefinitions: map[string]*spec.SecurityScheme{
"basic": spec.BasicAuth(),
"apiKey": spec.APIKeyAuth("api_key", "query"),
"oauth2": oauth,
},
Parameters: map[string]spec.Parameter{"format": *formatParam},
Paths: &spec.Paths{
Paths: map[string]spec.PathItem{
"/": pi,
"/items": pi2,
},
},
},
}
analyzer := New(spec)
return analyzer
}
func TestMoreParamAnalysis(t *testing.T) {
cwd, _ := os.Getwd()
bp := filepath.Join(cwd, "fixtures", "parameters", "fixture-parameters.yaml")
sp, err := loadSpec(bp)
if !assert.NoError(t, err) {
t.FailNow()
return
}
an := New(sp)
res := an.AllPatterns()
assert.Lenf(t, res, 6, "Expected 6 patterns in this spec")
res = an.SchemaPatterns()
assert.Lenf(t, res, 1, "Expected 1 schema pattern in this spec")
res = an.HeaderPatterns()
assert.Lenf(t, res, 2, "Expected 2 header patterns in this spec")
res = an.ItemsPatterns()
assert.Lenf(t, res, 2, "Expected 2 items patterns in this spec")
res = an.ParameterPatterns()
assert.Lenf(t, res, 1, "Expected 1 simple param pattern in this spec")
refs := an.AllRefs()
assert.Lenf(t, refs, 10, "Expected 10 reference usages in this spec")
references := an.AllReferences()
assert.Lenf(t, references, 14, "Expected 14 reference usages in this spec")
references = an.AllItemsReferences()
assert.Lenf(t, references, 0, "Expected 0 items references in this spec")
references = an.AllPathItemReferences()
assert.Lenf(t, references, 1, "Expected 1 pathItem reference in this spec")
references = an.AllResponseReferences()
assert.Lenf(t, references, 3, "Expected 3 response references in this spec")
references = an.AllParameterReferences()
assert.Lenf(t, references, 6, "Expected 6 parameter references in this spec")
schemaRefs := an.AllDefinitions()
assert.Lenf(t, schemaRefs, 14, "Expected 14 schema definitions in this spec")
//for _, refs := range schemaRefs {
// t.Logf("Schema Ref: %s (%s)", refs.Name, refs.Ref.String())
//}
schemaRefs = an.SchemasWithAllOf()
assert.Lenf(t, schemaRefs, 1, "Expected 1 schema with AllOf definition in this spec")
method, path, op, found := an.OperationForName("postSomeWhere")
assert.Equal(t, "POST", method)
assert.Equal(t, "/some/where", path)
if assert.NotNil(t, op) && assert.True(t, found) {
sec := an.SecurityRequirementsFor(op)
assert.Nil(t, sec)
secScheme := an.SecurityDefinitionsFor(op)
assert.Nil(t, secScheme)
bag := an.ParametersFor("postSomeWhere")
assert.Lenf(t, bag, 6, "Expected 6 parameters for this operation")
}
method, path, op, found = an.OperationForName("notFound")
assert.Equal(t, "", method)
assert.Equal(t, "", path)
assert.Nil(t, op)
assert.False(t, found)
// does not take ops under pathItem $ref
ops := an.OperationMethodPaths()
assert.Lenf(t, ops, 3, "Expected 3 ops")
ops = an.OperationIDs()
assert.Lenf(t, ops, 3, "Expected 3 ops")
assert.Contains(t, ops, "postSomeWhere")
assert.Contains(t, ops, "GET /some/where/else")
assert.Contains(t, ops, "GET /some/where")
}
func Test_EdgeCases(t *testing.T) {
// check return values are consistent in some nil/empty edge cases
sp := Spec{}
res1 := sp.AllPaths()
assert.Nil(t, res1)
res2 := sp.OperationIDs()
assert.Nil(t, res2)
res3 := sp.OperationMethodPaths()
assert.Nil(t, res3)
res4 := sp.structMapKeys(nil)
assert.Nil(t, res4)
res5 := sp.structMapKeys(make(map[string]struct{}, 10))
assert.Nil(t, res5)
// check AllRefs() skips empty $refs
sp.references.allRefs = make(map[string]spec.Ref, 3)
for i := 0; i < 3; i++ {
sp.references.allRefs["ref"+strconv.Itoa(i)] = spec.Ref{}
}
assert.Len(t, sp.references.allRefs, 3)
res6 := sp.AllRefs()
assert.Len(t, res6, 0)
// check AllRefs() skips duplicate $refs
sp.references.allRefs["refToOne"] = spec.MustCreateRef("#/ref1")
sp.references.allRefs["refToOneAgain"] = spec.MustCreateRef("#/ref1")
res7 := sp.AllRefs()
assert.NotNil(t, res7)
assert.Len(t, res7, 1)
}
func TestEnumAnalysis(t *testing.T) {
doc, err := loadSpec(filepath.Join("fixtures", "enums.yml"))
if assert.NoError(t, err) {
an := New(doc)
en := an.enums
// parameters
assertEnum(t, en.parameters, "#/parameters/idParam", []interface{}{"aA", "b9", "c3"})
assertEnum(t, en.parameters, "#/paths/~1some~1where~1{id}/parameters/1", []interface{}{"bA", "ba", "b9"})
assertEnum(t, en.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0", []interface{}{"a0", "b1", "c2"})
// responses
assertEnum(t, en.headers, "#/responses/notFound/headers/ContentLength", []interface{}{"1234", "123"})
assertEnum(t, en.headers,
"#/paths/~1some~1where~1{id}/get/responses/200/headers/X-Request-Id", []interface{}{"dA", "d9"})
// definitions
assertEnum(t, en.schemas,
"#/paths/~1other~1place/post/parameters/0/schema/properties/value", []interface{}{"eA", "e9"})
assertEnum(t, en.schemas, "#/paths/~1other~1place/post/responses/200/schema/properties/data",
[]interface{}{"123a", "123b", "123d"})
assertEnum(t, en.schemas, "#/definitions/named", []interface{}{"fA", "f9"})
assertEnum(t, en.schemas, "#/definitions/tag/properties/value", []interface{}{"gA", "ga", "g9"})
assertEnum(t, en.schemas, "#/definitions/record",
[]interface{}{`{"createdAt": "2018-08-31"}`, `{"createdAt": "2018-09-30"}`})
// array enum
assertEnum(t, en.parameters, "#/paths/~1some~1where~1{id}/get/parameters/1",
[]interface{}{[]interface{}{"cA", "cz", "c9"}, []interface{}{"cA", "cz"}, []interface{}{"cz", "c9"}})
// items
assertEnum(t, en.items, "#/paths/~1some~1where~1{id}/get/parameters/1/items", []interface{}{"cA", "cz", "c9"})
assertEnum(t, en.items, "#/paths/~1other~1place/post/responses/default/headers/Via/items",
[]interface{}{"AA", "Ab"})
res := an.AllEnums()
assert.Lenf(t, res, 14, "Expected 14 enums in this spec, but got %d", len(res))
res = an.ParameterEnums()
assert.Lenf(t, res, 4, "Expected 4 enums in this spec, but got %d", len(res))
res = an.SchemaEnums()
assert.Lenf(t, res, 6, "Expected 6 schema enums in this spec, but got %d", len(res))
res = an.HeaderEnums()
assert.Lenf(t, res, 2, "Expected 2 header enums in this spec, but got %d", len(res))
res = an.ItemsEnums()
assert.Lenf(t, res, 2, "Expected 2 items enums in this spec, but got %d", len(res))
}
}
func assertEnum(t testing.TB, data map[string][]interface{}, key string, enum []interface{}) bool {
if assert.Contains(t, data, key) {
return assert.Equal(t, enum, data[key])
}
return false
}<|fim▁end|>
|
assertPattern(t, pt.items, "#/paths/~1some~1where~1{id}/get/parameters/1/items", "c[A-Za-z0-9]+")
assertPattern(t, pt.items, "#/paths/~1other~1place/post/responses/default/headers/Via/items", "[A-Za-z]+")
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
setup(name='uwosh.emergency.master',
version=version,
description="",
long_description=open("README.txt").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Framework :: Plone",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='',
author='Nathan Van Gheem',
author_email='[email protected]',
url='http://svn.plone.org/svn/plone/plone.example',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['uwosh', 'uwosh.emergency'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'uwosh.simpleemergency>=1.1',
'rsa'
],
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)<|fim▁end|>
|
from setuptools import setup, find_packages
import os
version = '0.5'
|
<|file_name|>57-add-parent-numbered-lists.ts<|end_file_name|><|fim▁begin|>// Numbered lists - Add parent number in sub number
// Import from 'docx' rather than '../build' if you install from npm
import * as fs from "fs";
import { AlignmentType, convertInchesToTwip, Document, HeadingLevel, LevelFormat, Packer, Paragraph } from "../build";
const doc = new Document({
numbering: {
config: [
{
levels: [
{
level: 0,
format: LevelFormat.DECIMAL,
text: "%1",
alignment: AlignmentType.START,
style: {
paragraph: {
indent: { left: convertInchesToTwip(0.5), hanging: 260 },
},
},
},
{
level: 1,
format: LevelFormat.DECIMAL,
text: "%1.%2",
alignment: AlignmentType.START,
style: {
paragraph: {
indent: { left: 1.25 * convertInchesToTwip(0.5), hanging: 1.25 * 260 },
},
run: {
bold: true,
size: 18,
font: "Times New Roman",
},
},
},
],
reference: "my-number-numbering-reference",
},
],
},
sections: [
{
children: [
new Paragraph({
text: "How to make cake",
heading: HeadingLevel.HEADING_1,
}),
new Paragraph({
text: "Step 1 - Add sugar",
numbering: {
reference: "my-number-numbering-reference",
level: 0,
},
}),
new Paragraph({
text: "Step 2 - Add wheat",
numbering: {
reference: "my-number-numbering-reference",
level: 0,
},
}),
new Paragraph({
text: "Step 2a - Stir the wheat in a circle",
numbering: {
reference: "my-number-numbering-reference",
level: 1,<|fim▁hole|> numbering: {
reference: "my-number-numbering-reference",
level: 0,
},
}),
new Paragraph({
text: "How to make cake",
heading: HeadingLevel.HEADING_1,
}),
],
},
],
});
Packer.toBuffer(doc).then((buffer) => {
fs.writeFileSync("My Document.docx", buffer);
});<|fim▁end|>
|
},
}),
new Paragraph({
text: "Step 3 - Put in oven",
|
<|file_name|>D3.java<|end_file_name|><|fim▁begin|>/**********************************************************************
Copyright (c) 2005 Erik Bengtson and others.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.samples.metadata.datastoreidentity;
public class D3
{
private String name;
/**
* @return Returns the name.
*/
public String getName()
{
return name;
}
<|fim▁hole|> public void setName(String name)
{
this.name = name;
}
}<|fim▁end|>
|
/**
* @param name The name to set.
*/
|
<|file_name|>TestEmitter.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
showSectionRequest($scope, sectionId) {
var payload = {
id: sectionId
};
return payload;
}
}<|fim▁end|>
|
export class TestEmitter {
|
<|file_name|>IUNegotiationLookupableHelperServiceImpl.java<|end_file_name|><|fim▁begin|>package org.kuali.kra.negotiations.lookup;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.kuali.coeus.common.framework.custom.attr.CustomAttribute;
import org.kuali.coeus.common.framework.person.KcPerson;
import org.kuali.kra.negotiations.bo.Negotiation;
import org.kuali.kra.negotiations.customdata.NegotiationCustomData;
import org.kuali.rice.kns.lookup.HtmlData.AnchorHtmlData;
import org.kuali.rice.kns.web.struts.form.LookupForm;
import org.kuali.rice.kns.web.ui.Column;
import org.kuali.rice.kns.web.ui.Field;
import org.kuali.rice.kns.web.ui.ResultRow;
import org.kuali.rice.kns.web.ui.Row;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.lookup.CollectionIncomplete;
import org.kuali.rice.krad.util.BeanPropertyComparator;
import org.kuali.coeus.common.framework.person.KcPersonService;
import org.kuali.coeus.sys.framework.util.CollectionUtils;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import edu.iu.uits.kra.negotiations.lookup.IUNegotiationDaoOjb;
/**
* Negotiation Lookup Helper Service
*/
public class IUNegotiationLookupableHelperServiceImpl extends NegotiationLookupableHelperServiceImpl {
private static final long serialVersionUID = -7144337780492481726L;
private static final String USER_ID = "userId";
private NegotiationDao negotiationDao;
private KcPersonService kcPersonService;
@SuppressWarnings("unchecked")
@Override
public List<? extends BusinessObject> getSearchResults(Map<String, String> fieldValues) {
super.setBackLocationDocFormKey(fieldValues);
if (this.getParameters().containsKey(USER_ID)) {
fieldValues.put("associatedNegotiable.piId", ((String[]) this.getParameters().get(USER_ID))[0]);
fieldValues.put("negotiatorPersonId", ((String[]) this.getParameters().get(USER_ID))[0]);
}
List<Long> ids = null;
/* Begin IU Customization */
//UITSRA-2543
Map<String, String> formProps = new HashMap<String, String>();
if (!StringUtils.isEmpty(fieldValues.get("sponsorAwardNumber"))
&& !StringUtils.equals("*", fieldValues.get("sponsorAwardNumber").trim())) {
formProps.put("value", fieldValues.get("sponsorAwardNumber"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "SPON_AWD_ID"));
ids = getCustomDataIds(formProps, null);
}
fieldValues.remove("sponsorAwardNumber");
//UITSRA-2893, UITSRA-2894
if (!StringUtils.isEmpty(fieldValues.get("gsTeam")) && !StringUtils.equals("*", fieldValues.get("gsTeam").trim())) {
formProps.put("value", fieldValues.get("gsTeam"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "gsTeam"));
ids = getCustomDataIds(formProps, ids);
}
if (!StringUtils.isEmpty(fieldValues.get("recordResidesWith")) && !StringUtils.equals("*", fieldValues.get("recordResidesWith").trim())) {
formProps.put("value", fieldValues.get("recordResidesWith"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "recordLocation"));
ids = getCustomDataIds(formProps, ids);
}
if (!StringUtils.isEmpty(fieldValues.get("accountId")) && !StringUtils.equals("*", fieldValues.get("accountId").trim())) {
formProps.put("value", fieldValues.get("accountId"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "accountID"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("recordResidesWith");
fieldValues.remove("gsTeam");
fieldValues.remove("accountId");
//End of UITSRA-2893, UITSRA-2894
// UITSRA-4218
if (!StringUtils.isEmpty(fieldValues.get("contractDate")) && !StringUtils.equals("*", fieldValues.get("contractDate").trim())) {
formProps.put("value", fieldValues.get("contractDate"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "contractDate"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("contractDate");
// End of UITSRA-4218
// UITSRA-3190 - Add Person Lookup capability to Search screens
List<Long> piNegotiationIds = null;
if (fieldValues.containsKey("associatedNegotiable.principalInvestigatorUserName")) {
String piUserName = fieldValues.get("associatedNegotiable.principalInvestigatorUserName");
if (StringUtils.isNotBlank(piUserName)) {
// UITSRA-3477
if (StringUtils.contains(piUserName, "*")) {
piUserName = StringUtils.remove(piUserName, '*');
}
// End of UITSRA-3477
KcPerson person = getKcPersonService().getKcPersonByUserName(piUserName);
if (person != null && person.getPersonId() != null) {
piNegotiationIds = new ArrayList<Long>(((IUNegotiationDaoOjb) getNegotiationDao()).getNegotiationIdsByPI(person.getPersonId()));
if (piNegotiationIds.size() > 0) {
if (fieldValues.containsKey("negotiationId") && StringUtils.isNotBlank(fieldValues.get("negotiationId"))) {
String regex = "[0-9]+";
String negotiationId = fieldValues.get("negotiationId");
if (negotiationId.matches(regex)) {
if (!piNegotiationIds.contains(new Long(negotiationId))) {
return new ArrayList<Negotiation>();
}
}
}
else {
fieldValues.put("negotiationId", StringUtils.join(piNegotiationIds, '|'));
}
}
else {
fieldValues.put("associatedDocumentId", "Invalid PI Id");
}
}
else {
fieldValues.put("associatedDocumentId", "Invalid PI Id");
}
}
fieldValues.remove("associatedNegotiable.principalInvestigatorUserName");
}
// End of UITSRA-3190
// UITSRA-3191 - Change Negotiator Lookup field in Negotiation search options
KcPerson negotiator = null;
if (fieldValues.containsKey("negotiatorUserName") && StringUtils.isNotBlank(fieldValues.get("negotiatorUserName")) ) {
negotiator = getKcPersonService().getKcPersonByUserName(fieldValues.get("negotiatorUserName"));
if (negotiator != null && StringUtils.isNotBlank(negotiator.getPersonId())) {
fieldValues.put("negotiatorPersonId", negotiator.getPersonId());
}
else {
fieldValues.put("negotiatorPersonId", "Invalid Negotiator Person Id");
}
fieldValues.remove("negotiatorUserName");
}
// End of UITSRA-3191
// UITSRA-3761 - Update Negotiation Search Options and Results
List<Long> subAwardNegotiationIds = null;
if (fieldValues.containsKey("associatedNegotiable.requisitionerUserName")) {
String requisitionerUserName = fieldValues.get("associatedNegotiable.requisitionerUserName");
if (StringUtils.isNotBlank(requisitionerUserName)) {
if (StringUtils.contains(requisitionerUserName, "*")) {
requisitionerUserName = StringUtils.remove(requisitionerUserName, '*');
}
KcPerson person = getKcPersonService().getKcPersonByUserName(requisitionerUserName);
if (person != null && person.getPersonId() != null) {
subAwardNegotiationIds = new ArrayList<Long>(((IUNegotiationDaoOjb) getNegotiationDao()).getNegotiationIdsByRequisitioner(person.getPersonId()));
if (subAwardNegotiationIds.size() > 0) {
if (fieldValues.containsKey("negotiationId") && StringUtils.isNotBlank(fieldValues.get("negotiationId"))) {
String regex = "[0-9]+";
String negotiationId = fieldValues.get("negotiationId");
if (negotiationId.matches(regex)) {
if (!subAwardNegotiationIds.contains(new Long(negotiationId))) {
return new ArrayList<Negotiation>();
}
}
}
else {
fieldValues.put("negotiationId", StringUtils.join(subAwardNegotiationIds, '|'));
}
}
else {
fieldValues.put("associatedDocumentId", "Invalid PI Id");
}
}
else {
fieldValues.put("associatedDocumentId", "Invalid PI Id");
}
}
fieldValues.remove("associatedNegotiable.requisitionerUserName");
fieldValues.remove("associatedNegotiable.subAwardRequisitionerId");
}
if (!StringUtils.isEmpty(fieldValues.get("modification_id")) && !StringUtils.equals("*", fieldValues.get("modification_id").trim())) {
formProps.put("value", fieldValues.get("modification_id"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "MOD_NUM"));
ids = getCustomDataIds(formProps, ids);
}
if (!StringUtils.isEmpty(fieldValues.get("proposalDocID")) && !StringUtils.equals("*", fieldValues.get("proposalDocID").trim())) {
formProps.put("value", fieldValues.get("proposalDocID"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "proposalDocID"));
ids = getCustomDataIds(formProps, ids);
}
if (!StringUtils.isEmpty(fieldValues.get("ipid")) && !StringUtils.equals("*", fieldValues.get("ipid").trim())) {
formProps.put("value", fieldValues.get("ipid"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "CSU_REF_NUM"));
ids = getCustomDataIds(formProps, ids);
}
if (!StringUtils.isEmpty(fieldValues.get("proposalType")) && !StringUtils.equals("*", fieldValues.get("proposalType").trim())) {
formProps.put("value", fieldValues.get("proposalType"));
formProps.put("customAttributeId", getCustomAttributeId("Grant Services Negotiations", "proposalType"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("modification_id");
fieldValues.remove("proposalDocID");
fieldValues.remove("ipid");
fieldValues.remove("proposalType");
// End of UITSRA-3761
if (!StringUtils.isEmpty(fieldValues.get("ricroCleared")) ) {
formProps.put("value", fieldValues.get("ricroCleared"));
formProps.put("customAttributeId", getCustomAttributeId("SP Office Negotiations", "RICRO_CLEARED"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("ricroCleared");
if (!StringUtils.isEmpty(fieldValues.get("coiCleared")) ) {
formProps.put("value", fieldValues.get("coiCleared"));
formProps.put("customAttributeId", getCustomAttributeId("SP Office Negotiations", "COI_CLEARED"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("coiCleared");
if (!StringUtils.isEmpty(fieldValues.get("proposalActionType")) ) {
formProps.put("value", fieldValues.get("proposalActionType"));
formProps.put("customAttributeId", getCustomAttributeId("SP Office Negotiations", "PROP_ACTION_TYPE"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("proposalActionType");
if (!StringUtils.isEmpty(fieldValues.get("csuRefNum")) && !StringUtils.equals("*", fieldValues.get("csuRefNum").trim())) {
formProps.put("value", fieldValues.get("csuRefNum"));
formProps.put("customAttributeId", getCustomAttributeId("All Negotiations", "CSU_REF_NUM"));
ids = getCustomDataIds(formProps, ids);
}
fieldValues.remove("csuRefNum");
/* CSU customization custom data arg search fix */
fieldValues.put("negotiationCustomDataList.negotiationCustomDataId", StringUtils.join(ids, '|'));
/* End IU Customization */
// UITSRA-3138
// In class LookupDaoOjb.java (method addCriteria()), a String data type is required in order to create the
// search criteria for a Negotiation Id wild card search. Currently negotiationId is Long rather than String,
// which is not consistent with other KC modules like Award, IP etc. The ideal fix is to change the Negotiation Id's
// data type from Long to String, but it requires a major design change on the foundation side.
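// Illustrative sketch of the workaround (hypothetical values, not project
// code): the wildcard is resolved up front into concrete Long ids, which are
// then handed to the criteria builder as one '|'-separated string, e.g.
// List<Long> ids = Arrays.asList(42L, 43L);
// fieldValues.put("negotiationId", StringUtils.join(ids, '|')); // "42|43"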
List<Long> wildcardNegotiationIds = null;
if (fieldValues.containsKey("negotiationId") && fieldValues.get("negotiationId").contains("*")) {
wildcardNegotiationIds = new ArrayList<Long>(((IUNegotiationDaoOjb) getNegotiationDao()).getNegotiationIdsWithWildcard(fieldValues.get("negotiationId")));<|fim▁hole|> }
List<Negotiation> searchResults = new ArrayList<Negotiation>();
CollectionIncomplete<Negotiation> limitedSearchResults;
// UITSRA-3138
if (wildcardNegotiationIds == null || wildcardNegotiationIds.size() != 0 ||
piNegotiationIds == null || piNegotiationIds.size() != 0) {
// UITSRA-4033
limitedSearchResults = (CollectionIncomplete<Negotiation>) getNegotiationDao().getNegotiationResults(fieldValues);
searchResults.addAll(limitedSearchResults);
List defaultSortColumns = getDefaultSortColumns();
if (defaultSortColumns.size() > 0) {
org.kuali.coeus.sys.framework.util.CollectionUtils.sort(searchResults, new BeanPropertyComparator(defaultSortColumns, true)); //UITSRA-4320
return new CollectionIncomplete<Negotiation>(searchResults, limitedSearchResults.getActualSizeIfTruncated());
}
return limitedSearchResults;
}
return searchResults;
}
/**
* @see org.kuali.rice.kns.lookup.AbstractLookupableHelperServiceImpl#getRows()
*/
@Override
public List<Row> getRows() {
List<Row> rows = super.getRows();
for (Row row : rows) {
for (Field field : row.getFields()) {
if (field.getPropertyName().equals("negotiatorUserName")) {
field.setFieldConversions("principalName:negotiatorUserName,principalId:negotiatorPersonId");
}
if (field.getPropertyName().equals("associatedNegotiable.principalInvestigatorUserName")) {
field.setFieldConversions("principalName:associatedNegotiable.principalInvestigatorUserName,principalId:associatedNegotiable.principalInvestigatorPersonId");
}
if (field.getPropertyName().equals("associatedNegotiable.requisitionerUserName")) {
field.setFieldConversions("principalName:associatedNegotiable.requisitionerUserName,principalId:associatedNegotiable.subAwardRequisitionerId");
}
}
}
return rows;
}
public KcPersonService getKcPersonService() {
if (this.kcPersonService == null) {
this.kcPersonService = KcServiceLocator.getService(KcPersonService.class);
}
return this.kcPersonService;
}
/* Begin IU Customization */
public String getCustomAttributeId(String groupName, String attributeName) {
Map<String, String> fieldValues = new HashMap<String, String>();
fieldValues.put("groupName", groupName);
fieldValues.put("name", attributeName);
List<CustomAttribute> customAttributes = (List<CustomAttribute>) getBusinessObjectService().findMatching(CustomAttribute.class, fieldValues);
if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(customAttributes)) {
return customAttributes.get(0).getId().toString();
}
else {
return null;
}
}
/**
* Call's the super class's performLookup function and edits the URLs for the unit name, unit number, sponsor name, subAwardOrganization name, and pi name.
* @see org.kuali.kra.lookup.KraLookupableHelperServiceImpl#performLookup(LookupForm, Collection, boolean)
*/
@Override
public Collection performLookup(LookupForm lookupForm, Collection resultTable, boolean bounded) {
final String leadUnitName = "associatedNegotiable.leadUnitName";
final String leadUnitNumber = "associatedNegotiable.leadUnitNumber";
final String sponsorName = "associatedNegotiable.sponsorName";
final String piName = "associatedNegotiable.piName";
final String subAwardOrganizationName = "associatedNegotiable.subAwardOrganizationName";
Collection lookupStuff = super.performLookup(lookupForm, resultTable, bounded);
Iterator i = resultTable.iterator();
while (i.hasNext()) {
ResultRow row = (ResultRow) i.next();
for (Column column : row.getColumns()) {
//the Subaward Organization name, unit name, pi Name and sponsor name don't need to generate links.
if (StringUtils.equalsIgnoreCase(column.getPropertyName(), leadUnitName)
|| StringUtils.equalsIgnoreCase(column.getPropertyName(), sponsorName)
|| StringUtils.equalsIgnoreCase(column.getPropertyName(), subAwardOrganizationName)
|| StringUtils.equalsIgnoreCase(column.getPropertyName(), piName)) {
column.setPropertyURL("");
for (AnchorHtmlData data : column.getColumnAnchors()) {
if (data != null) {
data.setHref("");
}
}
}
if (StringUtils.equalsIgnoreCase(column.getPropertyName(), leadUnitNumber)){
String unitNumber = column.getPropertyValue();
//String newUrl = "http://127.0.0.1:8080/kc-dev/kr/inquiry.do?businessObjectClassName=org.kuali.kra.bo.Unit&unitNumber=" + unitNumber + "&methodToCall=start";
String newUrl = "inquiry.do?businessObjectClassName=org.kuali.kra.bo.Unit&unitNumber=" + unitNumber + "&methodToCall=start";
column.setPropertyURL(newUrl);
for (AnchorHtmlData data : column.getColumnAnchors()) {
if (data != null) {
data.setHref(newUrl);
}
}
}
}
}
return lookupStuff;
}
/* End IU Customization */
protected List<Long> getCustomDataIds(Map<String, String> formProps, List<Long> commonIds) {
List<Long> ids = null;
// UITSRA-3138
Collection<NegotiationCustomData> customDatas = getLookupService().findCollectionBySearchUnbounded(NegotiationCustomData.class, formProps);
if (!customDatas.isEmpty()) {
ids = new ArrayList<Long>();
for (NegotiationCustomData customData : customDatas) {
ids.add(customData.getNegotiationCustomDataId());
}
}
if (commonIds != null && ids !=null ) {
ids.retainAll(commonIds);
}
return ids;
}
}<|fim▁end|>
|
fieldValues.put("negotiationId", StringUtils.join(wildcardNegotiationIds, '|'));
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io::BufReader;
use std::io::BufWriter;
use std::io::prelude::*;
use std::fs::File;
fn main() {
let mut input_path: String = String::new();
let mut output_path = String::new();
let mut input = std::io::stdin();
let mut vec = vec![String::from("<page>"),String::from("</page>")];
println!("Enter the path of the input file: ");
input.read_line(&mut input_path).unwrap();
let input_path = input_path.trim();
let file = File::open(&input_path).expect("Failed to open file");
println!("Enter the path of the output file: ");
input.read_line(&mut output_path).unwrap();
let output_path = output_path.trim();
let n_file = File::create(&output_path).expect("Failed to create file");
get_options(&mut input, &mut vec);
let reader = BufReader::new(file);
let mut writer = BufWriter::new(n_file);<|fim▁hole|> if should_write(&line, &mut in_text, &vec) {
writer.write(line.as_bytes()).unwrap();
writer.write(b"\n").unwrap();
}
}
}
fn should_write(x: &str, in_text: &mut bool, list : &Vec<String>) -> bool {
if x.contains("<text") {
*in_text = true;
} else if x.contains("</text>") {
*in_text = false;
}
list.iter().any(|tag| x.contains(tag))
}
fn get_options(input: &mut std::io::Stdin, vec : &mut Vec<String>) -> () {
let mut option = String::new();
println!("Capture title? y/n?");
input.read_line(&mut option).unwrap();
if option.trim().eq("y") {
println!("here");
vec.push(String::from("<title>"));
}
option.clear();
println!("Capture redirect? y/n?");
input.read_line(&mut option).unwrap();
if option.trim().eq("y") {
vec.push(String::from("<redirect"));
}
option.clear();
println!("Capture timestamp? y/n?");
input.read_line(&mut option).unwrap();
if option.trim().eq("y") {
vec.push(String::from("<timestamp>"));
}
option.clear();
println!("Capture username? y/n?");
input.read_line(&mut option).unwrap();
if option.trim().eq("y") {
vec.push(String::from("<contributor>"));
vec.push(String::from("</contributor>"));
vec.push(String::from("<username>"));
}
option.clear();
println!("Capture text? y/n?");
input.read_line(&mut option).unwrap();
if option.trim().eq("y") {
vec.push(String::from("<text"));
vec.push(String::from("</text>"));
}
}<|fim▁end|>
|
let mut in_text = false;
let mut line_iter = reader.lines();
while let Some(l1) = line_iter.next(){
let line = l1.unwrap();
|
<|file_name|>writer.rs<|end_file_name|><|fim▁begin|>//! Formats a DOM structure to a Write
//!
//! ### Example
//! ```
//! use sxd_document::Package;
//! use sxd_document::writer::format_document;
//!
//! let package = Package::new();
//! let doc = package.as_document();
//!
//! let hello = doc.create_element("hello");
//! hello.set_attribute_value("planet", "Earth");
//! doc.root().append_child(hello);
//!
//! let mut output = Vec::new();
//! format_document(&doc, &mut output).expect("unable to output XML");
//! ```
//!
//! ### Potential options to support
//!
//! - Space before `/>`
//! - Single vs double quotes
//! - Fixed ordering of attributes
use std::{
borrow::ToOwned,
io::{self, Write},
slice,
};
use self::Content::*;
use super::{
str_ext::{SplitKeepingDelimiterExt, SplitType},
QName,
};
use super::{
dom,
dom::{ChildOfElement, ChildOfRoot},
lazy_hash_map::LazyHashMap,
};
trait WriteStr: Write {
fn write_str(&mut self, s: &str) -> io::Result<()> {
self.write_all(s.as_bytes())
}
}
impl<W: ?Sized> WriteStr for W where W: Write {}
// TODO: Duplicating the String seems inefficient...
struct PrefixScope<'d> {
ns_to_prefix: LazyHashMap<&'d str, String>,
prefix_to_ns: LazyHashMap<String, &'d str>,
defined_prefixes: Vec<(String, &'d str)>,
default_namespace_uri: Option<&'d str>,
}
impl<'d> PrefixScope<'d> {
fn new() -> PrefixScope<'d> {
PrefixScope {
ns_to_prefix: LazyHashMap::new(),
prefix_to_ns: LazyHashMap::new(),
defined_prefixes: Vec::new(),
default_namespace_uri: None,
}
}
fn has_prefix(&self, prefix: &str) -> bool {
self.prefix_to_ns.contains_key(prefix)
}
fn has_namespace_uri(&self, namespace_uri: &str) -> bool {
self.ns_to_prefix.contains_key(namespace_uri)
}
fn prefix_is(&self, prefix: &str, namespace_uri: &str) -> bool {
match self.prefix_to_ns.get(prefix) {
Some(ns) => *ns == namespace_uri,
_ => false,
}
}
fn namespace_uri_for(&self, prefix: &str) -> Option<&'d str> {
self.prefix_to_ns.get(prefix).cloned()
}
fn prefix_for(&self, namespace_uri: &str) -> Option<&str> {
self.ns_to_prefix.get(namespace_uri).map(|p| &p[..])
}
fn add_mapping(&mut self, prefix: &str, namespace_uri: &'d str) {
let prefix = prefix.to_owned();
self.prefix_to_ns.insert(prefix.clone(), namespace_uri);
self.ns_to_prefix.insert(namespace_uri, prefix);
}
fn define_prefix(&mut self, prefix: String, namespace_uri: &'d str) {
self.defined_prefixes.push((prefix, namespace_uri));
}
}
enum NamespaceType<'a> {
Default,
Prefix(&'a str),
Unknown,
}
struct PrefixMapping<'d> {
scopes: Vec<PrefixScope<'d>>,
generated_prefix_count: usize,
}
impl<'d> PrefixMapping<'d> {
fn new() -> PrefixMapping<'d> {
PrefixMapping {
scopes: vec![PrefixScope::new()],
generated_prefix_count: 0,
}
}
fn push_scope(&mut self) {
self.scopes.push(PrefixScope::new());
}
fn pop_scope(&mut self) {
self.scopes.pop();
}
fn active_default_namespace_uri(&self) -> Option<&'d str> {
self.scopes
.iter()
.rev()
.filter_map(|s| s.default_namespace_uri)
.next()
}
fn active_namespace_uri_for_prefix(&self, prefix: &str) -> Option<&'d str> {
self.scopes
.iter()
.rev()
.filter_map(|s| s.namespace_uri_for(prefix))
.next()
}
fn default_namespace_uri_in_current_scope(&self) -> Option<&'d str> {
self.scopes.last().unwrap().default_namespace_uri
}
fn prefixes_in_current_scope(&self) -> slice::Iter<'_, (String, &'d str)> {
self.scopes.last().unwrap().defined_prefixes.iter()
}
fn populate_scope(&mut self, element: &dom::Element<'d>, attributes: &[dom::Attribute<'d>]) {
self.scopes.last_mut().unwrap().default_namespace_uri = element.default_namespace_uri();
if let Some(prefix) = element.preferred_prefix() {
let name = element.name();
if let Some(uri) = name.namespace_uri {
self.set_prefix(prefix, uri);
}
}
for attribute in attributes.iter() {
if let Some(prefix) = attribute.preferred_prefix() {
let name = attribute.name();
if let Some(uri) = name.namespace_uri {
self.set_prefix(prefix, uri);
}
}
}
let name = element.name();
if let Some(uri) = name.namespace_uri {
self.generate_prefix(uri);
}
for attribute in attributes.iter() {
let name = attribute.name();
if let Some(uri) = name.namespace_uri {
self.generate_prefix(uri);
}
}
}
fn set_prefix(&mut self, prefix: &str, namespace_uri: &'d str) {
let idx_of_last = self.scopes.len().saturating_sub(1);
let (parents, current_scope) = self.scopes.split_at_mut(idx_of_last);
let current_scope = &mut current_scope[0];
// If we're already using this prefix, we can't redefine it.
if current_scope.has_prefix(prefix) {
return;
}
// We are definitely going to use this prefix, claim it
current_scope.add_mapping(prefix, namespace_uri);
for parent_scope in parents.iter().rev() {
if parent_scope.prefix_is(prefix, namespace_uri) {
// A parent defines it as the URI we want.
// Prevent redefining it
return;
}
}
// Defined by us, must be added to the element
current_scope.define_prefix(prefix.to_owned(), namespace_uri);
}
fn generate_prefix(&mut self, namespace_uri: &'d str) {
if Some(namespace_uri) == self.active_default_namespace_uri() {
// We already map this namespace to the default
return;
}
let idx_of_last = self.scopes.len().saturating_sub(1);
let (parents, current_scope) = self.scopes.split_at_mut(idx_of_last);
let current_scope = &mut current_scope[0];
if current_scope.has_namespace_uri(namespace_uri) {
// We already map this namespace to *some* prefix
return;
}
// Check if the parent already defined a prefix for this ns
for parent_scope in parents.iter().rev() {
if let Some(prefix) = parent_scope.prefix_for(namespace_uri) {
// A parent happens to have a prefix for this URI.
// Prevent redefining it
current_scope.add_mapping(prefix, namespace_uri);
return;
}
}
loop {
let prefix = format!("autons{}", self.generated_prefix_count);
self.generated_prefix_count += 1;
if !current_scope.has_prefix(&prefix) {
current_scope.add_mapping(&prefix, namespace_uri);
current_scope.define_prefix(prefix, namespace_uri);
break;
}
}
}
fn namespace_type<'a>(
&'a self,
preferred_prefix: Option<&'a str>,
namespace_uri: &str,
ignore_default: bool,
) -> NamespaceType<'a> {
if !ignore_default && Some(namespace_uri) == self.active_default_namespace_uri() {
return NamespaceType::Default;
}
if let Some(prefix) = preferred_prefix {
if Some(namespace_uri) == self.active_namespace_uri_for_prefix(prefix) {
return NamespaceType::Prefix(prefix);
}
}
for scope in self.scopes.iter().rev() {
if let Some(prefix) = scope.prefix_for(namespace_uri) {
return NamespaceType::Prefix(prefix);
}
}
NamespaceType::Unknown
}
}
enum Content<'d> {
Element(dom::Element<'d>),
ElementEnd(dom::Element<'d>),
Text(dom::Text<'d>),
Comment(dom::Comment<'d>),
ProcessingInstruction(dom::ProcessingInstruction<'d>),
}
/// Write a document, specifying some formatting options
///
/// For example, the default is to use single-quotes for attributes. To use
/// double quotes for attributes, you need to use `set_single_quotes(false)`.
///
/// ```
/// use sxd_document::{Package, writer::Writer};
///
/// // Create a new document
/// let p = Package::new();
/// let doc = p.as_document();
/// let el = doc.create_element("hello");
/// el.set_attribute_value("a", "b");
/// doc.root().append_child(el);
///
/// // Format the document as bytes
/// let mut output = Vec::new();
/// Writer::new().set_single_quotes(false).format_document(&doc, &mut output);
///
/// // Check that the output is correct
/// let output_string = String::from_utf8(output).unwrap();
/// assert_eq!(output_string, r#"<?xml version="1.0"?><hello a="b"/>"#);
/// ```
pub struct Writer {
single_quotes: bool,
write_encoding: bool,
}
impl Default for Writer {
fn default() -> Self {
Self {
single_quotes: true,
write_encoding: false,
}
}
}
impl Writer {
/// Create a new `Writer` with default settings.
pub fn new() -> Self {
Self::default()
}
/// Set whether single quotes should be used for writing a document.
pub fn set_single_quotes(mut self, single_quotes: bool) -> Self {
self.single_quotes = single_quotes;
self
}
/// Set whether the encoding should be specified in the output document header.
pub fn set_write_encoding(mut self, write_encoding: bool) -> Self {
self.write_encoding = write_encoding;
self
}
fn quote_char(&self) -> &'static str {
if self.single_quotes {
"'"
} else {
"\""
}
}
}
impl Writer {
fn format_qname<'d, W: ?Sized>(
&self,
q: QName<'d>,
mapping: &mut PrefixMapping<'d>,
preferred_prefix: Option<&str>,
ignore_default: bool,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
// Can something without a namespace be prefixed? No, because
// defining a prefix requires a non-empty URI
if let Some(namespace_uri) = q.namespace_uri {
match mapping.namespace_type(preferred_prefix, namespace_uri, ignore_default) {
NamespaceType::Default => {
// No need to do anything
}
NamespaceType::Prefix(prefix) => {
writer.write_str(prefix)?;
writer.write_str(":")?;
}
NamespaceType::Unknown => {
panic!("No namespace prefix available for {}", namespace_uri);
}
}
}
writer.write_str(q.local_part)
}
fn format_attribute_value<W: ?Sized>(&self, value: &str, writer: &mut W) -> io::Result<()>
where
W: Write,
{
for item in value
.split_keeping_delimiter(|c| c == '<' || c == '>' || c == '&' || c == '\'' || c == '"')
{
match item {
SplitType::Match(t) => writer.write_str(t)?,
SplitType::Delimiter("<") => writer.write_str("<")?,
SplitType::Delimiter(">") => writer.write_str(">")?,
SplitType::Delimiter("&") => writer.write_str("&")?,
SplitType::Delimiter("'") => writer.write_str("'")?,
SplitType::Delimiter("\"") => writer.write_str(""")?,
SplitType::Delimiter(..) => unreachable!(),
}
}
Ok(())
}
fn format_element<'d, W: ?Sized>(
&self,
element: dom::Element<'d>,
todo: &mut Vec<Content<'d>>,
mapping: &mut PrefixMapping<'d>,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
let attrs = element.attributes();
mapping.populate_scope(&element, &attrs);
writer.write_str("<")?;
self.format_qname(
element.name(),
mapping,
element.preferred_prefix(),
false,
writer,
)?;
for attr in &attrs {
writer.write_str(" ")?;
self.format_qname(attr.name(), mapping, attr.preferred_prefix(), true, writer)?;
write!(writer, "=")?;
write!(writer, "{}", self.quote_char())?;
self.format_attribute_value(attr.value(), writer)?;
write!(writer, "{}", self.quote_char())?;
}
if let Some(ns_uri) = mapping.default_namespace_uri_in_current_scope() {
writer.write_str(" xmlns='")?;
writer.write_str(ns_uri)?;
writer.write_str("'")?;
}
for &(ref prefix, ref ns_uri) in mapping.prefixes_in_current_scope() {
writer.write_str(" xmlns:")?;
writer.write_str(prefix)?;
write!(writer, "='{}'", ns_uri)?;
}
let mut children = element.children();
if children.is_empty() {
writer.write_str("/>")?;
mapping.pop_scope();
Ok(())
} else {
writer.write_str(">")?;
todo.push(ElementEnd(element));
children.reverse();
let x = children.into_iter().map(|c| match c {
ChildOfElement::Element(element) => Element(element),
ChildOfElement::Text(t) => Text(t),
ChildOfElement::Comment(c) => Comment(c),
ChildOfElement::ProcessingInstruction(p) => ProcessingInstruction(p),
});
todo.extend(x);
Ok(())
}
}
fn format_element_end<'d, W: ?Sized>(
&self,
element: dom::Element<'d>,
mapping: &mut PrefixMapping<'d>,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
writer.write_str("</")?;
self.format_qname(
element.name(),
mapping,
element.preferred_prefix(),
false,
writer,
)?;
writer.write_str(">")
}
fn format_text<W: ?Sized>(&self, text: dom::Text<'_>, writer: &mut W) -> io::Result<()>
where
W: Write,
{
for item in text
.text()
.split_keeping_delimiter(|c| c == '<' || c == '>' || c == '&')
{
match item {
SplitType::Match(t) => writer.write_str(t)?,
SplitType::Delimiter("<") => writer.write_str("<")?,
SplitType::Delimiter(">") => writer.write_str(">")?,
SplitType::Delimiter("&") => writer.write_str("&")?,
SplitType::Delimiter(..) => unreachable!(),
}
}
Ok(())
}
fn format_comment<W: ?Sized>(&self, comment: dom::Comment<'_>, writer: &mut W) -> io::Result<()>
where
W: Write,
{
write!(writer, "<!--{}-->", comment.text())
}
fn format_processing_instruction<W: ?Sized>(
&self,
pi: dom::ProcessingInstruction<'_>,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
match pi.value() {
None => write!(writer, "<?{}?>", pi.target()),
Some(v) => write!(writer, "<?{} {}?>", pi.target(), v),
}
}
fn format_one<'d, W: ?Sized>(
&self,
content: Content<'d>,
todo: &mut Vec<Content<'d>>,
mapping: &mut PrefixMapping<'d>,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
match content {
Element(e) => {
mapping.push_scope();
self.format_element(e, todo, mapping, writer)
}
ElementEnd(e) => {
let r = self.format_element_end(e, mapping, writer);
mapping.pop_scope();
r
}
Text(t) => self.format_text(t, writer),
Comment(c) => self.format_comment(c, writer),
ProcessingInstruction(p) => self.format_processing_instruction(p, writer),
}
}
fn format_body<W: ?Sized>(&self, element: dom::Element<'_>, writer: &mut W) -> io::Result<()>
where
W: Write,
{
let mut todo = vec![Element(element)];
let mut mapping = PrefixMapping::new();
while !todo.is_empty() {
self.format_one(todo.pop().unwrap(), &mut todo, &mut mapping, writer)?;
}
Ok(())
}
fn format_declaration<W: ?Sized>(&self, writer: &mut W) -> io::Result<()>
where
W: Write,
{
write!(
writer,
"<?xml version={}1.0{}",
self.quote_char(),
self.quote_char()
)?;
if self.write_encoding {
write!(
writer,
" encoding={}UTF-8{}",
self.quote_char(),
self.quote_char()
)?;
}
write!(writer, "?>")?;
Ok(())
}
/// Formats a document into a Write
pub fn format_document<'d, W: ?Sized>(
&self,
doc: &'d dom::Document<'d>,
writer: &mut W,
) -> io::Result<()>
where
W: Write,
{
self.format_declaration(writer)?;
for child in doc.root().children().into_iter() {
match child {
ChildOfRoot::Element(e) => self.format_body(e, writer),
ChildOfRoot::Comment(c) => self.format_comment(c, writer),
ChildOfRoot::ProcessingInstruction(p) => {
self.format_processing_instruction(p, writer)
}
}?
}
Ok(())
}
}
/// Formats a document into a `Write` using the default `Writer`
pub fn format_document<'d, W: ?Sized>(doc: &'d dom::Document<'d>, writer: &mut W) -> io::Result<()>
where
W: Write,
{
Writer::default().format_document(doc, writer)
}
#[cfg(test)]
mod test {
use super::{
super::{dom, Package},
Writer,
};
fn format_xml<'d>(doc: &'d dom::Document<'d>) -> String {
format_xml_writer(Writer::default(), doc)
}
fn format_xml_writer(writer: Writer, doc: &dom::Document<'_>) -> String {
let mut w = Vec::new();
writer.format_document(doc, &mut w).expect("Not formatted");
String::from_utf8(w).expect("Not a string")
}
#[test]
fn top_element() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello/>");
}<|fim▁hole|>
#[test]
fn element_with_namespace() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element(("namespace", "local-part"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><autons0:local-part xmlns:autons0='namespace'/>"
);
}
#[test]
fn element_with_default_namespace() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element(("namespace", "local-part"));
e.set_default_namespace_uri(Some("namespace"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><local-part xmlns='namespace'/>");
}
#[test]
fn element_with_preferred_namespace_prefix() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element(("namespace", "local-part"));
e.set_preferred_prefix(Some("prefix"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><prefix:local-part xmlns:prefix='namespace'/>"
);
}
#[test]
fn element_with_attributes() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
e.set_attribute_value("a", "b");
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello a='b'/>");
}
#[test]
fn element_with_attributes_double_quotes() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
e.set_attribute_value("a", "b");
d.root().append_child(e);
let xml = format_xml_writer(Writer::new().set_single_quotes(false), &d);
assert_eq!(xml, r#"<?xml version="1.0"?><hello a="b"/>"#);
}
#[test]
fn attribute_with_namespace() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
e.set_attribute_value(("namespace", "a"), "b");
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello autons0:a='b' xmlns:autons0='namespace'/>"
);
}
#[test]
fn attribute_with_preferred_namespace_prefix() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
let a = e.set_attribute_value(("namespace", "a"), "b");
a.set_preferred_prefix(Some("p"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello p:a='b' xmlns:p='namespace'/>"
);
}
#[test]
fn attribute_with_default_namespace_prefix() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element(("namespace", "hello"));
e.set_preferred_prefix(Some("p"));
e.set_default_namespace_uri(Some("namespace"));
e.set_attribute_value(("namespace", "a"), "b");
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello p:a='b' xmlns='namespace' xmlns:p='namespace'/>"
);
}
#[test]
fn attributes_with_conflicting_preferred_namespace_prefixes() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
let a = e.set_attribute_value(("namespace1", "a1"), "b1");
a.set_preferred_prefix(Some("p"));
let a = e.set_attribute_value(("namespace2", "a2"), "b2");
a.set_preferred_prefix(Some("p"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello p:a1='b1' autons0:a2='b2' xmlns:p='namespace1' xmlns:autons0='namespace2'/>");
}
#[test]
fn attributes_with_different_preferred_namespace_prefixes_for_same_namespace() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
let a = e.set_attribute_value(("namespace", "a1"), "b1");
a.set_preferred_prefix(Some("p1"));
let a = e.set_attribute_value(("namespace", "a2"), "b2");
a.set_preferred_prefix(Some("p2"));
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello p1:a1='b1' p2:a2='b2' xmlns:p1='namespace' xmlns:p2='namespace'/>");
}
#[test]
fn attribute_values_with_less_than_greater_than_ampersand_apostrophe_or_quote_are_escaped() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
e.set_attribute_value("name", r#"'1 < 2' & "4 > 3""#);
d.root().append_child(e);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello name=''1 < 2' & "4 > 3"'/>"
);
}
#[test]
fn nested_element() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("hello");
let world = d.create_element("world");
hello.append_child(world);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello><world/></hello>");
}
#[test]
fn nested_element_with_namespaces() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element(("outer", "hello"));
let world = d.create_element(("inner", "world"));
hello.append_child(world);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><autons0:hello xmlns:autons0='outer'><autons1:world xmlns:autons1='inner'/></autons0:hello>");
}
#[test]
fn nested_empty_element_with_namespaces() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element(("outer", "hello"));
hello.set_default_namespace_uri(Some("outer"));
hello.set_preferred_prefix(Some("o"));
let world = d.create_element("world");
world.set_default_namespace_uri(Some("inner"));
let empty = d.create_element("empty");
world.append_child(empty);
hello.append_child(world);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello xmlns='outer' xmlns:o='outer'><world xmlns='inner'><empty/></world></hello>");
}
#[test]
fn nested_element_with_namespaces_with_reused_namespaces() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element(("ns", "hello"));
let world = d.create_element(("ns", "world"));
hello.append_child(world);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><autons0:hello xmlns:autons0='ns'><autons0:world/></autons0:hello>");
}
#[test]
fn nested_element_with_with_conflicting_preferred_namespace_prefixes() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element(("outer", "hello"));
let world = d.create_element(("inner", "world"));
hello.set_preferred_prefix(Some("p"));
world.set_preferred_prefix(Some("p"));
hello.append_child(world);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><p:hello xmlns:p='outer'><p:world xmlns:p='inner'/></p:hello>"
);
}
#[test]
fn nested_text() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("hello");
let text = d.create_text("A fine day to you!");
hello.append_child(text);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello>A fine day to you!</hello>"
);
}
#[test]
fn text_escapes_less_than_greater_than_and_ampersand() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("escaped");
let text = d.create_text("1 < 3 & 4 > 2");
hello.append_child(text);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><escaped>1 < 3 & 4 > 2</escaped>"
);
}
#[test]
fn nested_comment() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("hello");
let comment = d.create_comment(" Fill this in ");
hello.append_child(comment);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello><!-- Fill this in --></hello>"
);
}
#[test]
fn nested_processing_instruction_without_value() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("hello");
let pi = d.create_processing_instruction("display", None);
hello.append_child(pi);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><hello><?display?></hello>");
}
#[test]
fn nested_processing_instruction_with_value() {
let p = Package::new();
let d = p.as_document();
let hello = d.create_element("hello");
let pi = d.create_processing_instruction("display", Some("screen"));
hello.append_child(pi);
d.root().append_child(hello);
let xml = format_xml(&d);
assert_eq!(
xml,
"<?xml version='1.0'?><hello><?display screen?></hello>"
);
}
#[test]
fn top_level_comment() {
let p = Package::new();
let d = p.as_document();
let comment = d.create_comment(" Fill this in ");
d.root().append_child(comment);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><!-- Fill this in -->");
}
#[test]
fn top_level_processing_instruction() {
let p = Package::new();
let d = p.as_document();
let pi = d.create_processing_instruction("display", None);
d.root().append_child(pi);
let xml = format_xml(&d);
assert_eq!(xml, "<?xml version='1.0'?><?display?>");
}
#[test]
fn declaration_with_encoding() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
d.root().append_child(e);
let xml = format_xml_writer(Writer::new().set_write_encoding(true), &d);
assert_eq!(xml, "<?xml version='1.0' encoding='UTF-8'?><hello/>");
}
#[test]
fn declaration_with_encoding_and_double_quotes() {
let p = Package::new();
let d = p.as_document();
let e = d.create_element("hello");
d.root().append_child(e);
let xml = format_xml_writer(
Writer::new()
.set_write_encoding(true)
.set_single_quotes(false),
&d,
);
assert_eq!(xml, r#"<?xml version="1.0" encoding="UTF-8"?><hello/>"#);
}
}<|fim▁end|>
| |
<|file_name|>allocator.rs<|end_file_name|><|fim▁begin|>use core::ptr::RawPtr;
use core::mem::transmute;
use core::ptr::{set_memory, copy_memory};
use core::intrinsics::offset;
use core::intrinsics::ctlz32;
//use core::cmp::expect;
use util::bitv::Bitv;
#[repr(u8)]
enum Node {
UNUSED = 0,
USED = 1,
SPLIT = 2,
FULL = 3
}
pub trait Allocator {
fn alloc(&mut self, size: uint) -> (*mut u8, uint);
fn zero_alloc(&mut self, s: uint) -> (*mut u8, uint) {<|fim▁hole|>
fn realloc(&mut self, src: *mut u8, size: uint) -> (*mut u8, uint) {
self.free(src);
let (ptr, sz) = self.alloc(size);
unsafe { copy_memory(ptr, src as *const u8, sz); }
(ptr, sz)
}
fn free(&mut self, ptr: *mut u8);
}
pub struct BuddyAlloc {
pub order: uint,
pub tree: Bitv
}
pub struct Alloc {
pub parent: BuddyAlloc,
pub base: *mut u8,
pub el_size: uint
}
impl BuddyAlloc {
pub fn new(order: uint, storage: Bitv) -> BuddyAlloc {
storage.clear(1 << (order + 1));
BuddyAlloc { order: order, tree: storage }
}
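    // Map tree node `index` at height `level` to the block offset it covers:
    // node i at level L starts at offset (i + 1 - 2^(order - L)) << L.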
#[inline]
fn offset(&self, index: uint, level: uint) -> uint {
(index + 1 - (1 << self.order >> level)) << level
}
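    // Find a free block of at least `size` units; returns (offset, actual
    // size), or (0, 0) when the search climbs back to the root without a fit.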
fn alloc(&mut self, mut size: uint) -> (uint, uint) {
if size == 0 {
size = 1;
}
// smallest power of 2 >= size
let lg2_size = 32 - unsafe { ctlz32(size as u32 - 1) } as uint;
let mut index = 0; // points to current tree node
let mut level = self.order; // current height
loop {
match (self.get(index), level == lg2_size) {
(UNUSED, true) => {
// Found appropriate unused node
self.set(index, USED); // use
let mut parent = index;
loop {
let buddy = parent - 1 + (parent & 1) * 2;
match self.get(buddy) {
USED | FULL if parent > 0 => {
parent = (parent + 1) / 2 - 1;
self.set(parent, FULL);
}
_ => break
}
}
return (
self.offset(index, level),
1 << lg2_size
);
}
(UNUSED, false) => {
// This large node is unused, split it!
self.set(index, SPLIT);
self.set(index*2 + 1, UNUSED);
self.set(index*2 + 2, UNUSED);
index = index * 2 + 1; // left child
level -= 1;
}
(SPLIT, false) => {
// Traverse children
index = index * 2 + 1; // left child
level -= 1;
}
_ => loop {
// Go either right or back up
if index & 1 == 1 {
// right sibling
index += 1;
break;
}
// go up by one level
level += 1;
if index == 0 {
// out of memory -- back at tree's root after traversal
return (0, 0);
}
index = (index + 1) / 2 - 1; // parent
}
}
}
}
fn free(&mut self, offset: uint) {
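        // Walk from the root toward the block at `offset`; mark its USED
        // node free again and merge buddy pairs back up the tree.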
let mut length = 1 << self.order;
let mut left = 0;
let mut index = 0;
loop {
match self.get(index) {
UNUSED => return,
USED => loop {
if index == 0 {
self.set(0, UNUSED);
return;
}
let buddy = index - 1 + (index & 1) * 2;
match self.get(buddy) {
UNUSED => {}
_ => {
self.set(index, UNUSED);
loop {
let parent = (index + 1) / 2 - 1; // parent
match self.get(parent) {
FULL if index > 0 => {
self.set(parent, SPLIT);
}
_ => return
}
index = parent;
}
}
}
index = (index + 1) / 2 - 1; // parent
},
_ => {
length /= 2;
if offset < left + length {
index = index * 2 + 1; // left child
}
else {
left += length;
index = index * 2 + 2; // right child
}
}
}
}
}
fn get(&self, i: uint) -> Node {
unsafe {
transmute(self.tree.get(i))
}
}
fn set(&self, i: uint, x: Node) {
self.tree.set(i, x as u8);
}
}
impl Allocator for Alloc {
fn alloc(&mut self, size: uint) -> (*mut u8, uint) {
let (offset, size) = self.parent.alloc(size);
unsafe {
return (
self.base.offset((offset << self.el_size) as int),
size << self.el_size
)
}
}
fn free(&mut self, ptr: *mut u8) {
let length = 1 << self.parent.order << self.el_size;
unsafe {
if ptr < self.base || ptr >= self.base.offset(length) {
return;
}
}
let offset = (ptr as uint - self.base as uint) >> self.el_size;
self.parent.free(offset);
}
}
impl Alloc {
pub fn new(parent: BuddyAlloc, base: *mut u8, el_size: uint) -> Alloc {
Alloc { parent: parent, base: base, el_size: el_size }
}
}<|fim▁end|>
|
let (ptr, size) = self.alloc(s);
unsafe { set_memory(ptr, 0, size); }
(ptr, size)
}
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>// Copyright 2015 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import {util} from '@google-cloud/common';
import {CallbackifyAllOptions} from '@google-cloud/promisify';
import arrify = require('arrify');
import * as assert from 'assert';
import {describe, it, beforeEach, before} from 'mocha';
import * as extend from 'extend';
import * as proxyquire from 'proxyquire';
import {
Logging as LOGGING,
LoggingOptions,
GetLogsRequest,
Log,
CreateSinkRequest,
GetSinksRequest,
Sink,
} from '../src/index';
import {Duplex, PassThrough} from 'stream';
import {Policy} from '@google-cloud/pubsub';
import {GetEntriesRequest} from '../src/log';
import {Dataset} from '@google-cloud/bigquery';
import {Bucket} from '@google-cloud/storage';
import * as metadata from '../src/utils/metadata';
import * as sinon from 'sinon';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const {v2} = require('../src');
const version = require('../../package.json').version;
interface AbortableDuplex extends Duplex {
cancel: Function;
abort: Function;
}
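// Object-mode PassThrough standing in for an abortable GAX stream in tests.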
const through = () =>
new PassThrough({objectMode: true}) as {} as AbortableDuplex;
const noop = () => {};
let extended = false;
const fakePaginator = {
paginator: {
extend(klass: Function, methods: string[]) {
if (klass.name !== 'Logging') {
return;
}
extended = true;
methods = arrify(methods);
assert.deepStrictEqual(methods, ['getEntries', 'getLogs', 'getSinks']);
},
streamify(methodName: string) {
return methodName;
},
},
};
let googleAuthOverride: Function | null;
function fakeGoogleAuth() {
// eslint-disable-next-line prefer-spread,prefer-rest-params
return (googleAuthOverride || noop).apply(null, arguments);
}
let isCustomTypeOverride: Function | null;
let callbackified = false;
let replaceProjectIdTokenOverride: Function | null;
const fakeUtil = extend({}, util, {
isCustomType() {
if (isCustomTypeOverride) {
// eslint-disable-next-line prefer-spread,prefer-rest-params
return isCustomTypeOverride.apply(null, arguments);
}
return false;
},
});
const fakeCallbackify = {
callbackifyAll(c: Function, options: CallbackifyAllOptions) {
if (c.name !== 'Logging') {
return;
}
callbackified = true;
assert.deepStrictEqual(options.exclude, ['request']);
},
};
const fakeProjectify = {
replaceProjectIdToken(reqOpts: {}) {
if (replaceProjectIdTokenOverride) {
// eslint-disable-next-line prefer-spread,prefer-rest-params
return replaceProjectIdTokenOverride.apply(null, arguments);
}
return reqOpts;
},
};
const originalFakeUtil = extend(true, {}, fakeUtil);
function fakeV2() {}
class FakeEntry {
calledWith_: IArguments;
constructor() {
// eslint-disable-next-line prefer-rest-params
this.calledWith_ = arguments;
}
static fromApiResponse_() {
// eslint-disable-next-line prefer-rest-params
return arguments;
}
}
class FakeLog {
calledWith_: IArguments;
constructor() {
// eslint-disable-next-line prefer-rest-params
this.calledWith_ = arguments;
}
}
class FakeSink {
calledWith_: IArguments;
constructor() {
// eslint-disable-next-line prefer-rest-params
this.calledWith_ = arguments;
}
}
describe('Logging', () => {
let Logging: typeof LOGGING;
let logging: LOGGING;
const PROJECT_ID = 'project-id';
before(() => {
Logging = proxyquire('../src', {
'@google-cloud/common': {
util: fakeUtil,
},
'@google-cloud/promisify': fakeCallbackify,
'@google-cloud/paginator': fakePaginator,
'@google-cloud/projectify': fakeProjectify,
'google-gax': {
GoogleAuth: fakeGoogleAuth,
},
'./log': {Log: FakeLog},
'./log-sync': {LogSync: FakeLog},
'./entry': {Entry: FakeEntry},
'./sink': {Sink: FakeSink},
'./v2': fakeV2,
}).Logging;
});
beforeEach(() => {
extend(fakeUtil, originalFakeUtil);
googleAuthOverride = null;
isCustomTypeOverride = null;
replaceProjectIdTokenOverride = null;
logging = new Logging({
projectId: PROJECT_ID,
});
});
describe('instantiation', () => {
const EXPECTED_SCOPES: string[] = [];
const clientClasses = [
v2.ConfigServiceV2Client,
v2.LoggingServiceV2Client,
v2.MetricsServiceV2Client,
];
for (const clientClass of clientClasses) {
for (const scope of clientClass.scopes) {
if (EXPECTED_SCOPES.indexOf(scope) === -1) {
EXPECTED_SCOPES.push(scope);
}
}
}
it('should extend the correct methods', () => {
assert(extended); // See `fakePaginator.extend`
});
it('should callbackify all the things', () => {
assert(callbackified);
});
it('should initialize the API object', () => {
assert.deepStrictEqual(logging.api, {});
});
it('should cache a local GoogleAuth instance', () => {
const fakeGoogleAuthInstance = {};
const options = {
a: 'b',
c: 'd',
} as LoggingOptions;
googleAuthOverride = (options_: {}) => {
assert.deepStrictEqual(
options_,
extend(
{
libName: 'gccl',
libVersion: version,
scopes: EXPECTED_SCOPES,
},
options
)
);
return fakeGoogleAuthInstance;
};
const logging = new Logging(options);
assert.strictEqual(logging.auth, fakeGoogleAuthInstance);
});
it('should localize the options', () => {
const options = {
a: 'b',
c: 'd',
clientConfig: {},
port: 443,
servicePath: 'logging.googleapis.com',
} as LoggingOptions;
const logging = new Logging(options);
assert.notStrictEqual(logging.options, options);
assert.deepStrictEqual(
logging.options,
extend(
{
libName: 'gccl',
libVersion: version,
scopes: EXPECTED_SCOPES,
},
options
)
);
});
it('should set the projectId', () => {
assert.strictEqual(logging.projectId, PROJECT_ID);
});
it('should default the projectId to the token', () => {
const logging = new Logging({});
assert.strictEqual(logging.projectId, '{{projectId}}');
});
});
describe('createSink', () => {
const SINK_NAME = 'name';
beforeEach(() => {
logging.configService.createSink = async () => [{}];
});
it('should throw if a name is not provided', () => {
const error = new Error('A sink name must be provided.');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging as any).createSink().then(noop, (err: Error) => {
assert.deepStrictEqual(err, error);
});
});
it('should throw if a config object is not provided', () => {
const error = new Error('A sink configuration object must be provided.');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging as any).createSink(SINK_NAME).then(noop, (err: Error) => {
assert.deepStrictEqual(err, error);
});
});
it('should set acls for a Dataset destination', async () => {
const dataset = {};
const CONFIG = {
destination: dataset,
};
isCustomTypeOverride = (destination: {}, type: string) => {
assert.strictEqual(destination, dataset);
return type === 'bigquery/dataset';
};
logging.setAclForDataset_ = async config => {
assert.strictEqual(config, CONFIG);
};
await logging.createSink(SINK_NAME, CONFIG);
});
it('should set acls for a Topic destination', async () => {
const topic = {};
const CONFIG = {
destination: topic,
};
isCustomTypeOverride = (destination: {}, type: string) => {
assert.strictEqual(destination, topic);
return type === 'pubsub/topic';
};
logging.setAclForTopic_ = async config => {
assert.strictEqual(config, CONFIG);
};
await logging.createSink(SINK_NAME, CONFIG);
});
it('should set acls for a Bucket destination', async () => {
const bucket = {};
const CONFIG = {
destination: bucket,
};
isCustomTypeOverride = (destination: {}, type: string) => {
assert.strictEqual(destination, bucket);
return type === 'storage/bucket';
};
logging.setAclForBucket_ = async config => {
assert.strictEqual(config, CONFIG);
};
await logging.createSink(SINK_NAME, CONFIG);
});
describe('API request', () => {
it('should call GAX method', async () => {
const config = {
a: 'b',
c: 'd',
} as {} as CreateSinkRequest;
const expectedConfig = extend({}, config, {
name: SINK_NAME,
});
logging.configService.createSink = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
gaxOpts: {}
) => {
const expectedParent = 'projects/' + logging.projectId;
assert.strictEqual(reqOpts.parent, expectedParent);
assert.deepStrictEqual(reqOpts.sink, expectedConfig);
assert.strictEqual(gaxOpts, undefined);
return [{}];
};
await logging.createSink(SINK_NAME, config);
});
it('should accept uniqueWriterIdentity', async () => {
const config = {
destination: '...',
uniqueWriterIdentity: '...',
};
logging.configService.createSink = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
gaxOpts: {}
) => {
assert.strictEqual(
reqOpts.uniqueWriterIdentity,
config.uniqueWriterIdentity
);
assert.strictEqual(reqOpts.sink.uniqueWriterIdentity, undefined);
return [{}];
};
await logging.createSink(SINK_NAME, config);
});
it('should accept GAX options', async () => {
const config = {
a: 'b',
c: 'd',
gaxOptions: {},
} as {} as CreateSinkRequest;
logging.configService.createSink = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
gaxOpts: {}
) => {
assert.strictEqual(reqOpts.sink.gaxOptions, undefined);
assert.strictEqual(gaxOpts, config.gaxOptions);
return [{}];
};
await logging.createSink(SINK_NAME, config);
});
describe('error', () => {
const error = new Error('Error.');
const apiResponse = {};
beforeEach(() => {
(logging.request as Function) = (config: {}, callback: Function) => {
callback(error, apiResponse);
};
});
it('should reject Promise with an error', () => {
logging.configService.createSink = async () => {
throw error;
};
logging
.createSink(SINK_NAME, {} as CreateSinkRequest)
.then(noop, (err: Error) => assert.deepStrictEqual(err, error));
});
});
describe('success', () => {
const apiResponse = {
name: SINK_NAME,
};
beforeEach(() => {
(logging.request as Function) = (config: {}, callback: Function) => {
callback(null, apiResponse);
};
});
      it('should resolve promise with Sink & API response', async () => {
const sink = {} as Sink;
logging.sink = name_ => {
assert.strictEqual(name_, SINK_NAME);
return sink;
};
logging.configService.createSink = async () => {
return [apiResponse];
};
const [sink_, apiResponse_] = await logging.createSink(
SINK_NAME,
{} as CreateSinkRequest
);
assert.strictEqual(sink_, sink);
assert.strictEqual(sink_.metadata, apiResponse);
assert.strictEqual(apiResponse_, apiResponse);
});
});
});
});
describe('entry', () => {
const RESOURCE = {};
const DATA = {};
it('should return an Entry object', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const entry = logging.entry(RESOURCE, DATA) as any;
assert(entry instanceof FakeEntry);
assert.strictEqual(entry.calledWith_[0], RESOURCE);
assert.strictEqual(entry.calledWith_[1], DATA);
});<|fim▁hole|>
describe('getEntries', () => {
beforeEach(() => {
logging.auth.getProjectId = async () => PROJECT_ID;
});
it('should exec without options (with defaults)', async () => {
logging.loggingService.listLogEntries = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
gaxOpts: {}
) => {
assert.deepStrictEqual(reqOpts, {
filter: reqOpts?.filter,
orderBy: 'timestamp desc',
resourceNames: ['projects/' + logging.projectId],
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
});
assert.ok(reqOpts?.filter.includes('timestamp'));
return [[]];
};
await logging.getEntries();
});
it('should accept options (and not overwrite timestamp)', async () => {
const options = {filter: 'timestamp > "2020-11-11T15:01:23.045123456Z"'};
logging.loggingService.listLogEntries = async (
reqOpts: {},
gaxOpts: {}
) => {
assert.deepStrictEqual(
reqOpts,
extend(options, {
filter: 'timestamp > "2020-11-11T15:01:23.045123456Z"',
orderBy: 'timestamp desc',
resourceNames: ['projects/' + logging.projectId],
})
);
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
});
return [[]];
};
await logging.getEntries(options);
});
it('should append default timestamp to existing filters', async () => {
const options = {filter: 'test'};
logging.loggingService.listLogEntries = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
gaxOpts: {}
) => {
assert.deepStrictEqual(
reqOpts,
extend(options, {
filter: reqOpts?.filter,
orderBy: 'timestamp desc',
resourceNames: ['projects/' + logging.projectId],
})
);
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
});
assert.ok(reqOpts?.filter.includes('test AND timestamp'));
return [[]];
};
await logging.getEntries(options);
});
it('should not push the same resourceName again', async () => {
const options = {
resourceNames: ['projects/' + logging.projectId],
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
logging.loggingService.listLogEntries = async (reqOpts: any) => {
assert.deepStrictEqual(reqOpts.resourceNames, [
'projects/' + logging.projectId,
]);
return [[]];
};
      await logging.getEntries(options);
});
it('should allow overriding orderBy', async () => {
const options = {
orderBy: 'timestamp asc',
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
logging.loggingService.listLogEntries = async (reqOpts: any) => {
assert.deepStrictEqual(reqOpts.orderBy, options.orderBy);
return [[]];
};
await logging.getEntries(options);
});
it('should accept GAX options', async () => {
const options = {
a: 'b',
c: 'd',
gaxOptions: {
autoPaginate: true,
},
};
logging.loggingService.listLogEntries = async (
// eslint-disable-next-line @typescript-eslint/no-explicit-any
reqOpts: any,
gaxOpts: {}
) => {
assert.deepStrictEqual(reqOpts, {
a: 'b',
c: 'd',
filter: reqOpts?.filter,
orderBy: 'timestamp desc',
resourceNames: ['projects/' + logging.projectId],
});
// eslint-disable-next-line @typescript-eslint/no-explicit-any
assert.strictEqual((reqOpts as any).gaxOptions, undefined);
assert.deepStrictEqual(gaxOpts, options.gaxOptions);
assert.ok(reqOpts?.filter.includes('timestamp'));
return [[]];
};
await logging.getEntries(options);
});
describe('error', () => {
const error = new Error('Error.');
beforeEach(() => {
logging.loggingService.listLogEntries = async () => {
throw error;
};
});
it('should reject promise with error', () => {
logging.getEntries().then(noop, err => assert.strictEqual(err, error));
});
});
describe('success', () => {
const expectedResponse = [
[
{
logName: 'syslog',
},
],
];
beforeEach(() => {
logging.loggingService.listLogEntries = async () => {
return expectedResponse;
};
});
it('should resolve promise with entries & API resp', async () => {
const [entries] = await logging.getEntries();
assert.strictEqual(entries[0], expectedResponse[0][0]);
});
});
});
describe('getEntriesStream', () => {
const OPTIONS = {
a: 'b',
c: 'd',
gaxOptions: {
a: 'b',
c: 'd',
},
} as GetEntriesRequest;
let GAX_STREAM: AbortableDuplex;
const RESULT = {};
beforeEach(() => {
GAX_STREAM = through();
GAX_STREAM.push(RESULT);
logging.loggingService.listLogEntriesStream = () => GAX_STREAM;
logging.auth.getProjectId = async () => PROJECT_ID;
});
it('should make request once reading', done => {
logging.loggingService.listLogEntriesStream = (
reqOpts: {},
gaxOpts: {}
) => {
assert.deepStrictEqual(reqOpts, {
resourceNames: ['projects/' + logging.projectId],
orderBy: 'timestamp desc',
a: 'b',
c: 'd',
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
setImmediate(done);
return GAX_STREAM;
};
const stream = logging.getEntriesStream(OPTIONS);
stream.emit('reading');
});
it('should set logName filter if has logName flag', done => {
const logName = 'log-name';
logging = new LOGGING({projectId: PROJECT_ID});
logging.loggingService.listLogEntriesStream = (
reqOpts: {},
gaxOpts: {}
) => {
assert.deepStrictEqual(reqOpts, {
resourceNames: ['projects/' + logging.projectId],
orderBy: 'timestamp desc',
a: 'b',
c: 'd',
filter: `logName="${[
'projects',
PROJECT_ID,
'logs',
encodeURIComponent(logName),
].join('/')}"`,
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
setImmediate(done);
return GAX_STREAM;
};
const log = logging.log('log-name');
const stream = log.getEntriesStream(OPTIONS);
stream.emit('reading');
});
it('should add logName filter to user provided filter', done => {
const logName = 'log-name';
const OPTIONS_WITH_FILTER = extend(
{
filter: 'custom filter',
},
OPTIONS
);
logging = new LOGGING({projectId: PROJECT_ID});
logging.loggingService.listLogEntriesStream = (
reqOpts: {},
gaxOpts: {}
) => {
assert.deepStrictEqual(reqOpts, {
resourceNames: ['projects/' + logging.projectId],
orderBy: 'timestamp desc',
a: 'b',
c: 'd',
filter: `(${OPTIONS_WITH_FILTER.filter}) AND logName="${[
'projects',
PROJECT_ID,
'logs',
encodeURIComponent(logName),
].join('/')}"`,
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
setImmediate(done);
return GAX_STREAM;
};
const log = logging.log('log-name');
const stream = log.getEntriesStream(OPTIONS_WITH_FILTER);
stream.emit('reading');
});
it('should destroy request stream if gax fails', done => {
const error = new Error('Error.');
logging.loggingService.listLogEntriesStream = () => {
throw error;
};
const stream = logging.getEntriesStream(OPTIONS);
stream.emit('reading');
stream.once('error', err => {
assert.strictEqual(err, error);
done();
});
});
it('should destroy request stream if gaxStream catches error', done => {
const error = new Error('Error.');
const stream = logging.getEntriesStream(OPTIONS);
stream.emit('reading');
stream.on('error', err => {
assert.strictEqual(err, error);
done();
});
setImmediate(() => {
GAX_STREAM.emit('error', error);
});
});
it('should return if in snippet sandbox', done => {
logging.setProjectId = async () => {
return done(new Error('Should not have gotten project ID'));
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(global as any).GCLOUD_SANDBOX_ENV = true;
const stream = logging.getEntriesStream(OPTIONS);
stream.emit('reading');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
delete (global as any).GCLOUD_SANDBOX_ENV;
assert(stream instanceof require('stream'));
done();
});
it('should convert results from request to Entry', done => {
const stream = logging.getEntriesStream(OPTIONS);
stream.on('data', entry => {
const argsPassedToFromApiResponse_ = entry[0];
assert.strictEqual(argsPassedToFromApiResponse_, RESULT);
done();
});
stream.emit('reading');
});
it('should expose abort function', done => {
GAX_STREAM.cancel = done;
const stream = logging.getEntriesStream(OPTIONS) as AbortableDuplex;
stream.emit('reading');
setImmediate(() => {
stream.abort();
});
});
it('should not require an options object', () => {
assert.doesNotThrow(() => {
const stream = logging.getEntriesStream();
stream.emit('reading');
});
});
});
describe('getLogs', () => {
beforeEach(() => {
(logging.auth.getProjectId as Function) = async () => {};
});
const OPTIONS = {
a: 'b',
c: 'd',
gaxOptions: {
a: 'b',
c: 'd',
},
} as GetLogsRequest;
it('should exec without options', async () => {
logging.loggingService.listLogs = async (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(gaxOpts, {autoPaginate: undefined});
return [[]];
};
await logging.getLogs();
});
it('should call gax method', async () => {
logging.loggingService.listLogs = async (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(reqOpts, {
parent: 'projects/' + logging.projectId,
a: 'b',
c: 'd',
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
return [[]];
};
await logging.getLogs(OPTIONS);
});
describe('error', () => {
it('should reject promise with error', () => {
const error = new Error('Error.');
logging.loggingService.listLogs = async () => {
throw error;
};
logging
.getLogs(OPTIONS)
.then(noop, err => assert.strictEqual(err, error));
});
});
describe('success', () => {
const RESPONSE = ['log1'];
beforeEach(() => {
logging.loggingService.listLogs = async () => {
return [RESPONSE];
};
});
it('should resolve promise with Logs & API resp', async () => {
const logInstance = {} as Log;
logging.log = name => {
assert.strictEqual(name, RESPONSE[0]);
return logInstance;
};
const [logs] = await logging.getLogs(OPTIONS);
assert.strictEqual(logs[0], logInstance);
});
});
});
describe('getLogsStream', () => {
const OPTIONS = {
a: 'b',
c: 'd',
gaxOptions: {
a: 'b',
c: 'd',
},
} as GetLogsRequest;
let GAX_STREAM: AbortableDuplex;
const RESPONSE = ['log1'];
beforeEach(() => {
GAX_STREAM = through();
GAX_STREAM.push(RESPONSE[0]);
logging.loggingService.listLogsStream = () => GAX_STREAM;
(logging.auth.getProjectId as Function) = async () => {};
});
it('should make request once reading', done => {
logging.loggingService.listLogsStream = (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(reqOpts, {
parent: 'projects/' + logging.projectId,
a: 'b',
c: 'd',
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
setImmediate(done);
return GAX_STREAM;
};
const stream = logging.getLogsStream(OPTIONS);
stream.emit('reading');
});
it('should destroy request stream if gax fails', done => {
const error = new Error('Error.');
logging.loggingService.listLogsStream = () => {
throw error;
};
const stream = logging.getLogsStream(OPTIONS);
stream.emit('reading');
stream.once('error', err => {
assert.strictEqual(err, error);
done();
});
});
it('should destroy request stream if gaxStream catches error', done => {
const error = new Error('Error.');
const stream = logging.getLogsStream(OPTIONS);
stream.emit('reading');
stream.on('error', err => {
assert.strictEqual(err, error);
done();
});
setImmediate(() => {
GAX_STREAM.emit('error', error);
});
});
it('should convert results from request to Log', done => {
const stream = logging.getLogsStream(OPTIONS);
const logInstance = {} as Log;
logging.log = (name: string) => {
assert.strictEqual(name, RESPONSE[0]);
return logInstance;
};
stream.on('data', log => {
assert.strictEqual(log, logInstance);
done();
});
stream.emit('reading');
});
it('should expose abort function', done => {
GAX_STREAM.cancel = done;
const stream = logging.getLogsStream(OPTIONS) as AbortableDuplex;
stream.emit('reading');
setImmediate(() => {
stream.abort();
});
});
});
describe('getSinks', () => {
beforeEach(() => {
(logging.auth.getProjectId as Function) = async () => {};
});
const OPTIONS = {
a: 'b',
c: 'd',
gaxOptions: {
a: 'b',
c: 'd',
},
} as GetSinksRequest;
it('should exec without options', async () => {
logging.configService.listSinks = async (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(gaxOpts, {autoPaginate: undefined});
return [[]];
};
await logging.getSinks();
});
it('should call gax method', async () => {
logging.configService.listSinks = async (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(reqOpts, {
parent: 'projects/' + logging.projectId,
a: 'b',
c: 'd',
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
return [[]];
};
await logging.getSinks(OPTIONS);
});
describe('error', () => {
it('should reject promise with error', () => {
const error = new Error('Error.');
logging.configService.listSinks = async () => {
throw error;
};
logging
.getSinks(OPTIONS)
.then(noop, err => assert.strictEqual(err, error));
});
});
describe('success', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const ARGS: any = [
[
{
name: 'sink-name',
},
],
{},
];
beforeEach(() => {
logging.configService.listSinks = async () => {
return ARGS;
};
});
      it('should resolve promise with Sinks & API resp', async () => {
const sinkInstance = {} as Sink;
logging.sink = name => {
assert.strictEqual(name, ARGS[0]![0].name);
return sinkInstance;
};
const [sinks] = await logging.getSinks(OPTIONS);
assert.strictEqual(sinks[0], sinkInstance);
assert.strictEqual(sinks[0].metadata, ARGS[0][0].metadata);
});
});
});
describe('getSinksStream', () => {
const OPTIONS = {
a: 'b',
c: 'd',
gaxOptions: {
a: 'b',
c: 'd',
},
} as GetSinksRequest;
let GAX_STREAM: AbortableDuplex;
const RESULT = {
name: 'sink-name',
};
beforeEach(() => {
GAX_STREAM = through();
GAX_STREAM.push(RESULT);
logging.configService.listSinksStream = () => GAX_STREAM;
(logging.auth.getProjectId as Function) = async () => {};
});
it('should make request once reading', done => {
logging.configService.listSinksStream = (reqOpts: {}, gaxOpts: {}) => {
assert.deepStrictEqual(reqOpts, {
parent: 'projects/' + logging.projectId,
a: 'b',
c: 'd',
});
assert.deepStrictEqual(gaxOpts, {
autoPaginate: undefined,
a: 'b',
c: 'd',
});
setImmediate(done);
return GAX_STREAM;
};
const stream = logging.getSinksStream(OPTIONS);
stream.emit('reading');
});
it('should destroy request stream if gax fails', done => {
const error = new Error('Error.');
logging.configService.listSinksStream = () => {
throw error;
};
const stream = logging.getSinksStream(OPTIONS);
stream.emit('reading');
stream.once('error', err => {
assert.strictEqual(err, error);
done();
});
});
it('should destroy request stream if gaxStream catches error', done => {
const error = new Error('Error.');
const stream = logging.getSinksStream(OPTIONS);
stream.emit('reading');
stream.on('error', err => {
assert.strictEqual(err, error);
done();
});
setImmediate(() => {
GAX_STREAM.emit('error', error);
});
});
it('should return if in snippet sandbox', done => {
logging.setProjectId = async () => {
return done(new Error('Should not have gotten project ID'));
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(global as any).GCLOUD_SANDBOX_ENV = true;
const stream = logging.getSinksStream(OPTIONS);
stream.emit('reading');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
delete (global as any).GCLOUD_SANDBOX_ENV;
assert(stream instanceof require('stream'));
done();
});
it('should convert results from request to Sink', done => {
const stream = logging.getSinksStream(OPTIONS);
const sinkInstance = {} as Sink;
logging.sink = (name: string) => {
assert.strictEqual(name, RESULT.name);
return sinkInstance;
};
stream.on('data', sink => {
assert.strictEqual(sink, sinkInstance);
assert.strictEqual(sink.metadata, RESULT);
done();
});
stream.emit('reading');
});
it('should expose abort function', done => {
GAX_STREAM.cancel = done;
const stream = logging.getSinksStream(OPTIONS) as AbortableDuplex;
stream.emit('reading');
setImmediate(() => {
stream.abort();
});
});
});
describe('log', () => {
const NAME = 'log-name';
it('should return a Log object', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const log = logging.log(NAME) as any;
assert(log instanceof FakeLog);
assert.strictEqual(log.calledWith_[0], logging);
assert.strictEqual(log.calledWith_[1], NAME);
});
});
describe('logSync', () => {
const NAME = 'log-name';
it('should return a LogSync object', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const log = logging.logSync(NAME) as any;
assert(log instanceof FakeLog);
assert.strictEqual(log.calledWith_[0], logging);
assert.strictEqual(log.calledWith_[1], NAME);
});
});
describe('request', () => {
const CONFIG = {
client: 'client',
method: 'method',
reqOpts: {
a: 'b',
c: 'd',
},
gaxOpts: {},
};
const PROJECT_ID = 'project-id';
beforeEach(() => {
(logging.auth as {}) = {
getProjectId: (callback: Function) => {
callback(null, PROJECT_ID);
},
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging.api as any)[CONFIG.client] = {
[CONFIG.method]: noop,
};
});
describe('prepareGaxRequest', () => {
it('should get the project ID', done => {
(logging.auth.getProjectId as Function) = () => done();
logging.request(CONFIG, assert.ifError);
});
it('should cache the project ID', done => {
(logging.auth.getProjectId as Function) = () => {
setImmediate(() => {
assert.strictEqual(logging.projectId, PROJECT_ID);
done();
});
};
logging.request(CONFIG, assert.ifError);
});
it('should return error if getting project ID failed', done => {
const error = new Error('Error.');
(logging.auth.getProjectId as Function) = (callback: Function) => {
callback(error);
};
logging.request(CONFIG, err => {
assert.deepStrictEqual(err, error);
done();
});
});
it('should initiate and cache the client', () => {
const fakeClient = {
[CONFIG.method]: noop,
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(fakeV2 as any)[CONFIG.client] = class {
constructor(options: {}) {
assert.strictEqual(options, logging.options);
return fakeClient;
}
};
logging.api = {};
logging.request(CONFIG, assert.ifError);
assert.strictEqual(logging.api[CONFIG.client], fakeClient);
});
it('should use the cached client', done => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(fakeV2 as any)[CONFIG.client] = () => {
done(new Error('Should not re-instantiate a GAX client.'));
};
logging.request(CONFIG);
done();
});
it('should replace the project ID token', done => {
const replacedReqOpts = {};
replaceProjectIdTokenOverride = (reqOpts: {}, projectId: string) => {
assert.notStrictEqual(reqOpts, CONFIG.reqOpts);
assert.deepStrictEqual(reqOpts, CONFIG.reqOpts);
assert.strictEqual(projectId, PROJECT_ID);
return replacedReqOpts;
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging.api as any)[CONFIG.client][CONFIG.method] = {
bind(gaxClient: {}, reqOpts: {}) {
assert.strictEqual(reqOpts, replacedReqOpts);
setImmediate(done);
return noop;
},
};
logging.request(CONFIG, assert.ifError);
});
});
describe('makeRequestCallback', () => {
it('should return if in snippet sandbox', done => {
(logging.auth.getProjectId as Function) = () => {
done(new Error('Should not have gotten project ID.'));
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(global as any).GCLOUD_SANDBOX_ENV = true;
const returnValue = logging.request(CONFIG, assert.ifError);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
delete (global as any).GCLOUD_SANDBOX_ENV;
assert.strictEqual(returnValue, undefined);
done();
});
it('should prepare the request', done => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging.api as any)[CONFIG.client][CONFIG.method] = {
bind(gaxClient: {}, reqOpts: {}, gaxOpts: {}) {
assert.strictEqual(gaxClient, logging.api[CONFIG.client]);
assert.deepStrictEqual(reqOpts, CONFIG.reqOpts);
assert.strictEqual(gaxOpts, CONFIG.gaxOpts);
setImmediate(done);
return noop;
},
};
logging.request(CONFIG, assert.ifError);
});
it('should execute callback with error', done => {
const error = new Error('Error.');
logging.api[CONFIG.client][CONFIG.method] = (...args: Array<{}>) => {
const callback = args.pop() as Function;
callback(error);
};
logging.request(CONFIG, err => {
assert.deepStrictEqual(err, error);
done();
});
});
it('should execute the request function', () => {
logging.api[CONFIG.client][CONFIG.method] = (
done: boolean,
...args: Array<{}>
) => {
const callback = args.pop() as Function;
callback(null, done); // so it ends the test
};
logging.request(CONFIG, assert.ifError);
});
});
describe('makeRequestStream', () => {
let GAX_STREAM: AbortableDuplex;
beforeEach(() => {
GAX_STREAM = through();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging.api as any)[CONFIG.client][CONFIG.method] = {
bind() {
return () => GAX_STREAM;
},
};
});
it('should return if in snippet sandbox', done => {
(logging.auth.getProjectId as Function) = () => {
done(new Error('Should not have gotten project ID.'));
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(global as any).GCLOUD_SANDBOX_ENV = true;
const returnValue = logging.request(CONFIG);
returnValue.emit('reading');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
delete (global as any).GCLOUD_SANDBOX_ENV;
assert(returnValue instanceof require('stream'));
done();
});
it('should expose an abort function', done => {
GAX_STREAM.cancel = done;
const requestStream = logging.request(CONFIG) as AbortableDuplex;
requestStream.emit('reading');
requestStream.abort();
});
it('should prepare the request once reading', done => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(logging.api as any)[CONFIG.client][CONFIG.method] = {
bind(gaxClient: {}, reqOpts: {}, gaxOpts: {}) {
assert.strictEqual(gaxClient, logging.api[CONFIG.client]);
assert.deepStrictEqual(reqOpts, CONFIG.reqOpts);
assert.strictEqual(gaxOpts, CONFIG.gaxOpts);
setImmediate(done);
return () => GAX_STREAM;
},
};
const requestStream = logging.request(CONFIG);
requestStream.emit('reading');
});
it('should destroy the stream with prepare error', done => {
const error = new Error('Error.');
(logging.auth.getProjectId as Function) = (callback: Function) => {
callback(error);
};
const requestStream = logging.request(CONFIG);
requestStream.emit('reading');
requestStream.on('error', err => {
assert.deepStrictEqual(err, error);
done();
});
});
it('should destroy the stream with GAX error', done => {
const error = new Error('Error.');
const requestStream = logging.request(CONFIG);
requestStream.emit('reading');
requestStream.on('error', err => {
assert.deepStrictEqual(err, error);
done();
});
GAX_STREAM.emit('error', error);
});
});
});
describe('sink', () => {
const NAME = 'sink-name';
    it('should return a Sink object', () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const sink = logging.sink(NAME) as any;
assert(sink instanceof FakeSink);
assert.strictEqual(sink.calledWith_[0], logging);
assert.strictEqual(sink.calledWith_[1], NAME);
});
});
describe('setAclForBucket_', () => {
let CONFIG: CreateSinkRequest;
let bucket: Bucket;
beforeEach(() => {
bucket = {
name: 'bucket-name',
acl: {
owners: {
addGroup: noop,
},
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any;
CONFIG = {
destination: bucket,
};
});
it('should add cloud-logs as an owner', async () => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(bucket.acl.owners as any).addGroup = async (entity: {}) => {
assert.strictEqual(entity, '[email protected]');
};
await logging.setAclForBucket_(CONFIG);
});
describe('error', () => {
const error = new Error('Error.');
beforeEach(() => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(bucket.acl.owners as any).addGroup = async () => {
throw error;
};
});
it('should return error', () => {
logging
.setAclForBucket_(CONFIG)
.then(noop, err => assert.deepStrictEqual(err, error));
});
});
describe('success', () => {
beforeEach(() => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(bucket.acl.owners as any).addGroup = async () => {};
});
it('should set string destination', async () => {
const expectedDestination = 'storage.googleapis.com/' + bucket.name;
await logging.setAclForBucket_(CONFIG);
assert.strictEqual(CONFIG.destination, expectedDestination);
});
});
});
describe('setAclForDataset_', () => {
let CONFIG: CreateSinkRequest;
let dataset: Dataset;
beforeEach(() => {
dataset = {
id: 'dataset-id',
parent: {
projectId: PROJECT_ID,
},
} as {} as Dataset;
CONFIG = {
destination: dataset,
};
});
describe('metadata refresh', () => {
describe('error', () => {
const error = new Error('Error.');
beforeEach(() => {
dataset.getMetadata = async () => {
throw error;
};
});
it('should reject with error', () => {
logging
.setAclForDataset_(CONFIG)
.then(noop, err => assert.deepStrictEqual(err, error));
});
});
describe('success', () => {
const apiResponse = {
access: [{}, {}],
};
const originalAccess = [].slice.call(apiResponse.access);
beforeEach(() => {
(dataset.getMetadata as Function) = async () => {
return [apiResponse, apiResponse];
};
});
it('should set the correct metadata', async () => {
const access = {
role: 'WRITER',
groupByEmail: '[email protected]',
};
const expectedAccess =
// eslint-disable-next-line @typescript-eslint/no-explicit-any
([] as any[]).slice.call(originalAccess).concat(access);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(dataset.setMetadata as Function) = async (metadata: any) => {
assert.deepStrictEqual(apiResponse.access, originalAccess);
assert.deepStrictEqual(metadata.access, expectedAccess);
};
await logging.setAclForDataset_(CONFIG);
});
describe('updating metadata error', () => {
const error = new Error('Error.');
beforeEach(() => {
dataset.setMetadata = async () => {
throw error;
};
});
it('should reject with error', () => {
logging
.setAclForDataset_(CONFIG)
.then(noop, err => assert.deepStrictEqual(err, error));
});
});
describe('updating metadata success', () => {
beforeEach(() => {
(dataset.setMetadata as Function) = async () => {};
});
it('should set string destination', async () => {
const expectedDestination = [
'bigquery.googleapis.com',
'projects',
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(dataset.parent as any).projectId,
'datasets',
dataset.id,
].join('/');
await logging.setAclForDataset_(CONFIG);
assert.strictEqual(CONFIG.destination, expectedDestination);
});
});
});
});
});
describe('setAclForTopic_', () => {
let CONFIG: CreateSinkRequest;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let topic: any;
beforeEach(() => {
topic = {
name: 'topic-name',
iam: {
getPolicy: noop,
setPolicy: noop,
},
};
CONFIG = {
destination: topic,
};
});
describe('get policy', () => {
describe('error', () => {
const error = new Error('Error.');
beforeEach(() => {
topic.iam.getPolicy = async () => {
throw error;
};
});
it('should throw error', () => {
logging
.setAclForTopic_(CONFIG)
.then(noop, err => assert.deepStrictEqual(err, error));
});
});
describe('success', () => {
const apiResponse = {
bindings: [{}, {}],
};
const originalBindings = [].slice.call(apiResponse.bindings);
beforeEach(() => {
(topic.iam.getPolicy as Function) = async () => {
return [apiResponse, apiResponse];
};
});
it('should set the correct policy bindings', async () => {
const binding = {
role: 'roles/pubsub.publisher',
members: ['serviceAccount:[email protected]'],
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const expectedBindings = ([] as any[]).slice.call(originalBindings);
expectedBindings.push(binding);
(topic.iam.setPolicy as Function) = async (policy: Policy) => {
assert.strictEqual(policy, apiResponse);
assert.deepStrictEqual(policy.bindings, expectedBindings);
};
await logging.setAclForTopic_(CONFIG);
});
describe('updating policy error', () => {
const error = new Error('Error.');
beforeEach(() => {
topic.iam.setPolicy = async () => {
throw error;
};
});
it('should throw error', () => {
logging
.setAclForTopic_(CONFIG)
.then(noop, err => assert.deepStrictEqual(err, error));
});
});
describe('updating policy success', () => {
beforeEach(() => {
(topic.iam.setPolicy as Function) = async () => {};
});
it('should set string destination', async () => {
const expectedDestination = 'pubsub.googleapis.com/' + topic.name;
await logging.setAclForTopic_(CONFIG);
assert.strictEqual(CONFIG.destination, expectedDestination);
});
});
});
});
});
describe('setProjectId', () => {
it('should update project id in case of default placeholder', async () => {
logging = new Logging({projectId: '{{projectId}}'});
logging.auth.getProjectId = async () => {
return PROJECT_ID;
};
await logging.setProjectId({});
assert.strictEqual(logging.projectId, PROJECT_ID);
});
});
describe('setDetectedResource', () => {
it('should update detected resource if none', async () => {
logging = new Logging();
sinon.stub(metadata, 'getDefaultResource').resolves({type: 'bar'});
await logging.setDetectedResource();
// eslint-disable-next-line @typescript-eslint/no-explicit-any
assert.strictEqual((logging.detectedResource as any).type, 'bar');
sinon.restore();
});
});
});<|fim▁end|>
|
});
|
<|file_name|>TriggersContainer.spec.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { default as TriggersContainer } from './TriggersContainer';
import { mount } from 'enzyme';
const emptyDispatch = () => null;
const emptyActions = { setGeoJSON: () => null };<|fim▁hole|>describe('(Container) TriggersContainer', () => {
it('Renders a TriggersContainer', () => {
const _component = mount(<TriggersContainer dispatch={emptyDispatch} actions={emptyActions} />);
expect(_component.type()).to.eql(TriggersContainer);
});
});<|fim▁end|>
| |
<|file_name|>accounts.js<|end_file_name|><|fim▁begin|>function collectWithWildcard(test) {
test.expect(4);
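	// Stub API server that answers the wildcard account query exercised below.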
var api_server = new Test_ApiServer(function handler(request, callback) {
var url = request.url;
<|fim▁hole|> });
return void callback(null, [
account.redact()
]);
default:
let error = new Error('Invalid url: ' + url);
return void callback(error);
}
});
var parameters = {
username: 'chariz*'
};
function handler(error, results) {
test.equals(error, null);
test.equals(results.length, 1);
var account = results[0];
test.equals(account.get('username'), 'charizard');
test.equals(account.get('type'), Enum_AccountTypes.MEMBER);
api_server.destroy();
test.done();
}
Resource_Accounts.collect(parameters, handler);
}
module.exports = {
collectWithWildcard
};<|fim▁end|>
|
switch (url) {
case '/accounts?username=chariz*':
let account = new Model_Account({
username: 'charizard'
|
<|file_name|>uFin3015.cpp<|end_file_name|><|fim▁begin|>/*
Module..: Finance
Function: Receipts by Customer Report
Author..: Jackson Patrick Werka
Date....: 01/07/2012
© Copyright 2012-2012 SoftGreen - All Rights Reserved
*/
//---------------------------------------------------------------------------
#include <vcl.h>
#pragma hdrstop
#include "uFin3015.h"
//---------------------------------------------------------------------------
#pragma package(smart_init)
#pragma resource "*.dfm"
//---------------------------------------------------------------------------
__fastcall TFin3015::TFin3015(TComponent* Owner)
: TfrmRelatBase01(Owner)
{
}<|fim▁hole|><|fim▁end|>
|
//---------------------------------------------------------------------------
|
<|file_name|>HaloCraft.java<|end_file_name|><|fim▁begin|>package com.tikaji.halocraft;
import com.tikaji.halocraft.common.handlers.ConfigurationHandler;
import com.tikaji.halocraft.common.proxy.IProxy;
import com.tikaji.halocraft.common.utility.Reference;
import com.tikaji.halocraft.common.utility.VersionChecker;<|fim▁hole|>import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
/**
* Created by Jacob Williams on 6/17/2015.
*/
@Mod(modid = Reference.ModInfo.MOD_ID, name = Reference.ModInfo.MOD_NAME, version = Reference.ModInfo.VERSION)
public class HaloCraft
{
@Mod.Instance
public static HaloCraft INSTANCE;
public static boolean haveWarnedVersionIsOutOfDate = false;
@SidedProxy(clientSide = Reference.ModInfo.CLIENT_PROXY_CLASS, serverSide = Reference.ModInfo.SERVER_PROXY_CLASS)
public static IProxy proxy;
public static VersionChecker versionChecker;
public static String prependModID(String name)
{
return Reference.ModInfo.MOD_ID + ":" + name;
}
@Mod.EventHandler
public void initialize(FMLInitializationEvent event)
{
proxy.init();
}
@Mod.EventHandler
public void postInit(FMLPostInitializationEvent event)
{
proxy.postInit();
ConfigurationHandler.postInit();
}
@Mod.EventHandler
public void preInit(FMLPreInitializationEvent event)
{
ConfigurationHandler.init(new Configuration(event.getSuggestedConfigurationFile()));
proxy.preInit();
ConfigurationHandler.save();
}
}<|fim▁end|>
|
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.fml.common.Mod;
|
<|file_name|>rc4_md5.rs<|end_file_name|><|fim▁begin|>//! Rc4Md5 cipher definition
use crate::crypto::{
digest::{self, Digest, DigestType},
openssl::OpenSSLCrypto,
CipherResult,
CipherType,
CryptoMode,
StreamCipher,
};
use bytes::{BufMut, BytesMut};
/// Rc4Md5 Cipher
pub struct Rc4Md5Cipher {
crypto: OpenSSLCrypto,
}
impl Rc4Md5Cipher {
pub fn new(key: &[u8], iv: &[u8], mode: CryptoMode) -> Rc4Md5Cipher {
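        // RC4-MD5 derives the effective RC4 key as MD5(key || iv).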
let mut md5_digest = digest::with_type(DigestType::Md5);
md5_digest.update(key);
md5_digest.update(iv);<|fim▁hole|> let mut key = BytesMut::with_capacity(md5_digest.digest_len());
md5_digest.digest(&mut key);
Rc4Md5Cipher {
crypto: OpenSSLCrypto::new(CipherType::Rc4, &key, b"", mode),
}
}
}
impl StreamCipher for Rc4Md5Cipher {
fn update<B: BufMut>(&mut self, data: &[u8], out: &mut B) -> CipherResult<()> {
self.crypto.update(data, out)
}
fn finalize<B: BufMut>(&mut self, out: &mut B) -> CipherResult<()> {
self.crypto.finalize(out)
}
fn buffer_size(&self, data: &[u8]) -> usize {
self.crypto.buffer_size(data)
}
}
unsafe impl Send for Rc4Md5Cipher {}
#[cfg(test)]
mod test {
use super::*;
use crate::crypto::{CipherType, CryptoMode, StreamCipher};
#[test]
fn test_rc4_md5_cipher() {
let msg = b"abcd1234";
let key = b"key";
let t = CipherType::Rc4Md5;
let iv = t.gen_init_vec();
let mut enc = Rc4Md5Cipher::new(key, &iv[..], CryptoMode::Encrypt);
let mut encrypted_msg = Vec::new();
enc.update(msg, &mut encrypted_msg)
.and_then(|_| enc.finalize(&mut encrypted_msg))
.unwrap();
let mut dec = Rc4Md5Cipher::new(key, &iv[..], CryptoMode::Decrypt);
let mut decrypted_msg = Vec::new();
dec.update(&encrypted_msg[..], &mut decrypted_msg)
.and_then(|_| dec.finalize(&mut decrypted_msg))
.unwrap();
assert_eq!(msg, &decrypted_msg[..]);
}
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Import all metrics<|fim▁hole|><|fim▁end|>
|
from django_prometheus.db.metrics import *
|
<|file_name|>demo-extend.test.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import { extendTest } from '../../../tests/shared/demoTest';
extendTest('anchor');<|fim▁end|>
| |
<|file_name|>MergeIntervals.java<|end_file_name|><|fim▁begin|>package com.zs.leetcode.array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
<|fim▁hole|> public static void main(String[] args) {
}
public List<Interval> merge(List<Interval> intervals) {
List<Interval> list = new ArrayList<Interval>();
if (intervals.size() == 0) {
return list;
}
Collections.sort(intervals, new MyComparator());
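		// After sorting by start, overlapping intervals are adjacent: grow the
		// current [start, end] window and emit it whenever a gap appears.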
int start = intervals.get(0).start;
int end = intervals.get(0).end;
for (int i = 1; i < intervals.size(); i++) {
Interval inter = intervals.get(i);
if (inter.start > end) {
list.add(new Interval(start, end));
start = inter.start;
end = inter.end;
}else{
end = Math.max(end, inter.end);
}
}
list.add(new Interval(start, end));
return list;
}
class MyComparator implements Comparator<Interval> {
@Override
public int compare(Interval o1, Interval o2) {
return o1.start - o2.start;
}
}
}<|fim▁end|>
|
public class MergeIntervals {
|
<|file_name|>import_repo.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function
import logging
from datetime import datetime
from changes.config import db
from changes.models import Repository, RepositoryStatus
from changes.queue.task import tracked_task
logger = logging.getLogger('repo.sync')
@tracked_task(max_retries=None)
def import_repo(repo_id, parent=None):
repo = Repository.query.get(repo_id)
if not repo:
logger.error('Repository %s not found', repo_id)
return
vcs = repo.get_vcs()
if vcs is None:
logger.warning('Repository %s has no VCS backend set', repo.id)
return
if repo.status == RepositoryStatus.inactive:<|fim▁hole|>
Repository.query.filter(
Repository.id == repo.id,
).update({
'last_update_attempt': datetime.utcnow(),
}, synchronize_session=False)
db.session.commit()
if vcs.exists():
vcs.update()
else:
vcs.clone()
for commit in vcs.log(parent=parent):
revision, created = commit.save(repo)
db.session.commit()
parent = commit.id
Repository.query.filter(
Repository.id == repo.id,
).update({
'last_update': datetime.utcnow(),
'status': RepositoryStatus.active,
}, synchronize_session=False)
db.session.commit()
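    # Re-queue this task with the last imported commit as the new parent so
    # the next run resumes the vcs.log traversal where this one stopped.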
if parent:
import_repo.delay(
repo_id=repo.id.hex,
task_id=repo.id.hex,
parent=parent,
)<|fim▁end|>
|
logger.info('Repository %s is inactive', repo.id)
return
|
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>use std::ffi::OsString;
use std::path::PathBuf;
use std::result::Result as StdResult;
#[derive(Fail, Debug, Serialize, Deserialize)]
pub enum FFIError {
#[fail(display = "Could not chdir to {:?}: {}", path, error)]
ChdirError { path: PathBuf, error: String },
#[fail(display = "Could not chroot to {:?}: {}", path, error)]
ChrootError { path: PathBuf, error: String },
#[fail(display = "Could not clone process: {}", _0)]
CloneError(String),
#[fail(display = "Could not dup file descriptor {}({}): {}", name, fd, error)]
DupFdError {
fd: i32,
name: String,
error: String,
},
#[fail(display = "Could not create directory {:?}: {}", path, error)]
CreateDirError { path: PathBuf, error: String },
#[fail(
display = "Could not exec {:?} (arguments: {:?}): {}",
command, arguments, error
)]
ExecError {
command: PathBuf,
arguments: Vec<OsString>,
error: String,
},
#[fail(display = "Could not mount path: {:?}: {}", path, error)]
MountError { path: PathBuf, error: String },
#[fail(display = "Could not open file descriptor {}({}): {}", name, fd, error)]
OpenFdError {
fd: i32,
name: String,
error: String,
},
#[fail(display = "Could not create pipe: {}", _0)]
Pipe2Error(String),
#[fail(
display = "Could not pivot_root to {:?} with old root at {:?}: {}",
new_root, old_root, error
)]
PivotRootError {
new_root: PathBuf,
old_root: PathBuf,
error: String,
},
#[fail(display = "Could not set process to die when parent dies: {}", _0)]
PrSetPDeathSigError(String),
#[fail(display = "Could not set interval timer alarm: {}", _0)]
SetITimerError(String),
#[fail(
display = "Could not set process group id of {} to {}: {}",
pid, pgid, error
)]
SetpgidError { pid: i32, pgid: i32, error: String },
#[fail(display = "Could not set resource limit: {}", _0)]
SetRLimitError(String),
#[fail(display = "Could not set a signal handler for {}: {}", signal, error)]
SigActionError { signal: String, error: String },
#[fail(display = "Could not umount path: {:?}: {}", path, error)]
UMountError { path: PathBuf, error: String },
#[fail(display = "Could not unshare cgroup namespace: {}", _0)]
UnshareCGroupError(String),
#[fail(display = "Could not usleep for {} microseconds: {}", time, error)]
UsleepError { time: u32, error: String },
#[fail(display = "Could not write /proc/self/uid_map file: {}", _0)]
WriteUidError(String),
#[fail(display = "Could not write /proc/self/uid_map file: {}", _0)]
WriteGidError(String),
#[fail(display = "Could not wait for process: {}", _0)]
WaitPidError(String),
#[fail(display = "Could not write /proc/self/setgroups file: {}", _0)]
WriteSetGroupsError(String),
}
#[derive(Fail, Debug, Serialize, Deserialize)]
pub enum CGroupError {
#[fail(display = "Cgroup controller missing: {:?}", _0)]
ControllerMissing(PathBuf),
#[fail(
display = "Could not create instance controller under {:?} for {:?}: {}",
controller_path, instance_name, error
)]
InstanceControllerCreateError {
controller_path: PathBuf,
instance_name: OsString,
error: String,
},<|fim▁hole|> #[fail(
display = "Could not open {:?} for controller {:?}: {}",
file, controller_path, error
)]
OpenCGroupFileError {
controller_path: PathBuf,
file: PathBuf,
error: String,
},
#[fail(
display = "Could not parse `{}` from {:?} for controller {:?}: {}",
buffer, file, controller_path, error
)]
ParseCGroupFileError {
controller_path: PathBuf,
file: PathBuf,
buffer: String,
error: String,
},
#[fail(
display = "Could not read from {:?} for controller {:?}: {}",
file, controller_path, error
)]
ReadCGroupFileError {
controller_path: PathBuf,
file: PathBuf,
error: String,
},
#[fail(
display = "Could not write to {:?} for controller {:?}: {}",
file, controller_path, error
)]
WriteCGroupFileError {
controller_path: PathBuf,
file: PathBuf,
error: String,
},
}
#[derive(Fail, Debug, Serialize, Deserialize)]
pub enum ChildError {
#[fail(display = "Cgroup error occurred.")]
CGroupError(#[cause] CGroupError),
#[fail(display = "FFI Error occurred.")]
FFIError(#[cause] FFIError),
}
impl From<CGroupError> for ChildError {
fn from(err: CGroupError) -> Self {
Self::CGroupError(err)
}
}
impl From<FFIError> for ChildError {
fn from(err: FFIError) -> Self {
Self::FFIError(err)
}
}
#[derive(Fail, Debug, Serialize, Deserialize)]
pub enum Error {
#[fail(display = "Cgroup error occurred.")]
CGroupError(#[cause] CGroupError),
#[fail(display = "Child process error occurred.")]
ChildError(#[cause] ChildError),
#[fail(display = "Child process successfully completed even though it used exec")]
ContinuedPastExecError(String),
#[fail(display = "Could not deserialize process result: {}", _0)]
DeserializeError(String),
#[fail(display = "FFI Error occurred.")]
FFIError(#[cause] FFIError),
#[fail(display = "Child process stopped/continued unexpected")]
StoppedContinuedError,
#[fail(display = "Supervisor process died and could not collect execution information")]
SupervisorProcessDiedError,
}
impl From<CGroupError> for Error {
fn from(err: CGroupError) -> Self {
Self::CGroupError(err)
}
}
impl From<ChildError> for Error {
fn from(err: ChildError) -> Self {
Self::ChildError(err)
}
}
impl From<FFIError> for Error {
fn from(err: FFIError) -> Self {
Self::FFIError(err)
}
}
pub type Result<T> = StdResult<T, Error>;<|fim▁end|>
| |
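errors.rs above builds a layered error taxonomy: narrow categories (FFIError, CGroupError) are lifted into broader ones (ChildError, Error) through `From` impls, so low-level failures bubble up while keeping their cause. A rough Python analog of the same layering using exception chaining; the class and function names are illustrative only:

```python
class FFIError(Exception):
    """Low-level syscall/FFI failure (cf. the FFIError enum)."""

class CGroupError(Exception):
    """Cgroup bookkeeping failure (cf. the CGroupError enum)."""

class ChildError(Exception):
    """Child-process failure wrapping the low-level cause."""

def run_child_step(step):
    # Mirrors the From<FFIError>/From<CGroupError> lifts: narrow errors
    # are wrapped into the broader type while keeping the original cause.
    try:
        step()
    except (FFIError, CGroupError) as err:
        raise ChildError(f"child step failed: {err}") from err
```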
<|file_name|>export-notebook.ts<|end_file_name|><|fim▁begin|>import * as path from "path";
import { promises } from "fs";
const { writeFile } = promises;
import { remote } from "electron";
const { dialog } = remote;
import { stringifyNotebook } from "@nteract/commutable";
import store from "./store";
export async function exportNotebook() {
const editor = atom.workspace.getActiveTextEditor();
const editorPath = editor.getPath();
const directory = path.dirname(editorPath);
const rawFileName = path.basename(editorPath, path.extname(editorPath));
const noteBookPath = path.join(directory, `${rawFileName}.ipynb`);
const { canceled, filePath } = await dialog.showSaveDialog({
title: editor.getTitle(),
defaultPath: noteBookPath,
});
if (!canceled) {
await saveNoteBook(filePath);
}
}
async function saveNoteBook(filePath: string) {
if (filePath.length === 0) {
return;
}
// add default extension
const ext = path.extname(filePath) === "" ? ".ipynb" : "";
const fname = `${filePath}${ext}`;
try {
await writeFile(fname, stringifyNotebook(store.notebook));
atom.notifications.addSuccess("Save successful", {
detail: `Saved notebook as ${fname}`,<|fim▁hole|> atom.notifications.addError("Error saving file", {
detail: err.message,
});
}
}<|fim▁end|>
|
});
} catch (err) {
|
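`saveNoteBook` above appends a default `.ipynb` suffix only when the chosen path has no extension at all. The same rule as a small, self-contained Python helper with a hypothetical name:

```python
from pathlib import Path

def with_default_extension(path: str, default: str = ".ipynb") -> str:
    # Append the default suffix only when no extension was given,
    # like the `path.extname(filePath) === ""` check above.
    return path + default if Path(path).suffix == "" else path

assert with_default_extension("notes") == "notes.ipynb"
assert with_default_extension("notes.json") == "notes.json"
```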
<|file_name|>log_writer.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "db/log_writer.h"
#include <stdint.h>
#include "leveldb/env.h"
#include "util/coding.h"
#include "util/crc32c.h"
namespace leveldb {
namespace log {
Writer::Writer(WritableFile* dest)
: dest_(dest),
block_offset_(0) {
for (int i = 0; i <= kMaxRecordType; i++) {
char t = static_cast<char>(i);
type_crc_[i] = crc32c::Value(&t, 1);
}
}
Writer::~Writer() {
}
Status Writer::AddRecord(const Slice& slice) {
const char* ptr = slice.data();
size_t left = slice.size();
// Fragment the record if necessary and emit it. Note that if slice
// is empty, we still want to iterate once to emit a single
// zero-length record
Status s;
bool begin = true;
do {
const int leftover = kBlockSize - block_offset_;
assert(leftover >= 0);
if (leftover < kHeaderSize) {
// Switch to a new block
if (leftover > 0) {
// Fill the trailer (literal below relies on kHeaderSize being 7)
assert(kHeaderSize == 7);
dest_->Append(Slice("\x00\x00\x00\x00\x00\x00", leftover));
}
block_offset_ = 0;
}
// Invariant: we never leave < kHeaderSize bytes in a block.
assert(kBlockSize - block_offset_ - kHeaderSize >= 0);
const size_t avail = kBlockSize - block_offset_ - kHeaderSize;
const size_t fragment_length = (left < avail) ? left : avail;
RecordType type;
const bool end = (left == fragment_length);
if (begin && end) {
type = kFullType;
} else if (begin) {
type = kFirstType;
} else if (end) {
type = kLastType;
} else {
type = kMiddleType;
}
s = EmitPhysicalRecord(type, ptr, fragment_length);
ptr += fragment_length;
left -= fragment_length;
begin = false;
} while (s.ok() && left > 0);
return s;
}
Status Writer::EmitPhysicalRecord(RecordType t, const char* ptr, size_t n) {
assert(n <= 0xffff); // Must fit in two bytes
assert(block_offset_ + kHeaderSize + n <= kBlockSize);
// Format the header
char buf[kHeaderSize];
buf[4] = static_cast<char>(n & 0xff);
buf[5] = static_cast<char>(n >> 8);
buf[6] = static_cast<char>(t);
// Compute the crc of the record type and the payload.
uint32_t crc = crc32c::Extend(type_crc_[t], ptr, n);
crc = crc32c::Mask(crc); // Adjust for storage
EncodeFixed32(buf, crc);
// Write the header and the payload
Status s = dest_->Append(Slice(buf, kHeaderSize));
if (s.ok()) {
s = dest_->Append(Slice(ptr, n));
if (s.ok()) {
s = dest_->Flush();
}
}
block_offset_ += kHeaderSize + n;
return s;
}
} // namespace log<|fim▁hole|><|fim▁end|>
|
} // namespace leveldb
|
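`AddRecord` above fragments a record across fixed-size blocks: each fragment carries a 7-byte header (4-byte CRC, 2-byte little-endian length, 1-byte type), fragments are typed full/first/middle/last, and a block tail shorter than one header is zero-padded. A Python sketch of just the fragmentation and header packing; note that real leveldb seeds the CRC with the record type and masks it for storage, which this simplified version skips:

```python
import struct
import zlib

BLOCK_SIZE = 32768                 # leveldb's kBlockSize
HEADER_SIZE = 7                    # 4-byte crc + 2-byte length + 1-byte type
FULL, FIRST, MIDDLE, LAST = 1, 2, 3, 4

def fragments(record: bytes, block_offset: int = 0):
    """Yield (type, header + payload) pieces the way AddRecord does."""
    begin, left, pos = True, len(record), 0
    while True:
        if BLOCK_SIZE - block_offset < HEADER_SIZE:
            block_offset = 0                       # skip the padded trailer
        avail = BLOCK_SIZE - block_offset - HEADER_SIZE
        n = min(left, avail)
        end = left == n
        kind = (FULL if begin and end else FIRST if begin
                else LAST if end else MIDDLE)
        header = struct.pack("<IHB", zlib.crc32(record[pos:pos + n]), n, kind)
        yield kind, header + record[pos:pos + n]
        pos, left, begin = pos + n, left - n, False
        block_offset += HEADER_SIZE + n
        if left == 0:
            return
```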
<|file_name|>averageflags.py<|end_file_name|><|fim▁begin|>from pudzu.charts import *
from pudzu.sandbox.bamboo import *
flags = pd.read_csv("../dataviz/datasets/countries.csv").filter_rows("organisations >> un").split_columns('country', "|").split_rows('country').set_index('country').drop_duplicates(subset='flag', keep='first')
def flag_image(c):
return Image.from_url_with_cache(flags['flag'][c]).convert("RGBA").remove_transparency("white").convert("RGB")
def average_image(imgs, size, weights=None):
if weights is None: weights = [1 for _ in imgs]
average = ImageColor.from_linear(sum(ImageColor.to_linear(np.array(img.resize(size))) * w for img,w in zip(imgs, weights)) / sum(weights))
return Image.fromarray(np.uint8(average))
def average_flag(df, size, weights=None):
if callable(weights): weights = weights(df)
flags = [flag_image(i) for i in df.index]
return average_image(flags, (size[0]-2,size[1]-2), weights).pad(1, "black")
continents = flags.groupby("continent").count().index
continentlabels = [ Image.from_text(continent.upper(), calibri(60, bold=True), "black", "white") for continent in continents ]
world = average_flag(flags, (1200,800))
world_weighted = average_flag(flags, (1200,800), lambda df: df.population)
continent = Image.from_array([continentlabels, [average_flag(flags[flags.continent == continent], (600, 400)) for continent in continents]], padding=5, bg="white")
continent_weighted = Image.from_array([continentlabels, [average_flag(flags[flags.continent == continent], (600, 400), lambda df: df.population) for continent in continents]], padding=5, bg="white")
os.makedirs("output/averageflags", exist_ok=True)
world.save("output/averageflags/world.png")
world_weighted.save("output/averageflags/world_weighted.png")
continent.save("output/averageflags/continents.png")
continent_weighted.save("output/averageflags/continents_weighted.png")
# quick and dirty scrape of some area data: will add to the country dataset at some point<|fim▁hole|>df = df.rename(columns=df.iloc[0])[1:].fillna("0")
df = df.assign_rows(country=lambda d: d[next(c for c in df.columns if "state" in c)].split(" (")[0].split(" !")[-1].strip(" \xa0"),
area=lambda d: d[next(c for c in df.columns if "Total" in c)].split(" (")[0].split(chr(9824))[1].replace(",","").replace("<","")).set_index("country")
flags = flags.assign_rows(area=lambda d,c: df["area"][c]).apply(pd.to_numeric, errors='ignore')
world_area = average_flag(flags, (1200,800), lambda df: df.area)
world_area.save("output/averageflags/world_area.png")
world_density = average_flag(flags, (1200,800), lambda df: df.population / df.area)
world_density.save("output/averageflags/world_density.png")
continent_area = Image.from_array([continentlabels, [average_flag(flags[flags.continent == continent], (600, 400), lambda df: df.area) for continent in continents]], padding=5, bg="white")
continent_area.save("output/averageflags/continents_area.png")
continent_density = Image.from_array([continentlabels, [average_flag(flags[flags.continent == continent], (600, 400), lambda df: df.population / df.area) for continent in continents]], padding=5, bg="white")
continent_density.save("output/averageflags/continents_density.png")<|fim▁end|>
|
df = pd.read_html("https://en.wikipedia.org/wiki/List_of_countries_and_dependencies_by_area")[0]
|
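`average_image` above converts pixels to linear light before taking the weighted mean, because averaging gamma-encoded sRGB bytes directly biases the result dark. A standalone NumPy sketch of the same idea, using a plain gamma-2.2 approximation rather than the exact sRGB piecewise transfer curve:

```python
import numpy as np

def to_linear(srgb):
    return (np.asarray(srgb, dtype=np.float64) / 255.0) ** 2.2

def from_linear(linear):
    return np.uint8(np.clip(linear, 0.0, 1.0) ** (1 / 2.2) * 255.0)

def weighted_average(images, weights):
    # Mean in linear light, as ImageColor.to_linear/from_linear do above.
    acc = sum(to_linear(img) * w for img, w in zip(images, weights))
    return from_linear(acc / sum(weights))
```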
<|file_name|>postmessage.js<|end_file_name|><|fim▁begin|>// @license
// Redistribution and use in source and binary forms ...
// Class for sending and receiving postMessages.
// Based off the library by Daniel Park (http://metaweb.com, http://postmessage.freebaseapps.com)
//
// Dependencies:
// * None
//
// Copyright 2014 Carnegie Mellon University. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other materials
// provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY CARNEGIE MELLON UNIVERSITY ''AS IS'' AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// The views and conclusions contained in the software and documentation are those of the
// authors and should not be interpreted as representing official policies, either expressed
// or implied, of Carnegie Mellon University.
//
// Authors:
// Paul Dille ([email protected])
//
"use strict";
(function(window) {
// Send postMessages.
window.pm = function(options) {
pm.send(options);
};
// Bind a handler to a postMessage response.
window.pm.bind = function(type, fn) {
pm.bind(type, fn);
};
// Unbind postMessage handlers
window.pm.unbind = function(type, fn) {
pm.unbind(type, fn);
};
var pm = {
// The origin domain.
_origin: null,
// Internal storage (keep track of listeners, etc).
data: function(key, value) {
if (value === undefined) {
return pm._data[key];
}
pm._data[key] = value;
return value;
},
_data: {},
// Send postMessages.
send: function(options) {
if (!options) {
console.warn("Need to specify at least 3 options (origin, target, type).");
return;
}
if (options.origin) {
if (!pm._origin) {
pm._origin = options.origin;
}
} else {
console.warn("postMessage origin must be specified.");
return;<|fim▁hole|> return;
}
if (!options.type) {
console.warn("postMessage message type required.");
return;
}
var msg = {data: options.data, type: options.type};
if ("postMessage" in target) {
// Send the postMessage.
try {
target.postMessage(JSON.stringify(msg), options.origin);
} catch (ex) {
console.warn("postMessage failed with " + ex.name + ":", ex.message);
}
} else {
console.warn("postMessage not supported");
}
},
// Listen to incoming postMessages.
bind: function(type, fn) {
if (!pm.data("listening.postMessage")) {
if (window.addEventListener) {
window.addEventListener("message", pm._dispatch, false);
}
// Make sure we create only one receiving postMessage listener.
pm.data("listening.postMessage", true);
}
// Keep track of listeners and their handlers.
var listeners = pm.data("listeners.postMessage");
if (!listeners) {
listeners = {};
pm.data("listeners.postMessage", listeners);
}
var fns = listeners[type];
if (!fns) {
fns = [];
listeners[type] = fns;
}
fns.push({fn: fn, origin: pm._origin});
},
// Unbind postMessage listeners.
unbind: function(type, fn) {
var listeners = pm.data("listeners.postMessage");
if (listeners) {
if (type) {
if (fn) {
// Remove specific listener
var fns = listeners[type];
if (fns) {
var newListeners = [];
for (var i = 0, len = fns.length; i < len; i++) {
var obj = fns[i];
if (obj.fn !== fn) {
newListeners.push(obj);
}
}
listeners[type] = newListeners;
}
} else {
// Remove all listeners by type
delete listeners[type];
}
} else {
// Unbind all listeners of all types
for (var i in listeners) {
delete listeners[i];
}
}
}
},
// Run the handler, if one exists, based on the type defined in the postMessage.
_dispatch: function(e) {
var msg = {};
try {
msg = JSON.parse(e.data);
} catch (ex) {
console.warn("postMessage data parsing failed: ", ex);
return;
}
if (!msg.type) {
console.warn("postMessage message type required.");
return;
}
var listeners = pm.data("listeners.postMessage") || {};
var fns = listeners[msg.type] || [];
for (var i = 0, len = fns.length; i < len; i++) {
var obj = fns[i];
if (obj.origin && obj.origin !== '*' && e.origin !== obj.origin) {
console.warn("postMessage message origin mismatch: ", e.origin, obj.origin);
continue;
}
// Run handler
try {
obj.fn(msg.data);
} catch (ex) {
throw ex;
}
}
}
};
})(this);<|fim▁end|>
|
}
var target = options.target;
if (!target) {
console.warn("postMessage target window required.");
|
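The `pm` object above keeps a per-type registry of handlers, each optionally pinned to an origin; `_dispatch` parses the JSON payload and skips handlers whose pinned origin does not match the sender. The same bookkeeping reduced to a compact Python sketch with a module-level registry; the names are illustrative:

```python
import json

listeners = {}   # msg type -> list of (handler, pinned origin or None)

def bind(msg_type, fn, origin=None):
    listeners.setdefault(msg_type, []).append((fn, origin))

def unbind(msg_type=None, fn=None):
    if msg_type is None:
        listeners.clear()                        # all types
    elif fn is None:
        listeners.pop(msg_type, None)            # all handlers of one type
    else:                                        # one specific handler
        listeners[msg_type] = [h for h in listeners.get(msg_type, [])
                               if h[0] is not fn]

def dispatch(raw, sender_origin):
    msg = json.loads(raw)
    for fn, origin in list(listeners.get(msg.get("type"), [])):
        if origin not in (None, "*", sender_origin):
            continue                             # origin mismatch: skip
        fn(msg.get("data"))
```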
<|file_name|>20151021114814-default-null.js<|end_file_name|><|fim▁begin|>export function up(queryInterface, Sequelize) {
return Promise.all([
queryInterface.changeColumn('memberships', 'approved', {
type: Sequelize.BOOLEAN,
defaultValue: null,
}),
queryInterface.changeColumn('quotes', 'approved', {
type: Sequelize.BOOLEAN,
defaultValue: null,
}),
]);
}
export function down(queryInterface, Sequelize) {
return Promise.all([<|fim▁hole|> type: Sequelize.BOOLEAN,
defaultValue: false,
}),
queryInterface.changeColumn('quotes', 'approved', {
type: Sequelize.BOOLEAN,
defaultValue: false,
}),
]);
}<|fim▁end|>
|
queryInterface.changeColumn('memberships', 'approved', {
|
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use std::collections::TreeMap;
use serde::json::Json;
use gfx;
use gfx_gl::types::GLenum;
#[deriving_deserializable]
pub struct Buffer {
pub uri: String,
#[serial_name = "byteLength"]
pub byte_length: uint,
pub name: String,
#[serial_name = "type"]
pub ty: String,
}
#[deriving_deserializable]
pub struct Accessor {
#[serial_name = "bufferView"]
pub buffer_view: String,
#[serial_name = "byteOffset"]
pub byte_offset: gfx::attrib::Offset,
#[serial_name = "byteStride"]
pub byte_stride: gfx::attrib::Stride,
#[serial_name = "componentType"]
pub component_type: GLenum,
pub count: gfx::VertexCount,
#[serial_name = "type"]
pub ty: String,
pub name: String,
pub max: (f32, f32, f32),
pub min: (f32, f32, f32),
}
#[deriving_deserializable]
pub struct Primitive {
pub attributes: TreeMap<String, String>,
pub indices: String,
pub material: String,
pub primitive: uint,
}
#[deriving_deserializable]
pub struct Mesh {
pub name: String,
pub primitives: Vec<Primitive>,
}
#[deriving_deserializable]
pub struct Shader {
pub name: String,
pub uri: String,
#[serial_name = "type"]
pub ty: GLenum,
}
#[deriving_deserializable]
pub struct Program {
pub name: String,
pub attributes: Vec<String>,
#[serial_name = "vertexShader"]
pub vertex_shader: String,
#[serial_name = "fragmentShader"]
pub fragment_shader: String,
}
#[deriving_deserializable]
pub struct TechniqueParameter {
#[serial_name = "type"]
pub ty: uint,
pub semantic: String, //optional
pub node: String, //optional
pub value: Vec<f32>, //optional
}
#[deriving_deserializable]
pub struct InstanceProgram {
pub attributes: TreeMap<String, String>,
pub program: String,
pub uniforms: TreeMap<String, String>,
}
#[deriving_deserializable]
pub struct StateFunctions {
#[serial_name = "blendColor"]
pub blend_color: (f32, f32, f32, f32),
#[serial_name = "blendEquationSeparate"]
pub blend_equation_separate: (GLenum, GLenum),
#[serial_name = "blendFuncSeparate"]<|fim▁hole|> #[serial_name = "cullFace"]
pub cull_face: (GLenum, ),
#[serial_name = "depthFunc"]
pub depth_func: (GLenum, ),
#[serial_name = "depthMask"]
pub depth_mask: (bool, ),
#[serial_name = "depthRange"]
pub depth_range: (f32, f32),
#[serial_name = "frontFace"]
pub front_face: (GLenum, ),
#[serial_name = "lineWidth"]
pub line_width: (gfx::state::LineWidth, ),
#[serial_name = "polygonOffset"]
pub polygon_offset: (gfx::state::OffsetFactor, gfx::state::OffsetUnits),
pub scissor: (u16, u16, u16, u16),
}
#[deriving_deserializable]
pub struct States {
pub enable: Vec<GLenum>,
pub functions: StateFunctions,
}
#[deriving_deserializable]
pub struct PassProfile {
#[serial_name = "lightingModel"]
pub lighting_model: String,
pub parameters: Vec<String>,
#[serial_name = "texcoordBindings"]
pub texcoord_bindings: TreeMap<String, String>,
}
#[deriving_deserializable]
pub struct PassDetails {
#[serial_name = "commonProfile"]
pub common_profile: PassProfile,
#[serial_name = "type"]
pub ty: String,
}
#[deriving_deserializable]
pub struct Pass {
pub details: PassDetails,
#[serial_name = "instanceProgram"]
pub instance_program: InstanceProgram,
pub states: States,
}
#[deriving_deserializable]
pub struct Technique {
pub name: String,
pub parameters: TreeMap<String, TechniqueParameter>,
pub pass: String,
pub passes: TreeMap<String, Pass>,
}
#[deriving_deserializable]
pub struct InstanceTechnique {
pub technique: String,
pub values: TreeMap<String, Json>, //raw
}
#[deriving_deserializable]
pub struct Material {
pub name: String,
#[serial_name = "instanceTechnique"]
pub instance_technique: InstanceTechnique,
}<|fim▁end|>
|
pub blend_func_separate: (GLenum, GLenum, GLenum, GLenum),
#[serial_name = "colorMask"]
pub color_mask: (bool, bool, bool, bool),
|
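The `#[serial_name = ...]` attributes above map camelCase JSON keys (`byteLength`, `componentType`, ...) onto snake_case struct fields by hand. A small Python sketch of doing that mapping generically when loading such a document; the helper names are hypothetical:

```python
import re

def to_snake(name):
    # byteLength -> byte_length, componentType -> component_type
    return re.sub(r"(?<=[a-z0-9])([A-Z])", r"_\1", name).lower()

def load_struct(json_obj):
    return {to_snake(k): v for k, v in json_obj.items()}

assert load_struct({"byteLength": 128, "byteStride": 12}) == \
       {"byte_length": 128, "byte_stride": 12}
```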
<|file_name|>test_utils.rs<|end_file_name|><|fim▁begin|>// ams - Advanced Memory Scanner
// Copyright (C) 2018 th0rex
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use std::mem::size_of;
use std::slice::from_raw_parts;
use communication::MemoryRegion;
use {Address, Node};
pub(crate) fn simple_node_content<T>(address: Address, content: &[T]) -> Node {<|fim▁hole|> from_raw_parts(
content.as_ptr() as *const u8,
content.len() * size_of::<T>(),
)
}),
)
}
pub(crate) fn simple_node(address: Address) -> Node {
Node::new(address, MemoryRegion::new(&[0; 1]))
}<|fim▁end|>
|
Node::new(
address,
MemoryRegion::new(unsafe {
|
<|file_name|>lstm_ops.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""LSTM Block Cell ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
from tensorflow.contrib.rnn.ops import gen_lstm_ops
from tensorflow.contrib.rnn.python.ops import fused_rnn_cell
from tensorflow.contrib.util import loader
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import resource_loader
_lstm_ops_so = loader.load_op_library(
resource_loader.get_path_to_datafile("_lstm_ops.so"))
# pylint: disable=invalid-name
def _lstm_block_cell(x,
cs_prev,
h_prev,
w,
b,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""Computes the LSTM cell forward propagation for 1 time step.
This implementation uses 1 weight matrix and 1 bias vector, and there's an
optional peephole connection.
This kernel op implements the following mathematical equations:
```python
xh = [x, h_prev]
[i, ci, f, o] = xh * w + b
f = f + forget_bias
if not use_peephole:
wci = wcf = wco = 0
i = sigmoid(cs_prev * wci + i)
f = sigmoid(cs_prev * wcf + f)
ci = tanh(ci)
cs = ci .* i + cs_prev .* f
cs = clip(cs, cell_clip)
o = sigmoid(cs * wco + o)
co = tanh(cs)
h = co .* o
```
Args:
x: A `Tensor`. Must be one of the following types: `float32`.
The input to the LSTM cell, shape (batch_size, num_inputs).
cs_prev: A `Tensor`. Must have the same type as `x`.
Value of the cell state at previous time step.
h_prev: A `Tensor`. Must have the same type as `x`.
Output of the previous cell at previous time step.
w: A `Tensor`. Must have the same type as `x`. The weight matrix.
b: A `Tensor`. Must have the same type as `x`. The bias vector.
wci: A `Tensor`. Must have the same type as `x`.
The weight matrix for input gate peephole connection.
wcf: A `Tensor`. Must have the same type as `x`.
The weight matrix for forget gate peephole connection.
wco: A `Tensor`. Must have the same type as `x`.
The weight matrix for output gate peephole connection.
forget_bias: An optional `float`. Defaults to `1`. The forget gate bias.
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
Value to clip the 'cs' value to. Disable by setting to negative value.
use_peephole: An optional `bool`. Defaults to `False`.
Whether to use peephole weights.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A `Tensor`. Has the same type as `x`. The input gate.
cs: A `Tensor`. Has the same type as `x`. The cell state before the tanh.
f: A `Tensor`. Has the same type as `x`. The forget gate.
o: A `Tensor`. Has the same type as `x`. The output gate.
ci: A `Tensor`. Has the same type as `x`. The cell input.
co: A `Tensor`. Has the same type as `x`. The cell after the tanh.
h: A `Tensor`. Has the same type as `x`. The output h vector.
Raises:
ValueError: If cell_size is None.
"""
if wci is None:
cell_size = cs_prev.get_shape().with_rank(2)[1].value
if cell_size is None:
raise ValueError("cell_size from `cs_prev` should not be None.")
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wcf = wci
wco = wci
# pylint: disable=protected-access
return gen_lstm_ops.lstm_block_cell(
x=x,
cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wcf=wcf,
wco=wco,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip if cell_clip is not None else -1,
use_peephole=use_peephole,
name=name)
# pylint: enable=protected-access
def _block_lstm(seq_len_max,
x,
w,
b,
cs_prev=None,
h_prev=None,
wci=None,
wcf=None,
wco=None,
forget_bias=None,
cell_clip=None,
use_peephole=None,
name=None):
r"""TODO(williamchan): add doc.
Args:
seq_len_max: A `Tensor` of type `int64`.
x: A list of at least 1 `Tensor` objects of the same type in: `float32`.
w: A `Tensor`. Must have the same type as `x`.
b: A `Tensor`. Must have the same type as `x`.
cs_prev: A `Tensor`. Must have the same type as `x`.
h_prev: A `Tensor`. Must have the same type as `x`.
wci: A `Tensor`. Must have the same type as `x`.
wcf: A `Tensor`. Must have the same type as `x`.
wco: A `Tensor`. Must have the same type as `x`.
forget_bias: An optional `float`. Defaults to `1`.
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
use_peephole: An optional `bool`. Defaults to `False`.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (i, cs, f, o, ci, co, h).
i: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
cs: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
f: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
o: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
ci: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
co: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
h: A list with the same number of `Tensor` objects as `x` of `Tensor`
objects of the same type as x.
Raises:
ValueError: If `b` does not have a valid shape.
"""
batch_size = x[0].get_shape().with_rank(2)[0].value
cell_size4 = b.get_shape().with_rank(1)[0].value
if cell_size4 is None:
raise ValueError("`b` shape must not be None.")
cell_size = cell_size4 / 4
zero_state = None
if cs_prev is None or h_prev is None:
zero_state = array_ops.constant(
0, dtype=dtypes.float32, shape=[batch_size, cell_size])
if cs_prev is None:
cs_prev = zero_state
if h_prev is None:
h_prev = zero_state
if wci is None:
wci = array_ops.constant(0, dtype=dtypes.float32, shape=[cell_size])
wcf = wci
wco = wci
# pylint: disable=protected-access
i, cs, f, o, ci, co, h = gen_lstm_ops.block_lstm(
seq_len_max=seq_len_max,
x=array_ops.stack(x),
cs_prev=cs_prev,
h_prev=h_prev,
w=w,
wci=wci,
wcf=wcf,
wco=wco,
b=b,
forget_bias=forget_bias,
cell_clip=cell_clip if cell_clip is not None else -1,
name=name,
use_peephole=use_peephole)
return array_ops.unstack(i), array_ops.unstack(cs), array_ops.unstack(
f), array_ops.unstack(o), array_ops.unstack(ci), array_ops.unstack(
co), array_ops.unstack(h)
# pylint: enable=protected-access
# pylint: enable=invalid-name
_lstm_block_cell_grad_outputs = ["cs_prev_grad", "dicfo"]
@ops.RegisterGradient("LSTMBlockCell")
def _LSTMBlockCellGrad(op, *grad):
"""Gradient for LSTMBlockCell."""
(x, cs_prev, h_prev, w, wci, wcf, wco, b) = op.inputs
(i, cs, f, o, ci, co, _) = op.outputs
(_, cs_grad, _, _, _, _, h_grad) = grad
batch_size = x.get_shape().with_rank(2)[0].value
if batch_size is None:
batch_size = -1
input_size = x.get_shape().with_rank(2)[1].value
if input_size is None:
raise ValueError("input_size from `x` should not be None.")
cell_size = cs_prev.get_shape().with_rank(2)[1].value
if cell_size is None:
raise ValueError("cell_size from `cs_prev` should not be None.")
(cs_prev_grad, dicfo, wci_grad, wcf_grad,
wco_grad) = gen_lstm_ops.lstm_block_cell_grad(
x,
cs_prev,
h_prev,
w,
wci,
wcf,
wco,
b,
i,
cs,
f,
o,
ci,
co,
cs_grad,
h_grad,
use_peephole=op.get_attr("use_peephole"))
# Backprop from dicfo to xh.
xh_grad = math_ops.matmul(dicfo, w, transpose_b=True)
x_grad = array_ops.slice(xh_grad, (0, 0), (batch_size, input_size))
x_grad.get_shape().merge_with(x.get_shape())
h_prev_grad = array_ops.slice(xh_grad, (0, input_size),
(batch_size, cell_size))
h_prev_grad.get_shape().merge_with(h_prev.get_shape())
# Backprop from dicfo to w.
xh = array_ops.concat([x, h_prev], 1)
w_grad = math_ops.matmul(xh, dicfo, transpose_a=True)
w_grad.get_shape().merge_with(w.get_shape())
# Backprop from dicfo to b.
b_grad = nn_ops.bias_add_grad(dicfo)
b_grad.get_shape().merge_with(b.get_shape())
return (x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
wco_grad, b_grad)
@ops.RegisterGradient("BlockLSTM")
def _BlockLSTMGrad(op, *grad):
"""Gradient for BlockLSTM."""
seq_len_max, x, cs_prev, h_prev, w, wci, wcf, wco, b = op.inputs
i, cs, f, o, ci, co, h = op.outputs
cs_grad = grad[1]
h_grad = grad[6]
(x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad, wco_grad,
b_grad) = gen_lstm_ops.block_lstm_grad(
seq_len_max,
x,
cs_prev,
h_prev,
w,
wci,
wcf,
wco,
b,
i,
cs,
f,
o,
ci,
co,
h,
cs_grad,
h_grad,
use_peephole=op.get_attr("use_peephole"))
return [
None, x_grad, cs_prev_grad, h_prev_grad, w_grad, wci_grad, wcf_grad,
wco_grad, b_grad
]
class LSTMBlockCell(rnn_cell_impl.RNNCell):
"""Basic LSTM recurrent network cell.
The implementation is based on: http://arxiv.org/abs/1409.2329.
We add `forget_bias` (default: 1) to the biases of the forget gate in order to
reduce the scale of forgetting in the beginning of the training.
Unlike `rnn_cell_impl.LSTMCell`, this is a monolithic op and should be much
faster. The weight and bias matrices should be compatible as long as the
variable scope matches.
"""
def __init__(self,
num_units,
forget_bias=1.0,
cell_clip=None,
use_peephole=False,
reuse=None):
"""Initialize the basic LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
cell_clip: An optional `float`. Defaults to `-1` (no clipping).
use_peephole: Whether to use peephole connections or not.
reuse: (optional) boolean describing whether to reuse variables in an
existing scope. If not `True`, and the existing scope already has the
given variables, an error is raised.
When restoring from CudnnLSTM-trained checkpoints, must use
CudnnCompatibleLSTMBlockCell instead.
"""
super(LSTMBlockCell, self).__init__(_reuse=reuse)
self._num_units = num_units
self._forget_bias = forget_bias
self._use_peephole = use_peephole
self._cell_clip = cell_clip if cell_clip is not None else -1
self._names = {
"W": "kernel",
"b": "bias",
"wci": "w_i_diag",
"wcf": "w_f_diag",
"wco": "w_o_diag",
"scope": "lstm_cell"
}
@property
def state_size(self):
return rnn_cell_impl.LSTMStateTuple(self._num_units, self._num_units)
@property
def output_size(self):
return self._num_units
def __call__(self, x, states_prev, scope=None):
"""Long short-term memory cell (LSTM)."""
with vs.variable_scope(scope or self._names["scope"]):
x_shape = x.get_shape().with_rank(2)
if not x_shape[1].value:
raise ValueError("Expecting x_shape[1] to be set: %s" % str(x_shape))
if len(states_prev) != 2:
raise ValueError("Expecting states_prev to be a tuple with length 2.")
input_size = x_shape[1].value
w = vs.get_variable(self._names["W"], [input_size + self._num_units,
self._num_units * 4])
b = vs.get_variable(
self._names["b"], [w.get_shape().with_rank(2)[1].value],
initializer=init_ops.constant_initializer(0.0))
if self._use_peephole:
wci = vs.get_variable(self._names["wci"], [self._num_units])
wcf = vs.get_variable(self._names["wcf"], [self._num_units])
wco = vs.get_variable(self._names["wco"], [self._num_units])
else:
wci = wcf = wco = array_ops.zeros([self._num_units])
(cs_prev, h_prev) = states_prev
(_, cs, _, _, _, _, h) = _lstm_block_cell(
x,
cs_prev,
h_prev,
w,
b,
wci=wci,
wcf=wcf,
wco=wco,
forget_bias=self._forget_bias,
cell_clip=self._cell_clip,
use_peephole=self._use_peephole)
new_state = rnn_cell_impl.LSTMStateTuple(cs, h)
return h, new_state
class LSTMBlockWrapper(fused_rnn_cell.FusedRNNCell):
"""This is a helper class that provides housekeeping for LSTM cells.
This may be useful for alternative LSTM and similar type of cells.
The subclasses must implement `_call_cell` method and `num_units` property.
"""
@abc.abstractproperty
def num_units(self):
"""Number of units in this cell (output dimension)."""
pass
@abc.abstractmethod
def _call_cell(self, inputs, initial_cell_state, initial_output, dtype,
sequence_length):
"""Run this LSTM on inputs, starting from the given state.
This method must be implemented by subclasses and does the actual work
of calling the cell.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
initial_cell_state: initial value for cell state, shape `[batch_size,
self._num_units]`
initial_output: initial value of cell output, shape `[batch_size,
self._num_units]`
dtype: The data type for the initial state and expected output.
sequence_length: Specifies the length of each sequence in inputs. An int32
or int64 vector (tensor) size [batch_size], values in [0, time_len) or
None.
Returns:
A pair containing:
- State: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
- Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
"""
pass
def __call__(self,
inputs,
initial_state=None,
dtype=None,
sequence_length=None,
scope=None):
"""Run this LSTM on inputs, starting from the given state.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
or a list of `time_len` tensors of shape `[batch_size, input_size]`.
initial_state: a tuple `(initial_cell_state, initial_output)` with tensors
of shape `[batch_size, self._num_units]`. If this is not provided, the
cell is expected to create a zero initial state of type `dtype`.
dtype: The data type for the initial state and expected output. Required
if `initial_state` is not provided or RNN state has a heterogeneous
dtype.
sequence_length: Specifies the length of each sequence in inputs. An
`int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
time_len).`
Defaults to `time_len` for each element.
scope: `VariableScope` for the created subgraph; defaults to class name.
Returns:
A pair containing:
- Output: A `3-D` tensor of shape `[time_len, batch_size, output_size]`
or a list of time_len tensors of shape `[batch_size, output_size]`,
to match the type of the `inputs`.
- Final state: a tuple `(cell_state, output)` matching `initial_state`.
Raises:
ValueError: in case of shape mismatches
"""
with vs.variable_scope(scope or "lstm_block_wrapper"):
is_list = isinstance(inputs, list)
if is_list:
inputs = array_ops.stack(inputs)
inputs_shape = inputs.get_shape().with_rank(3)
if not inputs_shape[2]:
raise ValueError("Expecting inputs_shape[2] to be set: %s" %
inputs_shape)
batch_size = inputs_shape[1].value
if batch_size is None:
batch_size = array_ops.shape(inputs)[1]
time_len = inputs_shape[0].value
if time_len is None:
time_len = array_ops.shape(inputs)[0]
# Provide default values for initial_state and dtype
if initial_state is None:
if dtype is None:
raise ValueError(
"Either initial_state or dtype needs to be specified")
z = array_ops.zeros(<|fim▁hole|> initial_state = z, z
else:
if len(initial_state) != 2:
raise ValueError(
"Expecting initial_state to be a tuple with length 2 or None")
if dtype is None:
dtype = initial_state[0].dtype
# create the actual cell
if sequence_length is not None:
sequence_length = ops.convert_to_tensor(sequence_length)
initial_cell_state, initial_output = initial_state # pylint: disable=unpacking-non-sequence
cell_states, outputs = self._call_cell(inputs, initial_cell_state,
initial_output, dtype,
sequence_length)
if sequence_length is not None:
# Mask out the part beyond sequence_length
mask = array_ops.transpose(
array_ops.sequence_mask(
sequence_length, time_len, dtype=dtype), [1, 0])
mask = array_ops.tile(
array_ops.expand_dims(mask, [-1]), [1, 1, self.num_units])
outputs *= mask
# Prepend initial states to cell_states and outputs for indexing to work
# correctly,since we want to access the last valid state at
# sequence_length - 1, which can even be -1, corresponding to the
# initial state.
mod_cell_states = array_ops.concat(
[array_ops.expand_dims(initial_cell_state, [0]), cell_states], 0)
mod_outputs = array_ops.concat(
[array_ops.expand_dims(initial_output, [0]), outputs], 0)
final_cell_state = self._gather_states(mod_cell_states, sequence_length,
batch_size)
final_output = self._gather_states(mod_outputs, sequence_length,
batch_size)
else:
# No sequence_lengths used: final state is the last state
final_cell_state = cell_states[-1]
final_output = outputs[-1]
if is_list:
# Input was a list, so return a list
outputs = array_ops.unstack(outputs)
final_state = rnn_cell_impl.LSTMStateTuple(final_cell_state, final_output)
return outputs, final_state
def _gather_states(self, data, indices, batch_size):
"""Produce `out`, s.t. out(i, j) = data(indices(i), i, j)."""
mod_indices = indices * batch_size + math_ops.range(batch_size)
return array_ops.gather(
array_ops.reshape(data, [-1, self.num_units]), mod_indices)
class LSTMBlockFusedCell(LSTMBlockWrapper):
"""FusedRNNCell implementation of LSTM.
This is an extremely efficient LSTM implementation, that uses a single TF op
for the entire LSTM. It should be both faster and more memory-efficient than
LSTMBlockCell defined above.
The implementation is based on: http://arxiv.org/abs/1409.2329.
We add forget_bias (default: 1) to the biases of the forget gate in order to
reduce the scale of forgetting in the beginning of the training.
The variable naming is consistent with `rnn_cell_impl.LSTMCell`.
"""
def __init__(self,
num_units,
forget_bias=1.0,
cell_clip=None,
use_peephole=False):
"""Initialize the LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell.
forget_bias: float, The bias added to forget gates (see above).
cell_clip: clip the cell to this value. Default is no cell clipping.
use_peephole: Whether to use peephole connections or not.
"""
self._num_units = num_units
self._forget_bias = forget_bias
self._cell_clip = cell_clip if cell_clip is not None else -1
self._use_peephole = use_peephole
@property
def num_units(self):
"""Number of units in this cell (output dimension)."""
return self._num_units
def _call_cell(self, inputs, initial_cell_state, initial_output, dtype,
sequence_length):
"""Run this LSTM on inputs, starting from the given state.
Args:
inputs: `3-D` tensor with shape `[time_len, batch_size, input_size]`
initial_cell_state: initial value for cell state, shape `[batch_size,
self._num_units]`
initial_output: initial value of cell output, shape `[batch_size,
self._num_units]`
dtype: The data type for the initial state and expected output.
sequence_length: Specifies the length of each sequence in inputs. An
`int32` or `int64` vector (tensor) size `[batch_size]`, values in `[0,
time_len)` or None.
Returns:
A pair containing:
- Cell state (cs): A `3-D` tensor of shape `[time_len, batch_size,
output_size]`
- Output (h): A `3-D` tensor of shape `[time_len, batch_size,
output_size]`
"""
inputs_shape = inputs.get_shape().with_rank(3)
time_len = inputs_shape[0].value
if time_len is None:
time_len = array_ops.shape(inputs)[0]
input_size = inputs_shape[2].value
w = vs.get_variable(
"kernel",
[input_size + self._num_units, self._num_units * 4], dtype=dtype)
b = vs.get_variable(
"bias", [w.get_shape().with_rank(2)[1]],
initializer=init_ops.constant_initializer(0.0),
dtype=dtype)
if self._use_peephole:
wci = vs.get_variable("w_i_diag", [self._num_units], dtype=dtype)
wcf = vs.get_variable("w_f_diag", [self._num_units], dtype=dtype)
wco = vs.get_variable("w_o_diag", [self._num_units], dtype=dtype)
else:
wci = wcf = wco = array_ops.zeros([self._num_units], dtype=dtype)
if sequence_length is None:
max_seq_len = math_ops.to_int64(time_len)
else:
max_seq_len = math_ops.to_int64(math_ops.reduce_max(sequence_length))
_, cs, _, _, _, _, h = gen_lstm_ops.block_lstm(
seq_len_max=max_seq_len,
x=inputs,
cs_prev=initial_cell_state,
h_prev=initial_output,
w=w,
wci=wci,
wcf=wcf,
wco=wco,
b=b,
forget_bias=self._forget_bias,
cell_clip=self._cell_clip,
use_peephole=self._use_peephole)
return cs, h<|fim▁end|>
|
array_ops.stack([batch_size, self.num_units]), dtype=dtype)
|
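The `_lstm_block_cell` docstring above fully specifies the fused cell: one `[x, h_prev] @ w + b` matmul split into the i/ci/f/o gates, optional peephole terms, a forget-gate bias, and optional cell clipping. A NumPy transcription of those equations, useful as a shape and semantics reference; it is a sketch, not the kernel the op library actually ships:

```python
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def lstm_block_cell_ref(x, cs_prev, h_prev, w, b, wci=0.0, wcf=0.0, wco=0.0,
                        forget_bias=1.0, cell_clip=None):
    xh = np.concatenate([x, h_prev], axis=1)       # [batch, input + cell]
    i, ci, f, o = np.split(xh @ w + b, 4, axis=1)  # one fused matmul
    i = sigmoid(cs_prev * wci + i)
    f = sigmoid(cs_prev * wcf + f + forget_bias)
    ci = np.tanh(ci)
    cs = ci * i + cs_prev * f
    if cell_clip is not None:
        cs = np.clip(cs, -cell_clip, cell_clip)
    o = sigmoid(cs * wco + o)
    co = np.tanh(cs)
    return co * o, cs                              # h, new cell state
```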
<|file_name|>create.ts<|end_file_name|><|fim▁begin|>import Rails from '@rails/ujs';
import { showNotification, showErrorNotification } from 'utilities/notifications';
import I18n from 'retrospring/i18n';
export function createListHandler(event: Event): void {
const button = event.target as HTMLButtonElement;
const input = document.querySelector<HTMLInputElement>('input#new-list-name');
Rails.ajax({
url: '/ajax/create_list',
type: 'POST',
data: new URLSearchParams({
name: input.value,
user: button.dataset.user
}).toString(),<|fim▁hole|> }
showNotification(data.message, data.success);
},
error: (data, status, xhr) => {
console.log(data, status, xhr);
showErrorNotification(I18n.translate('frontend.error.message'));
}
});
}
export function createListInputHandler(event: KeyboardEvent): void {
// Return key
if (event.which === 13) {
event.preventDefault();
document.querySelector<HTMLButtonElement>('button#create-list').click();
}
}<|fim▁end|>
|
success: (data) => {
if (data.success) {
document.querySelector('#lists-list ul.list-group').insertAdjacentHTML('beforeend', data.render);
|
<|file_name|>LazyBannerAd.ts<|end_file_name|><|fim▁begin|>import {
AdObserver,
IBannerAd,
} from "../../ads";
import {
ObserverHandle,
ObserverManager,
Utils,
} from "../../core";
export class LazyBannerAd extends ObserverManager<AdObserver> implements IBannerAd {
private _ad?: IBannerAd;
private _visible: boolean;
private _anchor: [number, number];
private _position: [number, number];
private _size?: [number, number];
private readonly _handle: ObserverHandle;
public constructor() {
super();
this._visible = false;
this._anchor = [0, 0];
this._position = [0, 0];
this._size = undefined;
this._handle = new ObserverHandle();
}
public get ad(): IBannerAd | undefined {
return this._ad;
}
public set ad(value: IBannerAd | undefined) {
this._handle.clear();
if (value === undefined) {
return;
}
this._handle.bind(value)
.addObserver({
onLoaded: () => {
if (this._visible) {
value.isVisible = false;
value.isVisible = true;
}
this.dispatchEvent(observer => observer.onLoaded && observer.onLoaded());
},
onLoadResult: result => this.dispatchEvent(observer =>
observer.onLoadResult && observer.onLoadResult(result)),
onClicked: () => this.dispatchEvent(observer =>
observer.onClicked && observer.onClicked()),
});
this._ad = value;
this._ad.isVisible = this._visible;
this._ad.anchor = this._anchor;
this._ad.position = this._position;
if (this._size !== undefined) {
this._ad.size = this._size;
}
Utils.noAwait(async () => {
await value.load();
});
}
public destroy(): void {
this._ad?.destroy();
this._handle.clear();
}
public get isLoaded(): boolean {
return this._ad?.isLoaded ?? false;
}
public async load(): Promise<boolean> {
if (this._ad == undefined) {
return false;
}
return await this._ad.load();
}
public get anchor(): [number, number] {
return this._anchor;
}
public set anchor(value: [number, number]) {
this._anchor = value;
if (this._ad !== undefined) {
this._ad.anchor = value;
}
}
public get position(): [number, number] {
return this._position;
}
public set position(value: [number, number]) {
this._position = value;
if (this._ad !== undefined) {
this._ad.position = value;
}
}
public get size(): [number, number] {
if (this._size !== undefined) {
return this._size;
}
return this._ad?.size ?? [0, 0];
}
public set size(value: [number, number]) {
this._size = value;
if (this._ad !== undefined) {
this._ad.size = value;
}
}
public get isVisible(): boolean {
return this._visible;
}
public set isVisible(value: boolean) {
this._visible = value;
if (this._ad !== undefined) {
this._ad.isVisible = value;
}<|fim▁hole|>}<|fim▁end|>
|
}
|
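`LazyBannerAd` above is a proxy that records `isVisible`/`anchor`/`position`/`size` while no concrete ad exists and replays them once the real implementation is attached. The buffering idea reduced to a few lines of Python; the class and method names are hypothetical:

```python
class LazyAd:
    """Buffer attribute writes until a real backend is attached."""

    def __init__(self):
        self._ad = None
        self._pending = {}

    def attach(self, ad):
        self._ad = ad
        for name, value in self._pending.items():
            setattr(ad, name, value)       # replay buffered state
        self._pending.clear()

    def __setattr__(self, name, value):
        if name.startswith("_"):           # internal fields: set directly
            super().__setattr__(name, value)
        elif self._ad is None:
            self._pending[name] = value    # no backend yet: buffer it
        else:
            setattr(self._ad, name, value)
```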
<|file_name|>vdoaiBidAdapter.js<|end_file_name|><|fim▁begin|>import * as utils from '../src/utils.js';
import {config} from '../src/config.js';
import {registerBidder} from '../src/adapters/bidderFactory.js';
import {BANNER, VIDEO} from '../src/mediaTypes.js';
const BIDDER_CODE = 'vdoai';
const ENDPOINT_URL = 'https://prebid.vdo.ai/auction';
export const spec = {
code: BIDDER_CODE,
supportedMediaTypes: [BANNER, VIDEO],
/**
* Determines whether or not the given bid request is valid.
*
* @param {BidRequest} bid The bid params to validate.<|fim▁hole|> * @return boolean True if this is a valid bid, and false otherwise.
*/
isBidRequestValid: function (bid) {
return !!(bid.params.placementId);
},
/**
* Make a server request from the list of BidRequests.
*
* @return Array Info describing the request to the server.
* @param validBidRequests
* @param bidderRequest
*/
buildRequests: function (validBidRequests, bidderRequest) {
if (validBidRequests.length === 0) {
return [];
}
return validBidRequests.map(bidRequest => {
const sizes = utils.getAdUnitSizes(bidRequest);
const payload = {
placementId: bidRequest.params.placementId,
sizes: sizes,
bidId: bidRequest.bidId,
referer: bidderRequest.refererInfo.referer,
id: bidRequest.auctionId,
mediaType: bidRequest.mediaTypes.video ? 'video' : 'banner'
};
bidRequest.params.bidFloor && (payload['bidFloor'] = bidRequest.params.bidFloor);
return {
method: 'POST',
url: ENDPOINT_URL,
data: payload
};
});
},
/**
* Unpack the response from the server into a list of bids.
*
* @param {ServerResponse} serverResponse A successful response from the server.
* @param bidRequest
* @return {Bid[]} An array of bids which were nested inside the server.
*/
interpretResponse: function (serverResponse, bidRequest) {
const bidResponses = [];
const response = serverResponse.body;
const creativeId = response.adid || 0;
// const width = response.w || 0;
const width = response.width;
// const height = response.h || 0;
const height = response.height;
const cpm = response.price || 0;
response.rWidth = width;
response.rHeight = height;
const adCreative = response.vdoCreative;
if (width !== 0 && height !== 0 && cpm !== 0 && creativeId !== 0) {
// const dealId = response.dealid || '';
const currency = response.cur || 'USD';
const netRevenue = true;
// const referrer = bidRequest.data.referer;
const bidResponse = {
requestId: response.bidId,
cpm: cpm,
width: width,
height: height,
creativeId: creativeId,
// dealId: dealId,
currency: currency,
netRevenue: netRevenue,
ttl: config.getConfig('_bidderTimeout'),
// referrer: referrer,
// ad: response.adm
// ad: adCreative,
mediaType: response.mediaType
};
if (response.mediaType == 'video') {
bidResponse.vastXml = adCreative;
} else {
bidResponse.ad = adCreative;
}
if (response.adDomain) {
bidResponse.meta = {
advertiserDomains: response.adDomain
}
}
bidResponses.push(bidResponse);
}
return bidResponses;
},
getUserSyncs: function(syncOptions, serverResponse) {
let syncUrls = serverResponse[0] && serverResponse[0].body && serverResponse[0].body.cookiesync && serverResponse[0].body.cookiesync.bidder_status;
if (syncOptions.iframeEnabled && syncUrls && syncUrls.length > 0) {
let prebidSyncUrls = syncUrls.map(syncObj => {
return {
url: syncObj.usersync.url,
type: 'iframe'
}
})
return prebidSyncUrls;
}
return [];
},
onTimeout: function(data) {},
onBidWon: function(bid) {},
onSetTargeting: function(bid) {}
};
registerBidder(spec);<|fim▁end|>
| |
<|file_name|>LessonRouter.js<|end_file_name|><|fim▁begin|>import express from 'express';
import LessonController from '../controllers/LessonController';
const router = express.Router();
/* eslint-disable no-unused-vars */
const LessonsPath = '/api/lessonplans';
const teacherName = 'yonderWay';
const subjectName = 'theatre1';
const lessonNumber = '/:id';
// the data access should look something like this:
// '/api/teachers/{name}/{class}/{lesson#}'
router.post(LessonsPath, LessonController.create);
router.get(LessonsPath, LessonController.list);
router.get(LessonsPath + '/:id', LessonController.listOne);
router.put(LessonsPath + '/:id', LessonController.update);
router.delete(LessonsPath + '/:id', LessonController.delete);<|fim▁hole|>export default router;<|fim▁end|>
| |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>var expect = require('chai').expect;
var search = require('./');
describe('binary search', function() {
it('should search 0 elements', function() {
var arr = [];
expect(search(arr, 4)).to.equal(-1);
});
it('should search 1 element not found', function() {
var arr = [1];
expect(search(arr, 4)).to.equal(-1);
});
it('should search 1 element found', function() {
var arr = [1];
expect(search(arr, 1)).to.equal(0);
});
<|fim▁hole|> it('should search odd', function() {
var arr = [1, 2, 3, 4, 5, 6, 7];
expect(search(arr, 4)).to.equal(3);
});
it('should search even', function() {
var arr = [1, 2, 3, 4, 5, 6, 7, 8];
expect(search(arr, 4)).to.equal(3);
});
});<|fim▁end|>
| |
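The suite above pins down the contract of the module under test: `search(arr, x)` returns the index of `x` in a sorted array, or -1, covering empty, odd-length, and even-length inputs. One implementation satisfying that contract, as a Python sketch:

```python
def search(arr, target):
    lo, hi = 0, len(arr) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if arr[mid] == target:
            return mid
        if arr[mid] < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1

assert search([], 4) == -1
assert search([1], 1) == 0
assert search([1, 2, 3, 4, 5, 6, 7], 4) == 3     # odd length
assert search([1, 2, 3, 4, 5, 6, 7, 8], 4) == 3  # even length
```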
<|file_name|>setup_for_centos7.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
import subprocess
import shutil
import fix_rocks_network
import json
pxelinux_kernels_dir='/tftpboot/pxelinux/';
centos7_templates_dir='./centos7_ks'
centos7_dir='/export/rocks/install/centos7/';
centos7_ks_scripts_dir=centos7_dir+'/scripts/';
centos7_pxeboot_dir=centos7_dir+'/images/pxeboot';
#Fix PXE boot bug
def fix_pxe_bug():
shutil.copy('/usr/share/syslinux/chain.c32', pxelinux_kernels_dir);
subprocess.call('rocks add bootaction action=os kernel="com32 chain.c32" args="hd0"', shell=True);
def fix_install_action():
shutil.copy(centos7_pxeboot_dir+'/vmlinuz', pxelinux_kernels_dir+'/vmlinuz-centos7');
shutil.copy(centos7_pxeboot_dir+'/initrd.img', pxelinux_kernels_dir+'/initrd.img-centos7');
ks_host = fix_rocks_network.get_rocks_attr('Kickstart_PrivateKickstartHost');
ks_base_dir = fix_rocks_network.get_rocks_attr('Kickstart_PrivateKickstartBasedir');
subprocess.call('rocks add bootaction action=install kernel=vmlinuz-centos7 ramdisk=initrd.img-centos7 args="ksdevice=bootif ramdisk_size=16000 ks=http://'+ks_host+'/'+ks_base_dir+'/centos7/ks.cfg rhgb quiet console=tty0 console=ttyS0,115200n8"', shell=True);
def setup_for_centos7(params):
if(not os.path.isdir(centos7_dir)):
sys.stderr.write('ERROR: the contents of a CentOS-7 iso must be unpacked in the directory: '+centos7_dir+'\n');
raise Exception('Missing directory containing CentOS-7 iso contents');
try:<|fim▁hole|> os.mkdir(centos7_ks_scripts_dir, 0755);
except OSError:
pass
#PXE boot changes
fix_pxe_bug();
fix_install_action();
#ssh public key
shutil.rmtree(centos7_ks_scripts_dir+'/ssh_public_keys', ignore_errors=True);
try:
os.remove(centos7_ks_scripts_dir+'/ssh_public_keys');
except Exception:
pass
if('ssh_public_keys_file' in params):
shutil.copy(params['ssh_public_keys_file'], centos7_ks_scripts_dir+'/ssh_public_keys');
#Get root password
root_passwd='$6$CdGXnN6zABQ0Pc/7$lsUtU27wSxwpGNrLQq00Mzpwb27ujgkV5Trq8wlZrqOmrmFuX6q5X0hebNKKs5DSk8.fU3o.b6Z0ISOfNnpTl.';
sys.stderr.write('Enter the root password to be set for your cluster by kickstart\n');
pid = subprocess.Popen('grub-crypt --sha-512', shell=True, stdout=subprocess.PIPE);
stdout_str = pid.communicate()[0];
if(pid.returncode == 0):
root_passwd = stdout_str.strip();
else:
sys.stderr.write('ERROR: could not obtain root password, using a random string. Re-run the program to set your root passwd\n');
#Copy disk.py file for partitioning
shutil.copy(centos7_templates_dir+'/scripts/disk.py', centos7_ks_scripts_dir+'/disk.py');
#Create files from templates
shutil.copy(centos7_templates_dir+'/ks_template.cfg', centos7_dir+'/ks.cfg');
shutil.copy(centos7_templates_dir+'/scripts/pre_install_template.sh', centos7_ks_scripts_dir+'/pre_install.sh');
shutil.copy(centos7_templates_dir+'/scripts/post_install_template.sh', centos7_ks_scripts_dir+'/post_install.sh');
ks_host = fix_rocks_network.get_rocks_attr('Kickstart_PrivateKickstartHost');
ks_base_dir = fix_rocks_network.get_rocks_attr('Kickstart_PrivateKickstartBasedir');
cmd = 'sed -i -e \'s/Kickstart_PrivateKickstartHost/'+ks_host+'/g\' -e \'s/Kickstart_PrivateKickstartBasedir/'+ks_base_dir+'/g\' '+centos7_ks_scripts_dir+'/post_install.sh '+centos7_ks_scripts_dir+'/pre_install.sh '+centos7_dir+'/ks.cfg';
status = subprocess.call(cmd, shell=True);
if(status != 0):
sys.stderr.write('ERROR: could not setup pre/post install scripts and kickstart file\n');
raise Exception('Could not setup pre/post install scripts and kickstart file');
if('timezone' in params):
cmd = 'sed -i -e \'/^timezone/c\\\ntimezone '+params['timezone']+'\' '+centos7_dir+'/ks.cfg'
status = subprocess.call(cmd, shell=True);
if(status != 0):
sys.stderr.write('ERROR: could not setup timezone in kickstart file\n');
raise Exception('Could not setup timezone in kickstart file');
with open(centos7_dir+'/ks.cfg', 'ab') as fptr:
fptr.write('rootpw --iscrypted '+root_passwd+' \n');
fptr.close();
if __name__ == "__main__":
params = {};
if(len(sys.argv) >= 2):
with open(sys.argv[1], 'rb') as data_file:
params = json.load(data_file);
directory = os.path.dirname(sys.argv[0]);
if(directory and directory != ''):
os.chdir(directory);
setup_for_centos7(params);<|fim▁end|>
| |
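`setup_for_centos7` above rewrites its kickstart templates by shelling out to `sed -i` with literal placeholder names. The same substitution done in-process, as a hedged Python alternative; `render_template` is a hypothetical helper, not part of the script:

```python
import re

def render_template(text, mapping):
    # Replace every literal placeholder in one pass, no shelling out to sed.
    pattern = re.compile("|".join(re.escape(k) for k in mapping))
    return pattern.sub(lambda m: mapping[m.group(0)], text)

print(render_template(
    "ks=http://Kickstart_PrivateKickstartHost/Kickstart_PrivateKickstartBasedir/centos7/ks.cfg",
    {"Kickstart_PrivateKickstartHost": "10.1.1.1",
     "Kickstart_PrivateKickstartBasedir": "install"}))
```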
<|file_name|>reject.js<|end_file_name|><|fim▁begin|>var _complement = require('./internal/_complement');<|fim▁hole|>var filter = require('./filter');
/**
* Similar to `filter`, except that it keeps only values for which the given predicate
* function returns falsy. The predicate function is passed one argument: *(value)*.
*
* Acts as a transducer if a transformer is given in list position.
* @see R.transduce
*
* @func
* @memberOf R
* @category List
* @sig (a -> Boolean) -> [a] -> [a]
* @param {Function} fn The function called per iteration.
* @param {Array} list The collection to iterate over.
* @return {Array} The new filtered array.
* @see R.filter
* @example
*
* var isOdd = function(n) {
* return n % 2 === 1;
* };
* R.reject(isOdd, [1, 2, 3, 4]); //=> [2, 4]
*/
module.exports = _curry2(function reject(fn, list) {
return filter(_complement(fn), list);
});<|fim▁end|>
|
var _curry2 = require('./internal/_curry2');
|
<|file_name|>get_process.go<|end_file_name|><|fim▁begin|>// Copyright © 2021 Sascha Andres <[email protected]>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software<|fim▁hole|>// See the License for the specific language governing permissions and
// limitations under the License.
package helper
import (
"github.com/sascha-andres/devenv"
)
func GetProcess(e *devenv.EnvironmentConfiguration) devenv.EnvironmentExternalProcessConfiguration {
return e.ProcessConfiguration
}<|fim▁end|>
|
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var C = crel2;
var ventana = API.widget.create();
var textarea;
C(ventana,
C('button', ['onclick', local_set_test], 'LOCAL SET test'),
C('button', ['onclick', local_get_test], 'LOCAL GET test'),
C('button', ['onclick', local_delete_test], 'LOCAL DELETE test'),
C('button', ['onclick', local_delete_all_test], 'LOCAL DELETE ALL test'),
C('button', ['onclick', local_exists], 'LOCAL EXISTS test'),
C('br'),
C('button', ['onclick', remote_set_test], 'REMOTE SET test'),
C('button', ['onclick', remote_get_test], 'REMOTE GET test'),
C('button', ['onclick', remote_delete_test], 'REMOTE DELETE test'),
C('button', ['onclick', remote_delete_all_test], 'REMOTE DELETE ALL test'),
C('button', ['onclick', remote_exists], 'REMOTE EXISTS test'),
C('br'),
C('button', ['onclick', global_set_test], 'GLOBAL SET test'),
C('button', ['onclick', global_get_test], 'GLOBAL GET test'),
C('button', ['onclick', global_delete_test], 'GLOBAL DELETE test'),
C('button', ['onclick', global_exists], 'GLOBAL EXISTS test'),
C('br'),
textarea = C('textarea')
);
function log(what){
textarea.value += what+"\n";
}
//log(API.url(widgetID,'main.js'));
// Variables are saved as text
/////////////////////////////
// LOCAL
/////////////////////////////
function local_set_test(){
var text_rnd = Math.random();
log('LOCAL SET Test Saving the text: '+text_rnd);
API.storage.localStorage.set('test', text_rnd, function(entrada){
if(entrada){
log('LOCAL SET Test Text Saved.');
}
else{
log('LOCAL SET Test Text NOT saved.');
}
});
}
function local_get_test(){
log('LOCAL GET Test');
API.storage.localStorage.get('test', function(entrada){
if(entrada){
log('LOCAL GET Test Got the text: '+entrada);
}
else{
log('LOCAL GET Test There is not a saved variable with that name.');
}
});
}
function local_delete_test(){
log('LOCAL DELETE Test');
API.storage.localStorage.delete('test', function(entrada){
if(entrada){
log('LOCAL DELETE Test deleted OK.');
}
else{
log('LOCAL DELETE Test deleted FAIL.');
}
});
API.storage.localStorage.get('test', function(entrada){
if(entrada){
log('LOCAL DELETE Test confirmed FAIL.');
}
else{
log('LOCAL DELETE Test confirmed OK.');
}
});
}
function local_delete_all_test(){
log('LOCAL DELETE ALL Test');
API.storage.localStorage.deleteAll(function(entrada){
if(entrada){
log('LOCAL DELETE ALL Test deleted OK.');
}
else{
log('LOCAL DELETE ALL Test deleted FAIL.');
}
});
API.storage.localStorage.get('test', function(entrada){
if(entrada){
log('LOCAL DELETE ALL Test confirmed FAIL.');
}
else{
log('LOCAL DELETE ALL Test confirmed OK.');
}
});
}
function local_exists(){
log('LOCAL EXISTS Test');
API.storage.localStorage.exists('test', function(entrada){
if(entrada){
log('LOCAL EXISTS Test variable exists = YES.');
}
else{
log('LOCAL EXISTS Test variable exists = NO.');
}
});
}
/////////////////////////////
// REMOTE
/////////////////////////////
function remote_set_test(){
var text_rnd = Math.random();
log('REMOTE SET Test Saving the text: '+text_rnd);
API.storage.remoteStorage.set('test', text_rnd, function(entrada){
if(entrada){
log('REMOTE SET Test Text Saved.');
}
else{
log('REMOTE SET Test Text NOT saved.');
}
});
}
function remote_get_test(){
log('REMOTE GET Test');
API.storage.remoteStorage.get('test', function(entrada){
if(entrada){
log('REMOTE GET Test Got the text: '+entrada);
}
else{
log('REMOTE GET Test There is not a saved variable with that name.');
}
});
}
function remote_delete_test(){
log('REMOTE DELETE Test');
<|fim▁hole|> }
else{
log('REMOTE DELETE Test deleted FAIL.');
}
});
API.storage.remoteStorage.get('test', function(entrada){
if(entrada){
log('REMOTE DELETE Test confirmed FAIL.');
}
else{
log('REMOTE DELETE Test confirmed OK.');
}
});
}
function remote_delete_all_test(){
log('REMOTE DELETE ALL Test');
API.storage.remoteStorage.deleteAll(function(entrada){
if(entrada){
log('REMOTE DELETE ALL Test deleted OK.');
}
else{
log('REMOTE DELETE ALL Test deleted FAIL.');
}
});
API.storage.remoteStorage.get('test', function(entrada){
if(entrada){
log('REMOTE DELETE ALL Test confirmed FAIL.');
}
else{
log('REMOTE DELETE ALL Test confirmed OK.');
}
});
}
function remote_exists(){
log('REMOTE EXISTS Test');
API.storage.remoteStorage.exists('test', function(entrada){
if(entrada){
log('REMOTE EXISTS Test variable exists = YES.');
}
else{
log('REMOTE EXISTS Test variable exists = NO.');
}
});
}
/////////////////////////////
// GLOBAL
/////////////////////////////
function global_set_test(){
var text_rnd = Math.random();
log('GLOBAL SET Test Saving the text: '+text_rnd);
API.storage.sharedStorage.set('test', text_rnd, function(entrada){
if(entrada){
log('GLOBAL SET Test Text Saved.');
}
else{
log('GLOBAL SET Test Text NOT saved.');
}
});
}
function global_get_test(){
log('GLOBAL GET Test');
API.storage.sharedStorage.get('test', function(entrada){
if(entrada){
log('GLOBAL GET Test Got the text: '+entrada);
}
else{
log('GLOBAL GET Test There is not a saved variable with that name.');
}
});
}
function global_delete_test(){
log('GLOBAL DELETE Test');
API.storage.sharedStorage.delete('test', function(entrada){
if(entrada){
log('GLOBAL DELETE Test deleted OK.');
}
else{
log('GLOBAL DELETE Test deleted FAIL.');
}
});
API.storage.sharedStorage.get('test', function(entrada){
if(entrada){
log('GLOBAL DELETE Test confirmed FAIL.');
}
else{
log('GLOBAL DELETE Test confirmed OK.');
}
});
}
function global_exists(){
log('GLOBAL EXISTS Test');
API.storage.sharedStorage.exists('test', function(entrada){
if(entrada){
log('GLOBAL EXISTS Test variable exists = YES.');
}
else{
log('GLOBAL EXISTS Test variable exists = NO.');
}
});
}<|fim▁end|>
|
API.storage.remoteStorage.delete('test', function(entrada){
if(entrada){
log('REMOTE DELETE Test deleted OK.');
|
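The widget code above repeats one identical set/get/delete/exists round-trip for each of the local, remote and shared scopes. A hedged Python sketch of the same test pattern, parametrized over scopes (the dict-backed store is an assumption for illustration, not the widget API):

import random

class DictStorage(object):
    """In-memory stand-in for one storage scope (local/remote/shared)."""
    def __init__(self):
        self._data = {}
    def set(self, key, value):
        self._data[key] = str(value)  # variables are saved as text
        return True
    def get(self, key):
        return self._data.get(key)
    def delete(self, key):
        return self._data.pop(key, None) is not None
    def exists(self, key):
        return key in self._data

for scope in ('LOCAL', 'REMOTE', 'GLOBAL'):
    store = DictStorage()
    value = random.random()
    assert store.set('test', value)
    assert store.get('test') == str(value)
    assert store.exists('test')
    assert store.delete('test') and not store.exists('test')
    print(scope, 'set/get/delete/exists OK')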
<|file_name|>voting.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from commands.utilities import register
import asyncio
import re
class Voting():
def __init__(self, config=None):
# regex to match a number at the start of the message.
# Being a float is optional.
self.length_re = r'--((\d*)?(\.\d*)?)'
        # regex to match and capture vote options in square brackets.
self.options_re = r'\[(.+)\]'
self.vote_length = 0.5
self.default_options = ['yes', 'no']
self.active_votes = {}
def apply_regex(self, msg, regex):
'''
Applies the regex and removes the matched
elements from the message.
        Returns the stripped message and the matched group,
        or False if there is no match.
'''
result = re.search(regex, msg)
if result:
msg = re.sub(regex, '', msg).strip()
return msg, result.group(0)
else:
return False
def handle_input(self, msg):
'''
Parses the supplied message to determine the vote
        length and supplied parameters (if any).
        Expected vote format:
        !vote[--time] String of the vote [[parameter1, param2]]
'''
# Check if the user supplied a length
regex_result = self.apply_regex(msg, self.length_re)
if regex_result:
msg, matched_length = regex_result
# start at the second index to avoid the -- at the start
# of the time parameter.
vote_length = float(matched_length[2:])
else:
vote_length = self.vote_length
# Check if the user supplied extra parameters
regex_result = self.apply_regex(msg, self.options_re)
if regex_result:
msg, extra_options = regex_result
# remove square brackets, split on comma and strip whitespace
extra_options = extra_options.replace('[', '').replace(']', '')
options = extra_options.lower().split(',')
options = [word.strip() for word in options]
# Storing length in a variable here to later compare
# after forming a dictionary to ensure there were no
# duplicates.
option_len = len(options)
if len(options) < 2:
return False
# Create a dictionary with the voter counts set to 0
values = [0 for option in options]
vote_options = dict(zip(options, values))
# Make sure the options aren't repeated by comparing length
# before the dictionary was created.
if option_len != len(vote_options):
return False
else:
values = [0 for index in self.default_options]
vote_options = dict(zip(self.default_options, values))
# What remains of the msg should be the vote question.
if len(msg.strip()) < 1:
return False
return msg, vote_length, vote_options
async def send_start_message(self, client, channel, vote_length, msg):
'''
Simple function that sends a message that a
        vote has started asynchronously.
'''
vote_parms = self.active_votes[channel][1]
start_string = 'Starting vote ```%s``` with options ' % msg
param_string = ' '.join(['%s' for index in range(len(vote_parms))])
start_string += '[ ' + param_string % tuple(vote_parms.keys()) + ' ]'
start_string += ' For %s minutes.' % vote_length
await channel.send(start_string)
async def end_vote(self, client, channel, msg):
'''
Counts the votes to determine the winner and sends
        the finish message. Can't simply check the max value
because there might be a draw. Should probably break
it up.
'''
vote_parms = self.active_votes[channel][1]
end_string = 'Voting for ```%s``` completed.' % msg
max_value = max(vote_parms.values())
winners = [key for key, value in vote_parms.items()
if value == max_value]
if len(winners) == 1:
end_string += ' The winner is **%s**' % tuple(winners)
else:
winner_string = ' '.join(['%s' for index in range(len(winners))])
end_string += ' The winners are [ **' + winner_string % tuple(winners) + '** ]'
await channel.send(end_string)
async def run_vote(self, client, channel, vote_length, msg):
'''
Simple async function that sleeps for the vote length
and calls the start and end voting functions.
'''
await self.send_start_message(client, channel, vote_length, msg)
# sleep for the vote length.
await asyncio.sleep(vote_length * 60)
# Count the votes and send the ending message
await self.end_vote(client, channel, msg)
# Delete the dictionary entry now that the vote is finished.<|fim▁hole|> '''
        Main function that handles the vote command. Makes sure
        that only one vote is running at a time in a channel.
Calls from a channel that has a vote going on are
considered to be a vote for the ongoing vote.
dict entry: active_votes(client, {option: count}, [voters])
'''
if channel not in self.active_votes:
processed_input = self.handle_input(msg)
if processed_input:
msg, vote_len, params = processed_input
# Save a reference to the sleep function, the valid params
# for the specific vote and an empty list which will contain
# the name of users who have already voted.
self.active_votes[channel] = (self.run_vote, params, [])
# print('starting vote with ', params)
# Start the actual vote.
await self.active_votes[channel][0](client, channel, vote_len, msg)
else:
return ('Invalid format for starting a vote. The correct format is '
'```!vote[--time] Vote question [vote options]``` '
'**eg:** !vote start a vote on some topic? [yes, no, maybe]')
else:
# An active vote already exists for this channel.
# First check if the user has already voted in it.
if user in self.active_votes[channel][2]:
return ("Stop attempting electoral fraud %s, "
"you've already voted") % user
else:
# Check if the supplied argument is a valid vote option.
vote_option = msg.lower().strip()
valid_options = self.active_votes[channel][1]
if vote_option in valid_options:
self.active_votes[channel][1][vote_option] += 1
# Add the user to the list of users.
self.active_votes[channel][2].append(user)
# return 'Increasing %s vote :)' % vote_option
else:
error_str = 'Invalid vote option %s. ' % user
error_str += 'The options are ' + str(tuple(valid_options.keys()))
return error_str<|fim▁end|>
|
del self.active_votes[channel]
@register('!vote')
async def start_vote(self, msg, user, channel, client, *args, **kwargs):
|
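The docstrings above describe the expected command format: !vote[--time] question [options]. A standalone sketch of the same two-regex parse, mirroring (not importing) the class above; the regex patterns are copied from it:

import re

LENGTH_RE = r'--((\d*)?(\.\d*)?)'   # optional --<minutes>, floats allowed
OPTIONS_RE = r'\[(.+)\]'            # [comma, separated, options]

def parse_vote(msg, default_length=0.5, default_options=('yes', 'no')):
    length = default_length
    match = re.search(LENGTH_RE, msg)
    if match and match.group(1):
        length = float(match.group(0)[2:])   # strip the leading '--'
        msg = re.sub(LENGTH_RE, '', msg).strip()
    options = list(default_options)
    match = re.search(OPTIONS_RE, msg)
    if match:
        options = [w.strip() for w in match.group(1).lower().split(',')]
        msg = re.sub(OPTIONS_RE, '', msg).strip()
    return msg, length, options

print(parse_vote('start a vote on some topic?--2 [yes, no, maybe]'))
# -> ('start a vote on some topic?', 2.0, ['yes', 'no', 'maybe'])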
<|file_name|>SettingsEthernet.js<|end_file_name|><|fim▁begin|>"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));<|fim▁hole|>
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M7.77 6.76 6.23 5.48.82 12l5.41 6.52 1.54-1.28L3.42 12l4.35-5.24zM7 13h2v-2H7v2zm10-2h-2v2h2v-2zm-6 2h2v-2h-2v2zm6.77-7.52-1.54 1.28L20.58 12l-4.35 5.24 1.54 1.28L23.18 12l-5.41-6.52z"
}), 'SettingsEthernet');
exports.default = _default;<|fim▁end|>
|
var _jsxRuntime = require("react/jsx-runtime");
|
<|file_name|>BPMNServiceComponent.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.bpmn.core.internal;
import org.activiti.engine.ProcessEngines;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.bpmn.core.ActivitiEngineBuilder;
import org.wso2.carbon.bpmn.core.BPMNServerHolder;
import org.wso2.carbon.bpmn.core.db.DataSourceHandler;
import org.wso2.carbon.bpmn.core.deployment.TenantManager;
import org.wso2.carbon.bpmn.core.exception.BPMNMetaDataTableCreationException;
import org.wso2.carbon.bpmn.core.exception.DatabaseConfigurationException;
import org.wso2.carbon.registry.core.service.RegistryService;
/**
* @scr.component name="org.wso2.carbon.bpmn.core.internal.BPMNServiceComponent" immediate="true"
* @scr.reference name="registry.service" interface="org.wso2.carbon.registry.core.service.RegistryService"
* cardinality="1..1" policy="dynamic" bind="setRegistryService" unbind="unsetRegistryService"
*/
public class BPMNServiceComponent {
private static Log log = LogFactory.getLog(BPMNServiceComponent.class);
protected void activate(ComponentContext ctxt) {
log.info("Initializing the BPMN core component...");
try {
BPMNServerHolder holder = BPMNServerHolder.getInstance();
ActivitiEngineBuilder activitiEngineBuilder = new ActivitiEngineBuilder();
holder.setEngine(activitiEngineBuilder.buildEngine());
holder.setTenantManager(new TenantManager());
DataSourceHandler dataSourceHandler = new DataSourceHandler();
dataSourceHandler.initDataSource(activitiEngineBuilder.getDataSourceJndiName());
dataSourceHandler.closeDataSource();
} catch (BPMNMetaDataTableCreationException e) {
log.error("Could not create BPMN checksum table", e);
} catch (DatabaseConfigurationException e) {
log.error("Could not create BPMN checksum table", e);
}catch (Throwable e) {
log.error("Failed to initialize the BPMN core component.", e);
}
}
protected void deactivate(ComponentContext ctxt) {
log.info("Stopping the BPMN core component...");
ProcessEngines.destroy();
}
protected void setRegistryService(RegistryService registrySvc) {
if (log.isDebugEnabled()) {
log.debug("RegistryService bound to the BPMN component");
}
BPMNServerHolder.getInstance().setRegistryService(registrySvc);
}
public void unsetRegistryService(RegistryService registryService) {
if (log.isDebugEnabled()) {
log.debug("RegistryService unbound from the BPMN component");
}
BPMNServerHolder.getInstance().unsetRegistryService(registryService);<|fim▁hole|> }
}<|fim▁end|>
| |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>class ParamsException(Exception):
"""Exception raised when tp, fmt and size values are wrongs"""
pass
class LineSizeException(Exception):
"""Exception raised when line size is bigger then specified"""
pass
class LineIdentifierException(Exception):
"""Exception raised when line indentifier rased from the
file is different to the line identifier used in the specification
<|fim▁hole|> pass<|fim▁end|>
|
    note: the line identifier is defined using the .eq() function
"""
|
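A minimal sketch of how a fixed-width line parser could raise the exceptions defined above (the validator and the field positions are assumptions; only the exception names come from the file):

# assuming: from exceptions import LineSizeException, LineIdentifierException

def validate_line(line, expected_size, expected_id):
    # reject lines longer than the specification allows
    if len(line) > expected_size:
        raise LineSizeException(
            'line is %d chars, specification allows %d' % (len(line), expected_size))
    # the identifier is assumed to occupy the first two columns
    if line[:2] != expected_id:
        raise LineIdentifierException(
            'expected identifier %r, got %r' % (expected_id, line[:2]))

validate_line('01JOHN DOE', expected_size=20, expected_id='01')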
<|file_name|>test_traversal.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.sparse.csgraph import breadth_first_tree, depth_first_tree,\
csgraph_to_dense, csgraph_from_dense
def test_graph_breadth_first():
csgraph = np.array([[0, 1, 2, 0, 0],
[1, 0, 0, 0, 3],
[2, 0, 0, 7, 0],
[0, 0, 7, 0, 1],
[0, 3, 0, 1, 0]])
csgraph = csgraph_from_dense(csgraph, null_value=0)
bfirst = np.array([[0, 1, 2, 0, 0],
[0, 0, 0, 0, 3],
[0, 0, 0, 7, 0],
[0, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
for directed in [True, False]:
bfirst_test = breadth_first_tree(csgraph, 0, directed)
assert_array_almost_equal(csgraph_to_dense(bfirst_test),
bfirst)
def test_graph_depth_first():
csgraph = np.array([[0, 1, 2, 0, 0],
[1, 0, 0, 0, 3],
[2, 0, 0, 7, 0],
[0, 0, 7, 0, 1],
[0, 3, 0, 1, 0]])
csgraph = csgraph_from_dense(csgraph, null_value=0)
dfirst = np.array([[0, 1, 0, 0, 0],
[0, 0, 0, 0, 3],
[0, 0, 0, 0, 0],
[0, 0, 7, 0, 0],
[0, 0, 0, 1, 0]])<|fim▁hole|>
for directed in [True, False]:
dfirst_test = depth_first_tree(csgraph, 0, directed)
assert_array_almost_equal(csgraph_to_dense(dfirst_test),
dfirst)
def test_graph_breadth_first_trivial_graph():
csgraph = np.array([[0]])
csgraph = csgraph_from_dense(csgraph, null_value=0)
bfirst = np.array([[0]])
for directed in [True, False]:
bfirst_test = breadth_first_tree(csgraph, 0, directed)
assert_array_almost_equal(csgraph_to_dense(bfirst_test),
bfirst)
def test_graph_depth_first_trivial_graph():
csgraph = np.array([[0]])
csgraph = csgraph_from_dense(csgraph, null_value=0)
bfirst = np.array([[0]])
for directed in [True, False]:
bfirst_test = depth_first_tree(csgraph, 0, directed)
assert_array_almost_equal(csgraph_to_dense(bfirst_test),
bfirst)<|fim▁end|>
| |
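A quick worked example of the API exercised above (the graph here is chosen for illustration, not taken from the test file):

import numpy as np
from scipy.sparse.csgraph import breadth_first_tree, csgraph_from_dense, csgraph_to_dense

# edges 0-1 (weight 1) and 0-2 (weight 2); BFS from node 0 keeps both edges
dense = np.array([[0, 1, 2],
                  [1, 0, 0],
                  [2, 0, 0]])
graph = csgraph_from_dense(dense, null_value=0)
tree = breadth_first_tree(graph, 0, directed=False)
print(csgraph_to_dense(tree))
# [[ 0.  1.  2.]
#  [ 0.  0.  0.]
#  [ 0.  0.  0.]]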
<|file_name|>HttpJsonRoutesCallableFactory.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonCallableFactory;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshotCallable;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.rpc.BatchingCallSettings;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
import com.google.api.gax.rpc.ServerStreamingCallable;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.Operation;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST callable factory implementation for the Routes service API.
*
* <p>This class is for advanced usage.
*/
@Generated("by gapic-generator-java")
@BetaApi
public class HttpJsonRoutesCallableFactory
implements HttpJsonStubCallableFactory<Operation, GlobalOperationsStub> {
@Override
public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createUnaryCallable(
HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
UnaryCallSettings<RequestT, ResponseT> callSettings,
ClientContext clientContext) {
return HttpJsonCallableFactory.createUnaryCallable(
httpJsonCallSettings, callSettings, clientContext);
}
@Override
public <RequestT, ResponseT, PagedListResponseT>
UnaryCallable<RequestT, PagedListResponseT> createPagedCallable(
HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
PagedCallSettings<RequestT, ResponseT, PagedListResponseT> callSettings,
ClientContext clientContext) {
return HttpJsonCallableFactory.createPagedCallable(
httpJsonCallSettings, callSettings, clientContext);
}
@Override
public <RequestT, ResponseT> UnaryCallable<RequestT, ResponseT> createBatchingCallable(
HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
BatchingCallSettings<RequestT, ResponseT> callSettings,
ClientContext clientContext) {
return HttpJsonCallableFactory.createBatchingCallable(
httpJsonCallSettings, callSettings, clientContext);
}
@BetaApi(
"The surface for long-running operations is not stable yet and may change in the future.")
@Override
public <RequestT, ResponseT, MetadataT>
OperationCallable<RequestT, ResponseT, MetadataT> createOperationCallable(
HttpJsonCallSettings<RequestT, Operation> httpJsonCallSettings,
OperationCallSettings<RequestT, ResponseT, MetadataT> callSettings,
ClientContext clientContext,
GlobalOperationsStub operationsStub) {
UnaryCallable<RequestT, Operation> innerCallable =
HttpJsonCallableFactory.createBaseUnaryCallable(
httpJsonCallSettings, callSettings.getInitialCallSettings(), clientContext);
HttpJsonOperationSnapshotCallable<RequestT, Operation> initialCallable =
new HttpJsonOperationSnapshotCallable<RequestT, Operation>(
innerCallable,
httpJsonCallSettings.getMethodDescriptor().getOperationSnapshotFactory());
return HttpJsonCallableFactory.createOperationCallable(
callSettings, clientContext, operationsStub.longRunningClient(), initialCallable);
}
<|fim▁hole|> public <RequestT, ResponseT>
ServerStreamingCallable<RequestT, ResponseT> createServerStreamingCallable(
HttpJsonCallSettings<RequestT, ResponseT> httpJsonCallSettings,
ServerStreamingCallSettings<RequestT, ResponseT> callSettings,
ClientContext clientContext) {
return HttpJsonCallableFactory.createServerStreamingCallable(
httpJsonCallSettings, callSettings, clientContext);
}
}<|fim▁end|>
|
@Override
|
<|file_name|>DevGarden_fr.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr_FR">
<context>
<name>DGWindow</name>
<message>
<location filename="src/ui/dgwindow.cpp" line="24"/>
<source>&File</source>
<translation>&fichier</translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="25"/>
<source>New File/Project...</source>
<translation>Nouveau fichier / projet ...</translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="26"/>
<source>Open Folder/Project...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="27"/>
<source>Open Files...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="27"/>
<source>Ctrl+Shift+O</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="29"/>
<source>Save</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="30"/>
<source>Save As...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="31"/>
<source>Save All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="32"/>
<source>Reload</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="34"/>
<source>Close</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="35"/>
<source>Close Project</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="37"/>
<source>Import...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="38"/>
<source>Export...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="40"/>
<source>Quit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="42"/>
<source>&Edit</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="43"/>
<source>Undo</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="44"/>
<source>Redo</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="46"/>
<source>Cut</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="47"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="48"/>
<source>Paste</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="49"/>
<source>Paste from Clipboard History</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="51"/>
<source>Select All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="52"/>
<source>Find/Replace</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="53"/>
<source>Advanced Find/Replace</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="53"/>
<source>Ctrl+Shift+F</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="55"/>
<source>Format Selection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="56"/>
<source>Comment Selection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="58"/>
<source>&Build</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="59"/>
<source>Cancel Build</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="60"/>
<source>Regen Build Scripts</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="61"/>
<source>Build Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="63"/>
<source>Build</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="64"/>
<location filename="src/ui/dgwindow.cpp" line="73"/>
<source>Last Target</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="65"/>
<location filename="src/ui/dgwindow.cpp" line="74"/>
<location filename="src/ui/dgwindow.cpp" line="85"/>
<source>Debug</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="66"/>
<location filename="src/ui/dgwindow.cpp" line="75"/>
<source>Release</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="67"/>
<location filename="src/ui/dgwindow.cpp" line="76"/>
<source>Custom...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="68"/>
<source>Rebuild</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="69"/>
<source>Clean</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="70"/>
<source>Deploy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="72"/>
<source>Build All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="77"/>
<source>Rebuild All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="78"/>
<source>Clean All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="79"/>
<source>Deploy All</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="81"/>
<source>&Run/Debug</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="82"/>
<source>Run</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="83"/>
<source>Run Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="86"/>
<source>Debug External Application...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="87"/>
<source>Load Core File...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="88"/>
<source>Show Debug Window</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="89"/>
<source>Debugger Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="91"/>
<source>Interrupt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="92"/>
<source>Continue</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="93"/>
<source>Toggle Breakpoint</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="94"/>
<source>Step Over</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="95"/>
<source>Step Into</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="97"/>
<source>Analyze</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="98"/>
<source>Memory Checker</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="99"/>
<source>Thread Checker</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="100"/>
<source>Call Graph</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="101"/>
<source>Cache Profiler</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="102"/>
<source>Heap Profiler</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="103"/>
<source>Analyze External Application...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="104"/>
<source>Analysis Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="106"/>
<source>Test</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="107"/>
<source>Test Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="109"/><|fim▁hole|> <translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="110"/>
<source>Minimize</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="111"/>
<source>Expand</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="112"/>
<source>Toggle Full Screen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="114"/>
<source>Toggle Lower Bar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="115"/>
<source>Toggle Split View Bar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="116"/>
<source>Toggle Sidebar</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="118"/>
<source>Increase Font Size</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="119"/>
<source>Decrease Font Size</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="120"/>
<source>Reset Font Size</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="122"/>
<source>Toggle Light/Dark Theme</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="123"/>
<source>Run Command Prompt...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="123"/>
<source>Alt+Space</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="124"/>
<source>Settings...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="126"/>
<source>&Help</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="127"/>
<source>Manual</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="128"/>
<source>Command Reference</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="129"/>
<source>Hacking DevGarden</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="131"/>
<source>Project Page</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="src/ui/dgwindow.cpp" line="132"/>
<source>Report Bug...</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|>
|
<source>&Window</source>
|
<|file_name|>TestCheckYahooFinanceCurrencyData.py<|end_file_name|><|fim▁begin|>'''
Check Yahoo finance currency data helper.
Update log: (date / version / author : comments)
2017-12-10 / 1.0.0 / Du Jiang : Creation
2017-12-13 / 2.0.0 / Du Jiang : Use new API
'''
from com.djs.learn.financeapi import CheckFinanceDataRequests
__data_type = 1
__inventory_info_file_path = "../../../../etc/CurrencyInfo.csv"
__result_output_file_path = "../../../../Temp/CurrencyDataY.json"
argv = ["-d", __data_type, "-i", __inventory_info_file_path,
"-o", __result_output_file_path]
CheckFinanceDataRequests.main(argv)
<|fim▁hole|>'''
Or run:
python CheckFinanceDataRequests.py -d 1 -i "../../../../etc/CurrencyInfo.csv" -o "../../../../Temp/CurrencyDataY.json"
'''
if __name__ == '__main__':
pass<|fim▁end|>
| |
<|file_name|>CreateAuctionBox.js<|end_file_name|><|fim▁begin|>// @flow
import React, { Component, PropTypes } from "react"
import { connect } from "react-redux"
import TextField from 'material-ui/TextField'
import { CreateAuctionButton } from "../../molecules/"
import * as AuctionActions from "../../../actions/auction"
import { Button } from "../../atoms/"
import styles from "./styles.css"
export class CreateAuctionBox extends Component {
constructor(props) {<|fim▁hole|> this.state = {
name: "",
minimum: "",
message: "",
image: "",
auctionMsg: props.auctionMsg,
}
}
change = (event) => {
if ("id" in event.target && "value" in event.target) {
console.log(event.target.id)
console.log(event.target.value)
this.setState({ [event.target.id]: event.target.value })
}
}
data = () => {
const { name, minimum, message, image } = this.state
const base = { name : name, minimum : minimum, message : message, image : image }
if (firebase.auth().currentUser) {
return Object.assign({}, base, { usrID : firebase.auth().currentUser.uid })
}
return base
}
action = () => {
const { name, minimum, message, image } = this.state
if (name && minimum && message && image && minimum > 0) {
const { dispatch } = this.props
console.log(this.data())
dispatch(AuctionActions.queryCreateAuction(JSON.stringify(this.data())))
}
}
componentWillReceiveProps(nextProps) {
if(JSON.stringify(this.state.auctionMsg) !== JSON.stringify(nextProps.auctionMsg))
{
this.setState({ auctionMsg: nextProps.auctionMsg })
}
}
render() {
const { auctionMsg } = this.state
return (
<div className={ styles.root }>
<h3>Create Bid</h3>
<div>Status: <span style={{ color : (auctionMsg.status.toLowerCase() !== 'success' ? "red" : "green") }}>{ auctionMsg.status }</span></div>
<TextField id="name" value={ this.state.name } floatingLabelText="Domain" onChange={this.change}/><br />
<TextField id="minimum" type="number" value={ this.state.minimum } floatingLabelText="Minimum" onChange={this.change}/><br />
<TextField id="message" value={ this.state.message } floatingLabelText="Message" onChange={this.change}/><br />
<TextField id="image" value={ this.state.image } floatingLabelText="Image" style={{ "marginBottom" : "1em"}} onChange={this.change}/><br />
<Button onClick={ this.action }>
Create Auction
</Button>
</div>
)
}
}
CreateAuctionBox.propTypes = {
auctionMsg: PropTypes.object.isRequired,
dispatch: PropTypes.func.isRequired,
}
function mapStateToProps(state) {
const { auctionMsg } = state.auction
return {
auctionMsg,
}
}
export default connect(mapStateToProps)(CreateAuctionBox)<|fim▁end|>
|
super(props)
|
<|file_name|>1579-Remove Max Number of Edges to Keep Graph Fully Traversable.py<|end_file_name|><|fim▁begin|>class Solution:
def maxNumEdgesToRemove(self, n: int, edges: List[List[int]]) -> int:
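        # Kruskal-style greedy over a disjoint-set union (union-find):
        # merge type-3 edges (usable by both Alice and Bob) first, since a
        # shared edge is never worse than two separate single-player edges;
        # every edge that fails to join two components is removable.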
parent = list(range(n + 1))
def findParent(i):
while parent[i] != i:
parent[i] = parent[parent[i]]
i = parent[i]
return i
def union(u, v):
pu = findParent(u)
pv = findParent(v)
if pu != pv:
parent[pv] = pu
return 1
else:
return 0
e1 = e2 = result = 0
for t, u, v in edges:
if t == 3:
if union(u, v):<|fim▁hole|> result += 1
parentOrig = parent[:]
for t, u, v in edges:
if t == 1:
if union(u, v):
e1 += 1
else:
result += 1
parent = parentOrig
for t, u, v in edges:
if t == 2:
if union(u, v):
e2 += 1
else:
result += 1
return result if e1 == e2 == n - 1 else -1<|fim▁end|>
|
e1 += 1
e2 += 1
else:
|
<|file_name|>no.js<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("placeholder","no",{title:"Egenskaper for plassholder",toolbar:"Opprett plassholder",name:"Navn på plassholder",invalidName:"Plassholderen kan ikke være tom, og kan ikke inneholde følgende tegn: [, ], <, >",pathName:"plassholder"});<|fim▁end|>
| |
<|file_name|>Eastern.py<|end_file_name|><|fim▁begin|>'''tzinfo timezone information for US/Eastern.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Eastern(DstTzInfo):
'''US/Eastern timezone definition. See datetime.tzinfo for details'''
zone = 'US/Eastern'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1918,3,31,7,0,0),
d(1918,10,27,6,0,0),
d(1919,3,30,7,0,0),
d(1919,10,26,6,0,0),
d(1920,3,28,7,0,0),
d(1920,10,31,6,0,0),
d(1921,4,24,7,0,0),
d(1921,9,25,6,0,0),
d(1922,4,30,7,0,0),
d(1922,9,24,6,0,0),
d(1923,4,29,7,0,0),
d(1923,9,30,6,0,0),
d(1924,4,27,7,0,0),
d(1924,9,28,6,0,0),
d(1925,4,26,7,0,0),
d(1925,9,27,6,0,0),
d(1926,4,25,7,0,0),
d(1926,9,26,6,0,0),
d(1927,4,24,7,0,0),
d(1927,9,25,6,0,0),
d(1928,4,29,7,0,0),
d(1928,9,30,6,0,0),
d(1929,4,28,7,0,0),
d(1929,9,29,6,0,0),
d(1930,4,27,7,0,0),
d(1930,9,28,6,0,0),
d(1931,4,26,7,0,0),
d(1931,9,27,6,0,0),
d(1932,4,24,7,0,0),
d(1932,9,25,6,0,0),
d(1933,4,30,7,0,0),
d(1933,9,24,6,0,0),
d(1934,4,29,7,0,0),
d(1934,9,30,6,0,0),
d(1935,4,28,7,0,0),
d(1935,9,29,6,0,0),
d(1936,4,26,7,0,0),
d(1936,9,27,6,0,0),
d(1937,4,25,7,0,0),
d(1937,9,26,6,0,0),
d(1938,4,24,7,0,0),
d(1938,9,25,6,0,0),
d(1939,4,30,7,0,0),
d(1939,9,24,6,0,0),
d(1940,4,28,7,0,0),
d(1940,9,29,6,0,0),
d(1941,4,27,7,0,0),
d(1941,9,28,6,0,0),
d(1942,2,9,7,0,0),
d(1945,8,14,23,0,0),
d(1945,9,30,6,0,0),
d(1946,4,28,7,0,0),
d(1946,9,29,6,0,0),
d(1947,4,27,7,0,0),
d(1947,9,28,6,0,0),
d(1948,4,25,7,0,0),
d(1948,9,26,6,0,0),
d(1949,4,24,7,0,0),
d(1949,9,25,6,0,0),
d(1950,4,30,7,0,0),
d(1950,9,24,6,0,0),
d(1951,4,29,7,0,0),
d(1951,9,30,6,0,0),
d(1952,4,27,7,0,0),
d(1952,9,28,6,0,0),
d(1953,4,26,7,0,0),
d(1953,9,27,6,0,0),
d(1954,4,25,7,0,0),
d(1954,9,26,6,0,0),
d(1955,4,24,7,0,0),
d(1955,10,30,6,0,0),
d(1956,4,29,7,0,0),
d(1956,10,28,6,0,0),
d(1957,4,28,7,0,0),
d(1957,10,27,6,0,0),
d(1958,4,27,7,0,0),
d(1958,10,26,6,0,0),
d(1959,4,26,7,0,0),
d(1959,10,25,6,0,0),
d(1960,4,24,7,0,0),
d(1960,10,30,6,0,0),
d(1961,4,30,7,0,0),
d(1961,10,29,6,0,0),
d(1962,4,29,7,0,0),
d(1962,10,28,6,0,0),
d(1963,4,28,7,0,0),
d(1963,10,27,6,0,0),
d(1964,4,26,7,0,0),
d(1964,10,25,6,0,0),
d(1965,4,25,7,0,0),
d(1965,10,31,6,0,0),
d(1966,4,24,7,0,0),
d(1966,10,30,6,0,0),
d(1967,4,30,7,0,0),
d(1967,10,29,6,0,0),
d(1968,4,28,7,0,0),
d(1968,10,27,6,0,0),
d(1969,4,27,7,0,0),
d(1969,10,26,6,0,0),
d(1970,4,26,7,0,0),
d(1970,10,25,6,0,0),
d(1971,4,25,7,0,0),
d(1971,10,31,6,0,0),
d(1972,4,30,7,0,0),
d(1972,10,29,6,0,0),
d(1973,4,29,7,0,0),
d(1973,10,28,6,0,0),<|fim▁hole|>d(1975,10,26,6,0,0),
d(1976,4,25,7,0,0),
d(1976,10,31,6,0,0),
d(1977,4,24,7,0,0),
d(1977,10,30,6,0,0),
d(1978,4,30,7,0,0),
d(1978,10,29,6,0,0),
d(1979,4,29,7,0,0),
d(1979,10,28,6,0,0),
d(1980,4,27,7,0,0),
d(1980,10,26,6,0,0),
d(1981,4,26,7,0,0),
d(1981,10,25,6,0,0),
d(1982,4,25,7,0,0),
d(1982,10,31,6,0,0),
d(1983,4,24,7,0,0),
d(1983,10,30,6,0,0),
d(1984,4,29,7,0,0),
d(1984,10,28,6,0,0),
d(1985,4,28,7,0,0),
d(1985,10,27,6,0,0),
d(1986,4,27,7,0,0),
d(1986,10,26,6,0,0),
d(1987,4,5,7,0,0),
d(1987,10,25,6,0,0),
d(1988,4,3,7,0,0),
d(1988,10,30,6,0,0),
d(1989,4,2,7,0,0),
d(1989,10,29,6,0,0),
d(1990,4,1,7,0,0),
d(1990,10,28,6,0,0),
d(1991,4,7,7,0,0),
d(1991,10,27,6,0,0),
d(1992,4,5,7,0,0),
d(1992,10,25,6,0,0),
d(1993,4,4,7,0,0),
d(1993,10,31,6,0,0),
d(1994,4,3,7,0,0),
d(1994,10,30,6,0,0),
d(1995,4,2,7,0,0),
d(1995,10,29,6,0,0),
d(1996,4,7,7,0,0),
d(1996,10,27,6,0,0),
d(1997,4,6,7,0,0),
d(1997,10,26,6,0,0),
d(1998,4,5,7,0,0),
d(1998,10,25,6,0,0),
d(1999,4,4,7,0,0),
d(1999,10,31,6,0,0),
d(2000,4,2,7,0,0),
d(2000,10,29,6,0,0),
d(2001,4,1,7,0,0),
d(2001,10,28,6,0,0),
d(2002,4,7,7,0,0),
d(2002,10,27,6,0,0),
d(2003,4,6,7,0,0),
d(2003,10,26,6,0,0),
d(2004,4,4,7,0,0),
d(2004,10,31,6,0,0),
d(2005,4,3,7,0,0),
d(2005,10,30,6,0,0),
d(2006,4,2,7,0,0),
d(2006,10,29,6,0,0),
d(2007,3,11,7,0,0),
d(2007,11,4,6,0,0),
d(2008,3,9,7,0,0),
d(2008,11,2,6,0,0),
d(2009,3,8,7,0,0),
d(2009,11,1,6,0,0),
d(2010,3,14,7,0,0),
d(2010,11,7,6,0,0),
d(2011,3,13,7,0,0),
d(2011,11,6,6,0,0),
d(2012,3,11,7,0,0),
d(2012,11,4,6,0,0),
d(2013,3,10,7,0,0),
d(2013,11,3,6,0,0),
d(2014,3,9,7,0,0),
d(2014,11,2,6,0,0),
d(2015,3,8,7,0,0),
d(2015,11,1,6,0,0),
d(2016,3,13,7,0,0),
d(2016,11,6,6,0,0),
d(2017,3,12,7,0,0),
d(2017,11,5,6,0,0),
d(2018,3,11,7,0,0),
d(2018,11,4,6,0,0),
d(2019,3,10,7,0,0),
d(2019,11,3,6,0,0),
d(2020,3,8,7,0,0),
d(2020,11,1,6,0,0),
d(2021,3,14,7,0,0),
d(2021,11,7,6,0,0),
d(2022,3,13,7,0,0),
d(2022,11,6,6,0,0),
d(2023,3,12,7,0,0),
d(2023,11,5,6,0,0),
d(2024,3,10,7,0,0),
d(2024,11,3,6,0,0),
d(2025,3,9,7,0,0),
d(2025,11,2,6,0,0),
d(2026,3,8,7,0,0),
d(2026,11,1,6,0,0),
d(2027,3,14,7,0,0),
d(2027,11,7,6,0,0),
d(2028,3,12,7,0,0),
d(2028,11,5,6,0,0),
d(2029,3,11,7,0,0),
d(2029,11,4,6,0,0),
d(2030,3,10,7,0,0),
d(2030,11,3,6,0,0),
d(2031,3,9,7,0,0),
d(2031,11,2,6,0,0),
d(2032,3,14,7,0,0),
d(2032,11,7,6,0,0),
d(2033,3,13,7,0,0),
d(2033,11,6,6,0,0),
d(2034,3,12,7,0,0),
d(2034,11,5,6,0,0),
d(2035,3,11,7,0,0),
d(2035,11,4,6,0,0),
d(2036,3,9,7,0,0),
d(2036,11,2,6,0,0),
d(2037,3,8,7,0,0),
d(2037,11,1,6,0,0),
]
_transition_info = [
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EWT'),
i(-14400,3600,'EPT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
i(-14400,3600,'EDT'),
i(-18000,0,'EST'),
]
Eastern = Eastern()<|fim▁end|>
|
d(1974,1,6,7,0,0),
d(1974,10,27,6,0,0),
d(1975,2,23,7,0,0),
|
<|file_name|>name.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ ___ ___ ____ ____ __
# | \ | \ | | / | | | \ Automatic
# |__/ |__/ | | | |__ |__ | | Conference
# | |\_ | | | | | | | Proceedings
# | | \ |___| \___ |___ |___ |__/ Generator
# ==========================================================<|fim▁hole|># http://www.lpl-aix.fr/~bigi/
#
# ---------------------------------------------------------------------------
# developed at:
#
# Laboratoire Parole et Langage
#
# Copyright (C) 2013-2014 Brigitte Bigi
#
# Use of this software is governed by the GPL, v3
# This banner notice must not be removed
# ---------------------------------------------------------------------------
#
# Proceed is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Proceed is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Proceed. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
__docformat__ = "epytext"
# ---------------------------------------------------------------------------
import sys
import os
import random
import tempfile
from datetime import date
# ---------------------------------------------------------------------------
class GenName():
"""
@authors: Brigitte Bigi
@contact: [email protected]
@license: GPL
    @summary: A class to generate a random file name for a non-existing file.
"""
def __init__(self,extension=""):
self.name = "/"
        while os.path.exists(self.name):
self.set_name(extension)
def set_name(self, extension):
"""
Set a new file name.
"""
        # random integer value (0-9999), rendered as a string
randval = str(int(random.random()*10000))
# process pid
pid = str(os.getpid())
# today's date
today = str(date.today())
# filename
filename = "tmp_"+today+"_"+pid+"_"+randval
        # the final file name is just the generated filename plus the extension
self.name = filename + extension
def get_name(self):
"""
Get the current file name.
"""
return str(self.name)
# ---------------------------------------------------------------------------
if __name__ == "__main__":
print GenName().get_name()
# ---------------------------------------------------------------------------<|fim▁end|>
|
#
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*
* Store drawing on server
*/
function saveDrawing() {
var drawing = $('#imagePaint').wPaint('image');
var imageid = $('#imageTarget').data('imageid');
var creatormail = $('input[name=creatorMail]').val();
//Add spinning wheel
var spin = $(document.createElement('div'));
spin.addClass('spin');
$('#dialog-content').html(spin);
$.ajax({
type: 'POST',
url: 'savedrawing',
dataType: 'json',
data: {drawing: drawing, imageid: imageid, creatormail: creatormail},<|fim▁hole|> popup("<p>Die Zeichnung konnte leider nicht gespeichert werden.</p><p>Der Fehler wird untersucht.</p>");
},
fail: function() {
popup("<p>Die Zeichnung konnte leider nicht gespeichert werden.</p><p>Der Fehler wird untersucht.</p>");
}
});
}
function saveDrawingLocal() {
var imageid = $('img#imageTarget').data('imageid');
var drawingid = $('img#drawingTarget').data('drawingid');
window.location.href = 'getfile?drawingid=' + drawingid + '&imageid=' + imageid;
}
/*
* Popup message
*/
function popup(message) {
// get the screen height and width
var maskHeight = $(document).height();
var maskWidth = $(window).width();
// calculate the values for center alignment
var dialogTop = (maskHeight/2) - ($('#dialog-box').height()/2);
var dialogLeft = (maskWidth/2) - ($('#dialog-box').width()/2);
// assign values to the overlay and dialog box
$('#dialog-overlay').css({height:maskHeight, width:maskWidth}).show();
$('#dialog-box').css({top:dialogTop, left:dialogLeft}).show();
// display the message
$('#dialog-content').html(message);
}
$(document).ready(function() {
/***********************************************
* Encrypt Email script- Please keep notice intact
* Tool URL: http://www.dynamicdrive.com/emailriddler/
* **********************************************/
var emailriddlerarray=[104,111,115,116,109,97,115,116,101,114,64,109,97,99,104,100,105,101,115,116,114,97,115,115,101,98,117,110,116,46,99,111,109]
var encryptedemail_id65='' //variable to contain encrypted email
for (var i=0; i<emailriddlerarray.length; i++)
encryptedemail_id65+=String.fromCharCode(emailriddlerarray[i])
$('#mailMaster').attr('href', 'mailto:' + encryptedemail_id65 );
var emailriddlerarray=[105,110,102,111,64,109,97,99,104,100,105,101,115,116,114,97,115,115,101,98,117,110,116,46,99,111,109]
var encryptedemail_id23='' //variable to contain encrypted email
for (var i=0; i<emailriddlerarray.length; i++)
encryptedemail_id23+=String.fromCharCode(emailriddlerarray[i])
$('#mailInfo').attr('href', 'mailto:' + encryptedemail_id23 );
/*
* Change image
*/
$(".imageSmallContainer").click(function(evt){
var imageid = $(this).data('imageid');
var drawingid = $(this).data('drawingid');
var ids = {imageid : imageid,
drawingid : drawingid};
$.get('changeimage', ids)
.done(function(data){
var imageContainer = $('#imageContainer');
//Hide all images in container
imageContainer.children('.imageRegular').hide();
//Add spinning wheel
var spin = $(document.createElement('div'));
spin.addClass('spin');
imageContainer.prepend(spin);
//Remove hidden old image
$('#imageTarget').remove();
//Create new hidden image
var imageNew = $(document.createElement('img'));
imageNew.attr('id', 'imageTarget');
imageNew.addClass('imageRegular');
imageNew.attr('data-imageid', imageid);
imageNew.data('imageid', imageid);
imageNew.attr('src', data.imagefile);
imageNew.css('display', 'none');
//Prepend new Image to container
imageContainer.prepend(imageNew);
//For Admin and Gallery also change Drawing
if (typeof drawingid != 'undefined') {
var drawing = $('#drawingTarget');
drawing.attr('src', data.drawingfile);
drawing.attr('data-drawingid', drawingid);
drawing.data('drawingid', drawingid);
drawing.attr('drawingid', drawingid);
}
// If newImage src is loaded, remove spin and fade all imgs
// Fires too early in FF
imageContainer.imagesLoaded(function() {
spin.remove();
imageContainer.children('.imageRegular').fadeIn();
});
});
});
/*
* Change the class of moderated images
*/
$('.imageSmallContainerOuter input').change(function(evt){
var container = $(this).parent();
var initial = container.data('initialstate');
var checked = $(this).prop('checked');
container.removeClass('notApproved');
container.removeClass('approved');
container.removeClass('changed');
if (checked && initial == 'approved')
container.addClass('approved');
else if ((checked && initial == 'notApproved') || (!checked && initial == 'approved'))
container.addClass('changed');
else if (!checked && initial == 'notApproved')
container.addClass('notApproved')
});
$('#dialog-box .close, #dialog-overlay').click(function () {
$('#dialog-overlay, #dialog-box').hide();
return false;
});
$('#drawingDialogBtn').click(function(evt){
popup("<p>Aus den besten eingeschickten Zeichnungen werden Freecards gedruckt.</p> \
<p>Mit dem Klick auf den Speicherbutton erklärst du dich dafür bereit, dass dein Bild vielleicht veröffentlicht wird.</p> \
<p>Wenn du deine Email-Adresse angibst können wir dich informieren falls wir deine Zeichnung ausgewählt haben.</p> \
<input type='text' name='creatorMail' placeholder='Email Adresse'> \
<a id='drawingSaveBtn' href='javascript:saveDrawing();' class='btn'>Speichern</a>");
});
});<|fim▁end|>
|
success: function (resp) {
if (resp.kind == 'success')
popup("<p>Die Zeichnung wurde erfolgreich gespeichert.</p><p>Sie wird jedoch zuerst überprüft bevor sie in der Galerie zu sehen ist.</p>");
if (resp.kind == 'error')
|
<|file_name|>code1a.py<|end_file_name|><|fim▁begin|># coding=utf-8
import pygame
import pygame.locals
class Board(object):
"""
    Game board. Responsible for drawing the game window.
"""
def __init__(self, width, height):
"""
Konstruktor planszy do gry. Przygotowuje okienko gry.
:param width: szerokość w pikselach
:param height: wysokość w pikselach
"""
self.surface = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption('Game of life')
def draw(self, *args):
"""
Rysuje okno gry
:param args: lista obiektów do narysowania
"""
background = (0, 0, 0)
self.surface.fill(background)
for drawable in args:
drawable.draw_on(self.surface)
# dopiero w tym miejscu następuje fatyczne rysowanie
# w oknie gry, wcześniej tylko ustalaliśmy co i jak ma zostać narysowane
pygame.display.update()
class GameOfLife(object):
    """
    Ties all the elements of the game together.
    """

    def __init__(self, width, height, cell_size=10):
        """
        Prepares the game settings.

        :param width: board width measured in cells
        :param height: board height measured in cells
        :param cell_size: cell edge length in pixels
        """
        pygame.init()
        self.board = Board(width * cell_size, height * cell_size)
        # clock we will use to control how fast consecutive
        # frames of the game are drawn
        self.fps_clock = pygame.time.Clock()

    def run(self):
        """
        Main game loop
        """
        while not self.handle_events():
            # keep looping until we receive the quit signal
            self.board.draw()
            self.fps_clock.tick(15)

    def handle_events(self):
        """
        Handles system events; mouse moves, for example, get interpreted here.

        :return True if pygame reported a quit event
        """
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                return True


# magic numbers used to tell whether a cell is alive
DEAD = 0
ALIVE = 1
class Population(object):
    """
    The cell population.
    """

    def __init__(self, width, height, cell_size=10):
        """
        Prepares the population settings.

        :param width: board width measured in cells
        :param height: board height measured in cells
        :param cell_size: cell edge length in pixels
        """
        self.box_size = cell_size
        self.height = height
        self.width = width
        self.generation = self.reset_generation()

    def reset_generation(self):
        """
        Creates and returns the matrix of an empty population.
        """
        # fill the list with columns in a loop;
        # each column is in turn filled with the value 0 (DEAD)
        return [[DEAD for y in xrange(self.height)] for x in xrange(self.width)]

    def handle_mouse(self):
        # read the mouse button state using the pygame helper
        buttons = pygame.mouse.get_pressed()
        if not any(buttons):
            # ignore the event if no button is pressed
            return
        # add a live cell if the first mouse button is pressed;
        # this way we can not only add live cells but also remove them
        alive = True if buttons[0] else False
        # get the cursor position on the board, measured in pixels
        x, y = pygame.mouse.get_pos()
        # convert the position from pixels to cell coordinates in the matrix;
        # the player may click anywhere inside a box_size-wide square to pick a cell
        x /= self.box_size
        y /= self.box_size
        # set the cell's state in the matrix
        self.generation[x][y] = ALIVE if alive else DEAD

    def draw_on(self, surface):
        """
        Draws the cells on the board.
        """
        for x, y in self.alive_cells():
            size = (self.box_size, self.box_size)
            position = (x * self.box_size, y * self.box_size)
            color = (255, 255, 255)
            thickness = 1
            pygame.draw.rect(surface, color, pygame.locals.Rect(position, size), thickness)

    def alive_cells(self):
        """
        Generator yielding the coordinates of live cells.
        """
        for x in range(len(self.generation)):
            column = self.generation[x]
            for y in range(len(column)):
                if column[y] == ALIVE:
                    # if the cell is alive, yield its coordinates
                    yield x, y


# This part should always sit at the end of the module (this file is a module);
# we want to start the game only after all the classes have been declared.
if __name__ == "__main__":
    game = GameOfLife(80, 40)
    game.run()
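A note on the tutorial stage above: it wires up the window, the main loop and the cell matrix, but no rule ever advances the population. A minimal sketch of the missing update step, assuming the standard Conway birth/survival rules (a hypothetical cycle_generation method on Population, not part of the original file), could look like:

    def cycle_generation(self):
        """Compute the next generation from the current one (Conway's rules)."""
        next_gen = self.reset_generation()
        for x in range(self.width):
            for y in range(self.height):
                # count live neighbours of (x, y), ignoring cells off the board
                alive_neighbours = sum(
                    self.generation[nx][ny] == ALIVE
                    for nx in range(max(0, x - 1), min(self.width, x + 2))
                    for ny in range(max(0, y - 1), min(self.height, y + 2))
                    if (nx, ny) != (x, y)
                )
                if self.generation[x][y] == ALIVE:
                    # a live cell survives with two or three live neighbours
                    next_gen[x][y] = ALIVE if alive_neighbours in (2, 3) else DEAD
                else:
                    # a dead cell is born with exactly three live neighbours
                    next_gen[x][y] = ALIVE if alive_neighbours == 3 else DEAD
        self.generation = next_gen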
<|file_name|>panic-handler-bad-signature-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:-C panic=abort
#![no_std]
#![no_main]
use core::panic::PanicInfo;
#[panic_handler]
fn panic(
info: PanicInfo, //~ ERROR argument should be `&PanicInfo`
) -> () //~ ERROR return type should be `!`
{
}
<|file_name|>configs-fn_call_style-block-trailing-comma.rs<|end_file_name|><|fim▁begin|>// rustfmt-error_on_line_overflow: false
// rustfmt-fn_call_style: Block
// rustfmt should not add trailing comma when rewriting macro. See #1528.
fn a() {
panic!(
"this is a long string that goes past the maximum line length causing rustfmt to insert a comma here:"
    );
    foo(
        a,
        oooptoptoptoptptooptoptoptoptptooptoptoptoptptoptoptoptoptpt(),
    );
}
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.views.generic.detail import DetailView
from django.core.urlresolvers import reverse_lazy
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.views import generic
from django.views.generic.edit import DeleteView, CreateView, UpdateView
from .forms import BandForm, ArchivForm, InstitutionForm, PersonForm, BearbeiterForm
from .models import Band, Archiv, Institution, Person, Bearbeiter
class BearbeiterListView(generic.ListView):
model = Bearbeiter
template_name = 'entities/bearbeiter_list.html'
context_object_name = 'object_list'
class BearbeiterDetailView(DetailView):
model = Bearbeiter
class BearbeiterCreate(CreateView):
model = Bearbeiter
template_name_suffix = '_create'
form_class = BearbeiterForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BearbeiterCreate, self).dispatch(*args, **kwargs)
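# Decorating dispatch() with login_required protects every HTTP verb of the
# class-based view; the same guard pattern repeats for the create, update and
# delete views below.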
class BearbeiterUpdate(UpdateView):
model = Bearbeiter
template_name_suffix = '_create'
form_class = BearbeiterForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BearbeiterUpdate, self).dispatch(*args, **kwargs)
class BearbeiterDelete(DeleteView):
model = Bearbeiter
template_name = 'vocabs/confirm_delete.html'
success_url = reverse_lazy('browsing:browse_bearbeiter')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BearbeiterDelete, self).dispatch(*args, **kwargs)
class PersonListView(generic.ListView):
model = Person
template_name = 'entities/band_list.html'
context_object_name = 'object_list'
class PersonDetailView(DetailView):
model = Person
class PersonCreate(CreateView):
model = Person
template_name_suffix = '_create'
form_class = PersonForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PersonCreate, self).dispatch(*args, **kwargs)
class PersonUpdate(UpdateView):
model = Person
template_name_suffix = '_create'
form_class = PersonForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PersonUpdate, self).dispatch(*args, **kwargs)
class PersonDelete(DeleteView):
model = Person
template_name = 'vocabs/confirm_delete.html'
success_url = reverse_lazy('browsing:browse_persons')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PersonDelete, self).dispatch(*args, **kwargs)
class BandListView(generic.ListView):
model = Band
template_name = 'entities/band_list.html'
context_object_name = 'object_list'
def get_queryset(self):
return Band.objects.order_by('signatur')
class BandDetailView(DetailView):
model = Band
class BandCreate(CreateView):
model = Band
template_name_suffix = '_create'
form_class = BandForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BandCreate, self).dispatch(*args, **kwargs)
class BandUpdate(UpdateView):
model = Band
template_name_suffix = '_create'
form_class = BandForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BandUpdate, self).dispatch(*args, **kwargs)
class BandDelete(DeleteView):
model = Band
template_name = 'vocabs/confirm_delete.html'
success_url = reverse_lazy('browsing:browse_baende')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(BandDelete, self).dispatch(*args, **kwargs)
# Archiv
class ArchivDetailView(DetailView):
model = Archiv
class ArchivCreate(CreateView):
model = Archiv
template_name_suffix = '_create'
form_class = ArchivForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ArchivCreate, self).dispatch(*args, **kwargs)
class ArchivUpdate(UpdateView):
model = Archiv
template_name_suffix = '_create'
form_class = ArchivForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ArchivUpdate, self).dispatch(*args, **kwargs)
class ArchivDelete(DeleteView):
model = Archiv
template_name = 'vocabs/confirm_delete.html'
success_url = reverse_lazy('browsing:browse_archivs')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ArchivDelete, self).dispatch(*args, **kwargs)
# Institution
class InstitutionDetailView(DetailView):
model = Institution
class InstitutionCreate(CreateView):
model = Institution
template_name_suffix = '_create'
form_class = InstitutionForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
        return super(InstitutionCreate, self).dispatch(*args, **kwargs)
class InstitutionUpdate(UpdateView):
model = Institution
template_name_suffix = '_create'
form_class = InstitutionForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(InstitutionUpdate, self).dispatch(*args, **kwargs)
class InstitutionDelete(DeleteView):
model = Institution
template_name = 'vocabs/confirm_delete.html'
success_url = reverse_lazy('browsing:browse_institutions')
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(InstitutionDelete, self).dispatch(*args, **kwargs)
<|file_name|>benchmark_covariance.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
import pymbar
from pymbar.testsystems.pymbar_datasets import load_gas_data, load_8proteins_data
import time
def load_oscillators(n_states, n_samples):
name = "%dx%d oscillators" % (n_states, n_samples)
O_k = np.linspace(1, 5, n_states)
k_k = np.linspace(1, 3, n_states)
N_k = (np.ones(n_states) * n_samples).astype('int')
test = pymbar.testsystems.harmonic_oscillators.HarmonicOscillatorsTestCase(O_k, k_k)
x_n, u_kn, N_k_output, s_n = test.sample(N_k, mode='u_kn')
return name, u_kn, N_k_output, s_n
def load_exponentials(n_states, n_samples):
    name = "%dx%d exponentials" % (n_states, n_samples)
    rates = np.linspace(1, 3, n_states)
    N_k = (np.ones(n_states) * n_samples).astype('int')
    test = pymbar.testsystems.exponential_distributions.ExponentialTestCase(rates)
x_n, u_kn, N_k_output, s_n = test.sample(N_k, mode='u_kn')
return name, u_kn, N_k_output, s_n
mbar_gens = {"new":lambda u_kn, N_k: pymbar.MBAR(u_kn, N_k)}
systems = [lambda : load_exponentials(25, 100), lambda : load_exponentials(100, 100), lambda : load_exponentials(250, 250),
lambda : load_oscillators(25, 100), lambda : load_oscillators(100, 100), lambda : load_oscillators(250, 250),
lambda : load_oscillators(500, 100), lambda : load_oscillators(1000, 50), lambda : load_oscillators(2000, 20), lambda : load_oscillators(4000, 10),
lambda : load_exponentials(500, 100), lambda : load_exponentials(1000, 50), lambda : load_exponentials(2000, 20), lambda : load_oscillators(4000, 10),
load_gas_data, load_8proteins_data]
timedata = []
for version, mbar_gen in mbar_gens.items():
for sysgen in systems:
name, u_kn, N_k, s_n = sysgen()
K, N = u_kn.shape
mbar = mbar_gen(u_kn, N_k)
time0 = time.time()
fij, dfij = mbar.getFreeEnergyDifferences(uncertainty_method="svd-ew-kab")
dt = time.time() - time0
timedata.append([name, K, N, dt])
timedata = pd.DataFrame(timedata, columns=["name", "K", "N", "time"])
print timedata.to_string(float_format=lambda x: "%.3g" % x)
<|file_name|>0022_add_activity_types.py<|end_file_name|><|fim▁begin|># Generated by Django 3.0.9 on 2020-08-16 20:47
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('groups', '0042_auto_20200507_1258'),
('activities', '0021_remove_activity_feedback_as_sum'),
]
operations = [
migrations.CreateModel(
name='ActivityType',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activity_types', to='groups.Group')),
('name', models.CharField(max_length=80)),
('colour', models.CharField(max_length=6)),
('icon', models.CharField(max_length=32)),
('feedback_icon', models.CharField(max_length=32)),
('has_feedback', models.BooleanField(default=True)),
('has_feedback_weight', models.BooleanField(default=True)),
],
options={
'abstract': False,
},
),
        migrations.AddField(
            model_name='activity',
            name='activity_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activities', to='activities.ActivityType'),
),
migrations.AddField(
model_name='activityseries',
name='activity_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activity_series', to='activities.ActivityType'),
),
migrations.AlterUniqueTogether(
name='activitytype',
unique_together={('group', 'name')},
),
    ]
<|file_name|>register.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta2
import (
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
)
// Codec encodes internal objects to the v1beta2 scheme
var Codec = runtime.CodecFor(api.Scheme, "v1beta2")
func init() {
api.Scheme.AddKnownTypes("v1beta2",
&Pod{},
&PodList{},
&ReplicationController{},
&ReplicationControllerList{},
&Service{},
&ServiceList{},
&Endpoints{},
&EndpointsList{},
&Minion{},
&MinionList{},
&Binding{},
&Status{},
&ServerOp{},
&ServerOpList{},
&Event{},
&EventList{},
&ContainerManifest{},
&ContainerManifestList{},
&BoundPod{},
&BoundPods{},
)
}
func (*Pod) IsAnAPIObject() {}
func (*PodList) IsAnAPIObject() {}
func (*ReplicationController) IsAnAPIObject() {}
func (*ReplicationControllerList) IsAnAPIObject() {}
func (*Service) IsAnAPIObject() {}
func (*ServiceList) IsAnAPIObject() {}
func (*Endpoints) IsAnAPIObject() {}
func (*EndpointsList) IsAnAPIObject() {}
func (*Minion) IsAnAPIObject() {}
func (*MinionList) IsAnAPIObject() {}
func (*Binding) IsAnAPIObject() {}
func (*Status) IsAnAPIObject() {}
func (*ServerOp) IsAnAPIObject() {}
func (*ServerOpList) IsAnAPIObject() {}
func (*Event) IsAnAPIObject() {}
func (*EventList) IsAnAPIObject() {}
func (*ContainerManifest) IsAnAPIObject() {}
func (*ContainerManifestList) IsAnAPIObject() {}
func (*BoundPod) IsAnAPIObject() {}
func (*BoundPods) IsAnAPIObject() {}
<|file_name|>mock_api_parser.py<|end_file_name|><|fim▁begin|># Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Parsing API Discovery document."""
import mock_api_types
from gcutil_lib import mock_api_types
class Parser(object):
"""Discovery document parser.
Parses discovery document types, resources and methods. Result of parsing is a
dictionary method_id -> method.
"""
__slots__ = ('_discovery_document', '_parsed_schemas', '_parsed_methods',
'_base_url', '_common_parameters')
  def __init__(self, doc):
    self._discovery_document = doc
    self._parsed_schemas = {}
    self._parsed_methods = {}
self._base_url = ''
self._common_parameters = {}
def _ParseType(self, discovery_type):
ref = discovery_type.get('$ref')
if ref:
return self._GetSchema(ref)
type_name = discovery_type['type']
if type_name == 'any':
return mock_api_types.AnyType()
elif type_name == 'array':
return mock_api_types.ArrayType(self._ParseType(discovery_type['items']))
elif type_name == 'boolean':
return mock_api_types.BooleanType()
elif type_name == 'integer':
return self._ParseIntegerType(discovery_type)
elif type_name == 'number':
return self._ParseNumberType(discovery_type)
elif type_name == 'object':
return self._ParseObjectType(discovery_type)
elif type_name == 'string':
return self._ParseStringType(discovery_type)
else:
raise ValueError('Unrecognized type {type}'.format(type=type_name))
def _ParseIntegerType(self, discovery_type):
value_format = discovery_type.get('format')
if value_format in (None, 'int32', 'uint32'):
return mock_api_types.IntegerType(value_format or 'int32')
raise ValueError('Invalid integer format {value}'.format(
value=value_format))
def _ParseNumberType(self, discovery_type):
value_format = discovery_type.get('format')
if value_format in (None, 'double', 'float'):
return mock_api_types.NumberType(value_format or 'double')
raise ValueError('Invalid number format {value}'.format(
value=value_format))
def _ParseStringType(self, discovery_type):
value_format = discovery_type.get('format')
if value_format in (None, 'byte', 'date', 'date-time', 'int64', 'uint64'):
return mock_api_types.StringType(value_format)
raise ValueError('Invalid string format {value}'.format(
value=value_format))
def _ParseObjectType(self, discovery_type):
properties, additional = self._ParseProperties(discovery_type)
object_type = mock_api_types.ObjectType()
object_type.Define('', properties, additional)
return object_type
def _ParseSchema(self, discovery_schema):
properties, additional = self._ParseProperties(discovery_schema)
return self._CreateSchema(
discovery_schema.get('id'), properties, additional)
def _ParseProperties(self, discovery_object_type):
"""Parses properties of a discovery document object tyoe."""
assert discovery_object_type.get('type') == 'object'
properties = []
for property_name, property_type in (
discovery_object_type.get('properties', {}).iteritems()):
properties.append(mock_api_types.Property(
property_name, self._ParseType(property_type)))
additional = None
additional_properties = discovery_object_type.get('additionalProperties')
if additional_properties is not None:
additional = self._ParseType(additional_properties)
return properties, additional
def _ParseSchemas(self, discovery_schemas):
for _, discovery_schema in discovery_schemas.iteritems():
self._ParseSchema(discovery_schema)
def _ParseMethods(self, discovery_methods):
for method_name, discovery_method in discovery_methods.iteritems():
self._ParseMethod(method_name, discovery_method)
def _ParseParameter(self, parameter_name, parameter_type):
return mock_api_types.Parameter(
parameter_name, self._ParseType(parameter_type))
def _ParseParameters(self, discovery_method_parameters):
parameters = []
for parameter_name, parameter_type in (
discovery_method_parameters.iteritems()):
parameters.append(
self._ParseParameter(parameter_name, parameter_type))
parameters.sort(key=lambda parameter: parameter.name)
return parameters
def _ParseMethod(self, method_name, discovery_method):
parameters = self._ParseParameters(discovery_method.get('parameters', {}))
# Parse request type
discovery_method_request = discovery_method.get('request')
if discovery_method_request is None:
request_type = None
else:
request_type = self._ParseType(discovery_method_request)
# Parse response type.
discovery_method_response = discovery_method.get('response')
if discovery_method_response is None:
response_type = None
else:
response_type = self._ParseType(discovery_method_response)
return self._CreateMethod(
discovery_method.get('id'), method_name,
discovery_method.get('path', ''), parameters,
request_type, response_type)
def _ParseResources(self, discovery_resources):
for _, discovery_resource in discovery_resources.iteritems():
self._ParseResource(discovery_resource)
# Return all accumulated methods.
return self._parsed_methods
def _ParseResource(self, discovery_resource):
discovery_methods = discovery_resource.get('methods')
if discovery_methods:
self._ParseMethods(discovery_methods)
discovery_resources = discovery_resource.get('resources')
if discovery_resources:
self._ParseResources(discovery_resources)
def _ParseGlobals(self, discovery_document):
self._base_url = discovery_document.get('baseUrl')
self._common_parameters = self._ParseParameters(
discovery_document.get('parameters', {}))
def Parse(self):
self._ParseGlobals(self._discovery_document)
self._ParseSchemas(self._discovery_document.get('schemas'))
return self._ParseResources(self._discovery_document.get('resources'))
def _GetSchema(self, name):
schema = self._parsed_schemas.get(name)
if schema is None:
self._parsed_schemas[name] = schema = mock_api_types.ObjectType()
return schema
def _CreateSchema(self, name, properties, additional):
schema = self._GetSchema(name)
schema.Define(name, properties, additional)
return schema
def _CreateMethod(self, method_id, name, path, parameters, request, response):
if method_id in self._parsed_methods:
raise ValueError('Duplicate method {method}'.format(method=method_id))
all_parameters = dict((p.name, p) for p in self._common_parameters)
all_parameters.update(dict((p.name, p) for p in parameters))
path = self._base_url + path
method = mock_api_types.Method(
method_id, name, path, all_parameters, request, response)
self._parsed_methods[method_id] = method
    return method
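For orientation, a sketch of how this parser is typically driven; the discovery document load shown here is an assumption for illustration (the file name is hypothetical), not part of the module:

    import json

    from gcutil_lib import mock_api_parser

    # Load a previously saved Compute API discovery document.
    with open('compute_v1_discovery.json') as fh:
        discovery_doc = json.load(fh)

    # Parse() walks the schemas and resources and returns a dict keyed by
    # method id (e.g. 'compute.instances.insert') -> mock_api_types.Method.
    methods = mock_api_parser.Parser(discovery_doc).Parse()
    print('%d methods parsed' % len(methods))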
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/*
* Katana - a powerful, open-source screenshot utility
*
* Copyright (C) 2018, Gage Alexander <[email protected]>
*
* Katana is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* Katana is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Katana. If not, see <http://www.gnu.org/licenses/>.
*/
const packager = require('electron-packager')
const options = {
platform: ['darwin'],
arch: 'x64',
icon: './app/static/images/icon',
dir: '.',
ignore: ['build'],
  out: './build/Release',
  overwrite: true,
  prune: true
}
packager(options, (error, path) => {
if (error) {
return (
console.log(`Error: ${error}`)
)
}
console.log(`Package created, path: ${path}`)
})
<|file_name|>testmfd.cpp<|end_file_name|><|fim▁begin|>#include "stdhdr.h"
#include "camplib.h"
#include "mfd.h"
#include "Graphics/Include/render2d.h"
#include "dispcfg.h"
#include "simdrive.h"
#include "camp2sim.h"
#include "hud.h"
#include "aircrft.h"
#include "fack.h"
#include "otwdrive.h" //MI
#include "cpmanager.h" //MI
#include "icp.h" //MI
#include "aircrft.h" //MI
#include "fcc.h" //MI
#include "radardoppler.h" //MI
//MI
void DrawBullseyeCircle(VirtualDisplay* display, float cursorX, float cursorY);
struct MfdTestButtons
{
char *label1, *label2;
enum { ModeNoop = 0, // do nothing
ModeParent, // hand off to parent
ModeTest1,
ModeTest2, // two test sub modes
ModeRaltTest,
ModeRunTest,
ModeClear,
};
int nextMode;
};
#define NOENTRY { NULL, NULL, MfdTestButtons::ModeNoop}
#define PARENT { NULL, NULL, MfdTestButtons::ModeParent}
static const MfdTestButtons testpage1[20] =
{
// test page menu
{"BIT1", NULL, MfdTestButtons::ModeTest2}, // 1
NOENTRY,
{"CLR", NULL, MfdTestButtons::ModeClear},
NOENTRY,
NOENTRY, // 5
{"MFDS", NULL, MfdTestButtons::ModeRunTest},
{"RALT", "500", MfdTestButtons::ModeRaltTest},
{"TGP", NULL, MfdTestButtons::ModeRunTest},
{"FINS", NULL, MfdTestButtons::ModeRunTest},
{"TFR", NULL, MfdTestButtons::ModeRunTest}, // 10
PARENT,
PARENT,
PARENT,
PARENT, // current mode
PARENT, // 15
{"RSU", NULL, MfdTestButtons::ModeRunTest},
{"INS", NULL, MfdTestButtons::ModeRunTest},
{"SMS", NULL, MfdTestButtons::ModeNoop},
{"FCR", NULL, MfdTestButtons::ModeRunTest},
{"DTE", NULL, MfdTestButtons::ModeRunTest}, // 20
};
static const MfdTestButtons testpage2[20] =
{
// test page menu
{"BIT2", NULL, MfdTestButtons::ModeTest1}, // 1
NOENTRY,
{"CLR", NULL, MfdTestButtons::ModeClear},
NOENTRY,
NOENTRY, // 5
{"IFF1", NULL, MfdTestButtons::ModeRunTest},
{"IFF2", NULL, MfdTestButtons::ModeRunTest},
{"IFF3", NULL, MfdTestButtons::ModeRunTest},
{"IFFC", NULL, MfdTestButtons::ModeRunTest},
{"TCN", NULL, MfdTestButtons::ModeRunTest}, // 10
PARENT,
PARENT,
PARENT,
PARENT,
PARENT, // 15
{NULL, NULL, MfdTestButtons::ModeNoop},
{NULL, NULL, MfdTestButtons::ModeNoop},
{NULL, NULL, MfdTestButtons::ModeNoop},
{"TISL", NULL, MfdTestButtons::ModeRunTest},
{"UFC", NULL, MfdTestButtons::ModeRunTest}, // 20
};
struct MfdTestPage
{
const MfdTestButtons *buttons;
};
static const MfdTestPage mfdpages[] =
{
{testpage1},
{testpage2},
};
static const int NMFDPAGES = sizeof(mfdpages) / sizeof(mfdpages[0]);
TestMfdDrawable::TestMfdDrawable()
{
bitpage = 0;
bittest = -1;
timer = 0;
}
void TestMfdDrawable::Display(VirtualDisplay* newDisplay)
{
AircraftClass *playerAC = SimDriver.GetPlayerAircraft();
//MI
float cX, cY = 0;
if (g_bRealisticAvionics)
{
RadarDopplerClass* theRadar = (RadarDopplerClass*)FindSensor(playerAC, SensorClass::Radar);
if ( not theRadar)
{
ShiWarning("Oh Oh shouldn't be here without a radar");
return;
}
else
{
theRadar->GetCursorPosition(&cX, &cY);
}
}
display = newDisplay;
ShiAssert(bitpage >= 0 and bitpage < sizeof(mfdpages) / sizeof(mfdpages[0]));
ShiAssert(display not_eq NULL);
const MfdTestButtons *mb = mfdpages[bitpage].buttons;
AircraftClass *self = MfdDisplay[OnMFD()]->GetOwnShip();
ShiAssert(self not_eq NULL);
//MI changed
if (g_bRealisticAvionics)
{
if (OTWDriver.pCockpitManager and OTWDriver.pCockpitManager->mpIcp and
OTWDriver.pCockpitManager->mpIcp->ShowBullseyeInfo)
{
DrawBullseyeCircle(display, cX, cY);
}
else
DrawReference(self);
}
else
DrawReference(self);
display->SetColor(GetMfdColor(MFD_LABELS));
char buf[100];
for (int i = 0; i < 20; i++)
{
int hilite = 0;
if (i == bittest and timer > SimLibElapsedTime)
hilite = 1;
switch (mb[i].nextMode)
{
case MfdTestButtons::ModeRaltTest:
sprintf(buf, "%.0f", hilite ? 300.0f : TheHud->lowAltWarning);
LabelButton(i, mb[i].label1, buf, hilite);
break;
default:
if (mb[i].label1)
LabelButton(i, mb[i].label1, mb[i].label2, hilite);
else if (mb[i].nextMode == MfdTestButtons::ModeParent)
MfdDrawable::DefaultLabel(i);
}
}
if (playerAC and playerAC->mFaults)
{
FackClass *fack = playerAC->mFaults;
float yinc = display->TextHeight();
const static float namex = -0.6f;
const static float starty = 0.6f;
float y = starty;
float x = namex;
float xinc = 0.3F;
for (int i = 0; i < fack->GetMflListCount(); i++)
{
const char *fname;
int subsys;
int count;
char timestr[100];
if (fack->GetMflEntry(i, &fname, &subsys, &count, timestr) == false)
continue;
char outstr[100];
for (int i = 0; i < 5; i++)
{
switch (i)
{
case 1:
sprintf(outstr, "%-4s", fname);
display->TextLeft(x, y, outstr);
x += xinc;
break;
case 2:
sprintf(outstr, "%03d", subsys);
display->TextLeft(x, y, outstr);
x += xinc;
break;
case 3:
x -= 0.1F;
sprintf(outstr, "%2d", count);
display->TextLeft(x, y, outstr);
x += xinc;
break;
case 4:
x -= 0.1F;
sprintf(outstr, "%s", timestr);
display->TextLeft(x, y, outstr);
x += xinc;
break;
default:
break;
}
}
//sprintf (outstr, "%-4s %03d %2d %s", fname, subsys, count, timestr);
//ShiAssert(strlen(outstr) < sizeof outstr);
//display->TextLeft(namex, y, outstr);
y -= yinc;
x = namex;
}
}
}
void TestMfdDrawable::PushButton(int whichButton, int whichMFD)
{
ShiAssert(bitpage >= 0 and bitpage < sizeof(mfdpages) / sizeof(mfdpages[0]));
ShiAssert(whichButton >= 0 and whichButton < 20);
AircraftClass *playerAC = SimDriver.GetPlayerAircraft();
switch (mfdpages[bitpage].buttons[whichButton].nextMode)
{
case MfdTestButtons::ModeNoop:
break;
case MfdTestButtons::ModeRaltTest:
case MfdTestButtons::ModeRunTest:
bittest = whichButton;
timer = SimLibElapsedTime + 5 * CampaignSeconds;
break;
case MfdTestButtons::ModeTest2:
bitpage = 1;
break;
case MfdTestButtons::ModeTest1:
bitpage = 0;
break;
case MfdTestButtons::ModeParent:
MfdDrawable::PushButton(whichButton, whichMFD);
break;
case MfdTestButtons::ModeClear: // clear MFL
if (playerAC and playerAC->mFaults)
playerAC->mFaults->ClearMfl();
break;
}
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!python3.3
# -*- coding: utf-8 -*-
"""
.. module:: examples.benchmarks
:platform: Agnostic, Windows
:synopsis: Full suite of benchmarks
Created on 10/08/2013
"""
def standard_iges_setup(system, filename):
system.StartSection.Prolog = " "
system.GlobalSection.IntegerBits = int(32)
system.GlobalSection.SPMagnitude = int(38)
system.GlobalSection.SPSignificance = int(6)
system.GlobalSection.DPMagnitude = int(38)
system.GlobalSection.DPSignificance = int(15)
system.GlobalSection.MaxNumberLineWeightGrads = int(8)
system.GlobalSection.WidthMaxLineWeightUnits = float(0.016)
system.GlobalSection.MaxCoordValue = float(71)
index_dot = filename.index('.')
    system.GlobalSection.ProductIdentificationFromSender = filename[:index_dot]
    system.GlobalSection.FileName = filename
system.GlobalSection.ProductIdentificationForReceiver = \
system.GlobalSection.ProductIdentificationFromSender
system.GlobalSection.AuthorOrg = "Queensland Uni. of Tech."
    system.GlobalSection.NameOfAuthor = "Rodney Persky"
<|file_name|>output.js<|end_file_name|><|fim▁begin|>import "core-js/modules/web.dom.iterable";
import "core-js/modules/es6.promise";
var p = Promise.resolve(0);
Promise.all([p]).then(function (outcome) {
alert("OK");
});
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># PyAutoGUI: Cross-platform GUI automation for human beings.
# BSD license
# Al Sweigart [email protected] (Send me feedback & suggestions!)
"""
IMPORTANT NOTE!
To use this module on Mac OS X, you need the PyObjC module installed.
For Python 3, run:
sudo pip3 install pyobjc-core
sudo pip3 install pyobjc
For Python 2, run:
sudo pip install pyobjc-core
sudo pip install pyobjc
(There's some bug with their installer, so install pyobjc-core first or else
the install takes forever.)
To use this module on Linux, you need Xlib module installed.
For Python 3, run:
sudo pip3 install python3-Xlib
For Python 2, run:
sudo pip install Xlib
To use this module on Windows, you do not need anything else.
You will need PIL/Pillow to use the screenshot features.
"""
from __future__ import absolute_import, division, print_function
__version__ = '0.9.33'
import collections
import sys
import time
KEY_NAMES = ['\t', '\n', '\r', ' ', '!', '"', '#', '$', '%', '&', "'", '(',
')', '*', '+', ',', '-', '.', '/', '0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', ':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '^', '_', '`',
'a', 'b', 'c', 'd', 'e','f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o',
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~',
    'accept', 'add', 'alt', 'altleft', 'altright', 'apps', 'back', 'backspace',
'browserback', 'browserfavorites', 'browserforward', 'browserhome',
'browserrefresh', 'browsersearch', 'browserstop', 'capslock', 'clear',
'convert', 'ctrl', 'ctrlleft', 'ctrlright', 'decimal', 'del', 'delete',
'divide', 'down', 'end', 'enter', 'esc', 'escape', 'execute', 'f1', 'f10',
'f11', 'f12', 'f13', 'f14', 'f15', 'f16', 'f17', 'f18', 'f19', 'f2', 'f20',
'f21', 'f22', 'f23', 'f24', 'f3', 'f4', 'f5', 'f6', 'f7', 'f8', 'f9',
'final', 'fn', 'hanguel', 'hangul', 'hanja', 'help', 'home', 'insert', 'junja',
'kana', 'kanji', 'launchapp1', 'launchapp2', 'launchmail',
'launchmediaselect', 'left', 'modechange', 'multiply', 'nexttrack',
'nonconvert', 'num0', 'num1', 'num2', 'num3', 'num4', 'num5', 'num6',
'num7', 'num8', 'num9', 'numlock', 'pagedown', 'pageup', 'pause', 'pgdn',
'pgup', 'playpause', 'prevtrack', 'print', 'printscreen', 'prntscrn',
'prtsc', 'prtscr', 'return', 'right', 'scrolllock', 'select', 'separator',
'shift', 'shiftleft', 'shiftright', 'sleep', 'stop', 'subtract', 'tab',
'up', 'volumedown', 'volumemute', 'volumeup', 'win', 'winleft', 'winright', 'yen',
'command', 'option', 'optionleft', 'optionright']
KEYBOARD_KEYS = KEY_NAMES # keeping old KEYBOARD_KEYS for backwards compatibility
def isShiftCharacter(character):
"""Returns True if the key character is uppercase or shifted."""
return character.isupper() or character in '~!@#$%^&*()_+{}|:"<>?'
# The platformModule is where we reference the platform-specific functions.
if sys.platform.startswith('java'):
#from . import _pyautogui_java as platformModule
raise NotImplementedError('Jython is not yet supported by PyAutoGUI.')
elif sys.platform == 'darwin':
from . import _pyautogui_osx as platformModule
elif sys.platform == 'win32':
from . import _pyautogui_win as platformModule
else:
from . import _pyautogui_x11 as platformModule
# TODO: Having module-wide user-writable global variables is bad. It makes
# restructuring the code very difficult. For instance, what if we decide to
# move the mouse-related functions to a separate file (a submodule)? How that
# file will access this module vars? It will probably lead to a circular
# import.
# In seconds. Any duration less than this is rounded to 0.0 to instantly move
# the mouse.
MINIMUM_DURATION = 0.1
# If sleep_amount is too short, time.sleep() will be a no-op and the mouse
# cursor moves there instantly.
# TODO: This value should vary with the platform. http://stackoverflow.com/q/1133857
MINIMUM_SLEEP = 0.05
PAUSE = 0.1 # The number of seconds to pause after EVERY public function call. Useful for debugging.
FAILSAFE = True
# General Functions
# =================
def getPointOnLine(x1, y1, x2, y2, n):
"""Returns the (x, y) tuple of the point that has progressed a proportion
n along the line defined by the two x, y coordinates.
Copied from pytweening module.
"""
x = ((x2 - x1) * n) + x1
y = ((y2 - y1) * n) + y1
return (x, y)
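# For example, getPointOnLine(0, 0, 10, 20, 0.25) returns (2.5, 5.0): the
# point a quarter of the way from (0, 0) to (10, 20).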
def linear(n):
"""Trivial linear tweening function.
Copied from pytweening module.
"""
if not 0.0 <= n <= 1.0:
raise ValueError('Argument must be between 0.0 and 1.0.')
return n
def _autoPause(pause, _pause):
if _pause:
if pause is not None:
time.sleep(pause)
elif PAUSE != 0:
time.sleep(PAUSE)
def _unpackXY(x, y):
"""If x is a sequence and y is None, returns x[0], y[0]. Else, returns x, y.
On functions that receive a pair of x,y coordinates, they can be passed as
separate arguments, or as a single two-element sequence.
"""
if isinstance(x, collections.Sequence):
if len(x) == 2:
if y is None:
x, y = x
else:
raise ValueError('When passing a sequence at the x argument, the y argument must not be passed (received {0}).'.format(repr(y)))
else:
raise ValueError('The supplied sequence must have exactly 2 elements ({0} were received).'.format(len(x)))
else:
pass
return x, y
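# For example, both _unpackXY(10, 20) and _unpackXY((10, 20), None) return
# (10, 20); passing a two-element sequence together with a separate y raises
# ValueError.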
def position(x=None, y=None):
"""Returns the current xy coordinates of the mouse cursor as a two-integer
tuple.
Args:
x (int, None, optional) - If not None, this argument overrides the x in
the return value.
y (int, None, optional) - If not None, this argument overrides the y in
the return value.
Returns:
(x, y) tuple of the current xy coordinates of the mouse cursor.
"""
posx, posy = platformModule._position()
posx = int(posx)
posy = int(posy)
if x is not None:
posx = int(x)
if y is not None:
posy = int(y)
return posx, posy
def size():
"""Returns the width and height of the screen as a two-integer tuple.
Returns:
(width, height) tuple of the screen size, in pixels.
"""
return platformModule._size()
def onScreen(x, y=None):
"""Returns whether the given xy coordinates are on the screen or not.
Args:
Either the arguments are two separate values, first arg for x and second
for y, or there is a single argument of a sequence with two values, the
first x and the second y.
Example: onScreen(x, y) or onScreen([x, y])
Returns:
bool: True if the xy coordinates are on the screen at its current
resolution, otherwise False.
"""
x, y = _unpackXY(x, y)
x = int(x)
y = int(y)
width, height = platformModule._size()
return 0 <= x < width and 0 <= y < height
# Mouse Functions
# ===============
def mouseDown(x=None, y=None, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs pressing a mouse button down (but not up).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
mouse down happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
mouse down happens. None by default.
button (str, int, optional): The mouse button pressed down. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3), not %s" % button)
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position() # TODO - this isn't right. We need to check the params.
if button == 1 or str(button).lower() == 'left':
platformModule._mouseDown(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._mouseDown(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._mouseDown(x, y, 'right')
_autoPause(pause, _pause)
def mouseUp(x=None, y=None, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs releasing a mouse button up (but not down beforehand).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
mouse up happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
mouse up happens. None by default.
button (str, int, optional): The mouse button released. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, or 3
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3), not %s" % button)
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position()
if button == 1 or str(button).lower() == 'left':
platformModule._mouseUp(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._mouseUp(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._mouseUp(x, y, 'right')
_autoPause(pause, _pause)
def click(x=None, y=None, clicks=1, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs pressing a mouse button down and then immediately releasing it.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where
the click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
clicks (int, optional): The number of clicks to perform. 1 by default.
For example, passing 2 would do a doubleclick.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, 3
"""
if button not in ('left', 'middle', 'right', 1, 2, 3):
raise ValueError("button argument must be one of ('left', 'middle', 'right', 1, 2, 3)")
_failSafeCheck()
x, y = _unpackXY(x, y)
_mouseMoveDrag('move', x, y, 0, 0, duration=0, tween=None)
x, y = platformModule._position()
for i in range(clicks):
_failSafeCheck()
if button == 1 or str(button).lower() == 'left':
platformModule._click(x, y, 'left')
elif button == 2 or str(button).lower() == 'middle':
platformModule._click(x, y, 'middle')
elif button == 3 or str(button).lower() == 'right':
platformModule._click(x, y, 'right')
else:
# These mouse buttons for hor. and vert. scrolling only apply to x11:
platformModule._click(x, y, button)
time.sleep(interval)
_autoPause(pause, _pause)
def rightClick(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a right mouse button click.
    This is a wrapper function for click(x, y, 1, 0.0, 'right').
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
click(x, y, 1, 0.0, 'right', _pause=False)
_autoPause(pause, _pause)
def middleClick(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a middle mouse button click.
    This is a wrapper function for click(x, y, 1, 0.0, 'middle').
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
click(x, y, 1, 0.0, 'middle', _pause=False)
_autoPause(pause, _pause)
def doubleClick(x=None, y=None, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a double click.
    This is a wrapper function for click(x, y, 2, interval, button).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, 3, 4,
5, 6, or 7
"""
_failSafeCheck()
click(x, y, 2, interval, button, _pause=False)
_autoPause(pause, _pause)
def tripleClick(x=None, y=None, interval=0.0, button='left', duration=0.0, tween=linear, pause=None, _pause=True):
"""Performs a triple click..
This is a wrapper function for click('left', x, y, 3, interval).
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
interval (float, optional): The number of seconds in between each click,
if the number of clicks is greater than 1. 0.0 by default, for no
pause in between clicks.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
Raises:
ValueError: If button is not one of 'left', 'middle', 'right', 1, 2, 3, 4,
5, 6, or 7
"""
_failSafeCheck()
click(x, y, 3, interval, button, _pause=False)
_autoPause(pause, _pause)
def scroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs a scroll of the mouse scroll wheel.
Whether this is a vertical or horizontal scroll depends on the underlying
operating system.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._scroll(clicks, x, y)
_autoPause(pause, _pause)
def hscroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs an explicitly horizontal scroll of the mouse scroll wheel,
if this is supported by the operating system. (Currently just Linux.)
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._hscroll(clicks, x, y)
_autoPause(pause, _pause)
def vscroll(clicks, x=None, y=None, pause=None, _pause=True):
"""Performs an explicitly vertical scroll of the mouse scroll wheel,
if this is supported by the operating system. (Currently just Linux.)
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
clicks (int, float): The amount of scrolling to perform.
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
x, y = position(x, y)
platformModule._vscroll(clicks, x, y)
_autoPause(pause, _pause)
def moveTo(x=None, y=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Moves the mouse cursor to a point on the screen.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): The x position on the screen where the
click happens. None by default. If tuple, this is used for x and y.
y (int, float, None, optional): The y position on the screen where the
click happens. None by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
Returns:
None
"""
x, y = _unpackXY(x, y)
_failSafeCheck()
_mouseMoveDrag('move', x, y, 0, 0, duration, tween)
_autoPause(pause, _pause)
def moveRel(xOffset=None, yOffset=None, duration=0.0, tween=linear, pause=None, _pause=True):
"""Moves the mouse cursor to a point on the screen, relative to its current
position.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for x and y.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
Returns:
None
"""
xOffset, yOffset = _unpackXY(xOffset, yOffset)
_failSafeCheck()
_mouseMoveDrag('move', None, None, xOffset, yOffset, duration, tween)
_autoPause(pause, _pause)
def dragTo(x=None, y=None, duration=0.0, tween=linear, button='left', pause=None, _pause=True):
"""Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for x and y.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
_failSafeCheck()
if type(x) in (tuple, list):
x, y = x[0], x[1]
mouseDown(button=button, _pause=False)
_mouseMoveDrag('drag', x, y, 0, 0, duration, tween, button)
mouseUp(button=button, _pause=False)
_autoPause(pause, _pause)
def dragRel(xOffset=0, yOffset=0, duration=0.0, tween=linear, button='left', pause=None, _pause=True):
"""Performs a mouse drag (mouse movement while a button is held down) to a
point on the screen, relative to its current position.
The x and y parameters detail where the mouse event happens. If None, the
current mouse position is used. If a float value, it is rounded down. If
outside the boundaries of the screen, the event happens at edge of the
screen.
Args:
x (int, float, None, tuple, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default. If tuple, this is used for xOffset and yOffset.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
if xOffset is None:
xOffset = 0
if yOffset is None:
yOffset = 0
if type(xOffset) in (tuple, list):
xOffset, yOffset = xOffset[0], xOffset[1]
if xOffset == 0 and yOffset == 0:
return # no-op case
_failSafeCheck()
mousex, mousey = platformModule._position()
mouseDown(button=button, _pause=False)
_mouseMoveDrag('drag', mousex, mousey, xOffset, yOffset, duration, tween, button)
mouseUp(button=button, _pause=False)
_autoPause(pause, _pause)
def _mouseMoveDrag(moveOrDrag, x, y, xOffset, yOffset, duration, tween, button=None):
"""Handles the actual move or drag event, since different platforms
implement them differently.
On Windows & Linux, a drag is a normal mouse move while a mouse button is
held down. On OS X, a distinct "drag" event must be used instead.
The code for moving and dragging the mouse is similar, so this function
handles both. Users should call the moveTo() or dragTo() functions instead
of calling _mouseMoveDrag().
Args:
moveOrDrag (str): Either 'move' or 'drag', for the type of action this is.
x (int, float, None, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default.
y (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
xOffset (int, float, None, optional): How far left (for negative values) or
right (for positive values) to move the cursor. 0 by default.
yOffset (int, float, None, optional): How far up (for negative values) or
down (for positive values) to move the cursor. 0 by default.
duration (float, optional): The amount of time it takes to move the mouse
cursor to the new xy coordinates. If 0, then the mouse cursor is moved
instantaneously. 0.0 by default.
tween (func, optional): The tweening function used if the duration is not
0. A linear tween is used by default. See the tweens.py file for
details.
button (str, int, optional): The mouse button clicked. Must be one of
'left', 'middle', 'right' (or 1, 2, or 3) respectively. 'left' by
default.
Returns:
None
"""
# The move and drag code is similar, but OS X requires a special drag event instead of just a move event when dragging.
# See https://stackoverflow.com/a/2696107/1893164
assert moveOrDrag in ('move', 'drag'), "moveOrDrag must be in ('move', 'drag'), not %s" % (moveOrDrag)
if sys.platform != 'darwin':
moveOrDrag = 'move' # Only OS X needs the drag event specifically.
xOffset = int(xOffset) if xOffset is not None else 0
yOffset = int(yOffset) if yOffset is not None else 0
if x is None and y is None and xOffset == 0 and yOffset == 0:
return # Special case for no mouse movement at all.
startx, starty = position()
x = int(x) if x is not None else startx
y = int(y) if y is not None else starty
# x, y, xOffset, yOffset are now int.
x += xOffset
y += yOffset
width, height = size()
# Make sure x and y are within the screen bounds.
x = max(0, min(x, width - 1))
y = max(0, min(y, height - 1))
# If the duration is small enough, just move the cursor there instantly.
steps = [(x, y)]
if duration > MINIMUM_DURATION:
# Non-instant moving/dragging involves tweening:
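        # Heuristic: one tween step per pixel of the larger screen dimension,
        # clamped below so that each per-step sleep lasts at least MINIMUM_SLEEP.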
num_steps = max(width, height)
sleep_amount = duration / num_steps
if sleep_amount < MINIMUM_SLEEP:
num_steps = int(duration / MINIMUM_SLEEP)
sleep_amount = duration / num_steps
steps = [
getPointOnLine(startx, starty, x, y, tween(n / num_steps))
for n in range(num_steps)
]
# Making sure the last position is the actual destination.
steps.append((x, y))
for tweenX, tweenY in steps:
if len(steps) > 1:
# A single step does not require tweening.
time.sleep(sleep_amount)
_failSafeCheck()
tweenX = int(round(tweenX))
tweenY = int(round(tweenY))
if moveOrDrag == 'move':
platformModule._moveTo(tweenX, tweenY)
elif moveOrDrag == 'drag':
platformModule._dragTo(tweenX, tweenY, button)
else:
raise NotImplementedError('Unknown value of moveOrDrag: {0}'.format(moveOrDrag))
_failSafeCheck()
# Keyboard Functions
# ==================
def isValidKey(key):
"""Returns a Boolean value if the given key is a valid value to pass to
PyAutoGUI's keyboard-related functions for the current platform.
This function is here because passing an invalid value to the PyAutoGUI
keyboard functions currently is a no-op that does not raise an exception.
Some keys are only valid on some platforms. For example, while 'esc' is
valid for the Escape key on all platforms, 'browserback' is only used on
Windows operating systems.
Args:
key (str): The key value.
Returns:
bool: True if key is a valid value, False if not.
"""
    return platformModule.keyboardMapping.get(key, None) is not None
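# Example (illustrative): isValidKey('esc') should be True on every platform,
# while a Windows-only name such as 'browserback' is True only where the
# platform's keyboardMapping defines it.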
def keyDown(key, pause=None, _pause=True):
"""Performs a keyboard key press without the release. This will put that
key in a held down state.
NOTE: For some reason, this does not seem to cause key repeats like would
happen if a keyboard key was held down on a text field.
Args:
key (str): The key to be pressed down. The valid names are listed in
KEYBOARD_KEYS.
Returns:
None
"""
if len(key) > 1:
key = key.lower()
_failSafeCheck()
platformModule._keyDown(key)
_autoPause(pause, _pause)
def keyUp(key, pause=None, _pause=True):
"""Performs a keyboard key release (without the press down beforehand).
Args:
key (str): The key to be released up. The valid names are listed in
KEYBOARD_KEYS.
Returns:
None
"""
if len(key) > 1:
key = key.lower()
_failSafeCheck()
platformModule._keyUp(key)
_autoPause(pause, _pause)
def press(keys, presses=1, interval=0.0, pause=None, _pause=True):
"""Performs a keyboard key press down, followed by a release.
Args:
key (str, list): The key to be pressed. The valid names are listed in
KEYBOARD_KEYS. Can also be a list of such strings.
      presses (integer, optional): The number of times to repeat the press.
        1 by default, for a single press.
interval (float, optional): How many seconds between each press.
0.0 by default, for no pause between presses.
      pause (float, optional): How many seconds to pause after the function
        finishes. None by default, for no pause at the end.
Returns:
None
"""
    if type(keys) == str:
        keys = [keys] # put string in a list
    else:
        lowerKeys = []
        for s in keys:
            if len(s) > 1:
                lowerKeys.append(s.lower())
            else:
                lowerKeys.append(s)
        keys = lowerKeys # use the lowercased key names
interval = float(interval)
for i in range(presses):
for k in keys:
_failSafeCheck()
platformModule._keyDown(k)
platformModule._keyUp(k)
time.sleep(interval)
_autoPause(pause, _pause)
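# Example (a sketch): press('enter') taps Enter once, while
# press(['left', 'up'], presses=2) cycles through the list twice,
# producing four key taps in total.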
def typewrite(message, interval=0.0, pause=None, _pause=True):
"""Performs a keyboard key press down, followed by a release, for each of
the characters in message.
The message argument can also be list of strings, in which case any valid
keyboard name can be used.
Since this performs a sequence of keyboard presses and does not hold down
keys, it cannot be used to perform keyboard shortcuts. Use the hotkey()
function for that.
Args:
message (str, list): If a string, then the characters to be pressed. If a
list, then the key names of the keys to press in order. The valid names
are listed in KEYBOARD_KEYS.
interval (float, optional): The number of seconds in between each press.
0.0 by default, for no pause in between presses.
Returns:
None
"""
interval = float(interval)
_failSafeCheck()
for c in message:
if len(c) > 1:
c = c.lower()
press(c, _pause=False)
time.sleep(interval)
_failSafeCheck()
_autoPause(pause, _pause)
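# Example (a sketch): typewrite('Hello world!', interval=0.05) presses each
# character with a 50 ms gap; multi-character key names such as 'enter' are
# only recognized when message is a list, e.g. typewrite(['a', 'b', 'enter']).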
def hotkey(*args, **kwargs):
"""Performs key down presses on the arguments passed in order, then performs
key releases in reverse order.
The effect is that calling hotkey('ctrl', 'shift', 'c') would perform a
"Ctrl-Shift-C" hotkey/keyboard shortcut press.
Args:
key(s) (str): The series of keys to press, in order. This can also be a
list of key strings to press.
interval (float, optional): The number of seconds in between each press.
0.0 by default, for no pause in between presses.
Returns:
None
"""
interval = float(kwargs.get('interval', 0.0))
_failSafeCheck()
for c in args:
if len(c) > 1:
c = c.lower()
platformModule._keyDown(c)
time.sleep(interval)
for c in reversed(args):
if len(c) > 1:
c = c.lower()
platformModule._keyUp(c)
time.sleep(interval)
_autoPause(kwargs.get('pause', None), kwargs.get('_pause', True))
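# Example (a sketch): hotkey('ctrl', 'shift', 'esc') holds Ctrl, then Shift,
# then taps Esc, and releases the keys in reverse order, the way a person
# performs a keyboard shortcut.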
class FailSafeException(Exception):
pass
def _failSafeCheck():
if FAILSAFE and position() == (0, 0):
raise FailSafeException('PyAutoGUI fail-safe triggered from mouse moving to upper-left corner. To disable this fail-safe, set pyautogui.FAILSAFE to False.')
def displayMousePosition(xOffset=0, yOffset=0):
"""This function is meant to be run from the command line. It will
automatically display the location and RGB of the mouse cursor."""
print('Press Ctrl-C to quit.')
if xOffset != 0 or yOffset != 0:
print('xOffset: %s yOffset: %s' % (xOffset, yOffset))
resolution = size()
try:
while True:
# Get and print the mouse coordinates.
x, y = position()
positionStr = 'X: ' + str(x - xOffset).rjust(4) + ' Y: ' + str(y - yOffset).rjust(4)
if (x - xOffset) < 0 or (y - yOffset) < 0 or (x - xOffset) >= resolution[0] or (y - yOffset) >= resolution[1]:
pixelColor = ('NaN', 'NaN', 'NaN')
else:
pixelColor = pyscreeze.screenshot().getpixel((x, y))
positionStr += ' RGB: (' + str(pixelColor[0]).rjust(3)
positionStr += ', ' + str(pixelColor[1]).rjust(3)
positionStr += ', ' + str(pixelColor[2]).rjust(3) + ')'
sys.stdout.write(positionStr)
sys.stdout.write('\b' * len(positionStr))
sys.stdout.flush()
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.stdout.flush()<|fim▁end|>
|
# the mouse.
MINIMUM_DURATION = 0.1
|
<|file_name|>main.after.py<|end_file_name|><|fim▁begin|>import subprocess
import sys
import django.conf
import django.utils.encoding
import matplotlib.pyplot as plt
subprocess.Popen<|fim▁hole|>
sys.argv
plt.func()<|fim▁end|>
| |
<|file_name|>ip.rs<|end_file_name|><|fim▁begin|>//! Handles parsing of Internet Protocol fields (shared between ipv4 and ipv6)
use nom::bits;
use nom::error::Error;
use nom::number;
use nom::sequence;
use nom::IResult;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum IPProtocol {
HOPOPT,
ICMP,
IGMP,
GGP,
IPINIP,
ST,
TCP,
CBT,
EGP,
IGP,
BBNRCCMON,
NVPII,
PUP,
ARGUS,
EMCON,
XNET,
CHAOS,
UDP,
IPV6,
ICMP6,
Other(u8),
}
<|fim▁hole|>impl From<u8> for IPProtocol {
fn from(raw: u8) -> Self {
match raw {
0 => IPProtocol::HOPOPT,
1 => IPProtocol::ICMP,
2 => IPProtocol::IGMP,
3 => IPProtocol::GGP,
4 => IPProtocol::IPINIP,
5 => IPProtocol::ST,
6 => IPProtocol::TCP,
7 => IPProtocol::CBT,
8 => IPProtocol::EGP,
9 => IPProtocol::IGP,
10 => IPProtocol::BBNRCCMON,
11 => IPProtocol::NVPII,
12 => IPProtocol::PUP,
13 => IPProtocol::ARGUS,
14 => IPProtocol::EMCON,
15 => IPProtocol::XNET,
16 => IPProtocol::CHAOS,
17 => IPProtocol::UDP,
41 => IPProtocol::IPV6,
58 => IPProtocol::ICMP6,
other => IPProtocol::Other(other),
}
}
}
pub(crate) fn two_nibbles(input: &[u8]) -> IResult<&[u8], (u8, u8)> {
bits::bits::<_, _, Error<_>, _, _>(sequence::pair(
bits::streaming::take(4u8),
bits::streaming::take(4u8),
))(input)
}
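// A minimal sketch of the parsers' behavior (illustrative, with hypothetical
// inputs): byte 0x45 splits into the nibbles (4, 5), and protocol number 6
// maps to IPProtocol::TCP.
#[cfg(test)]
mod sketch_tests {
    use super::*;

    #[test]
    fn nibbles_and_protocol_mapping() {
        // 0x45 is the classic IPv4 "version 4, IHL 5" first byte.
        assert_eq!(two_nibbles(&[0x45]).unwrap().1, (4, 5));
        assert_eq!(IPProtocol::from(6u8), IPProtocol::TCP);
    }
}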
pub(crate) fn protocol(input: &[u8]) -> IResult<&[u8], IPProtocol> {
let (input, protocol) = number::streaming::be_u8(input)?;
Ok((input, protocol.into()))
}<|fim▁end|>
| |
<|file_name|>microtask.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Implementation of [microtasks](https://html.spec.whatwg.org/multipage/#microtask) and
//! microtask queues. It is up to implementations of event loops to store a queue and
//! perform checkpoints at appropriate times, as well as enqueue microtasks as required.
use crate::dom::bindings::callback::ExceptionHandling;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::PromiseBinding::PromiseJobCallback;
use crate::dom::bindings::codegen::Bindings::VoidFunctionBinding::VoidFunction;
use crate::dom::bindings::root::DomRoot;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlimageelement::ImageElementMicrotask;
use crate::dom::htmlmediaelement::MediaElementMicrotask;
use crate::dom::mutationobserver::MutationObserver;
use crate::script_runtime::{notify_about_rejected_promises, JSContext};
use crate::script_thread::ScriptThread;
use js::jsapi::{JobQueueIsEmpty, JobQueueMayNotBeEmpty};
use msg::constellation_msg::PipelineId;
use std::cell::Cell;
use std::mem;
use std::rc::Rc;
/// A collection of microtasks in FIFO order.
#[derive(Default, JSTraceable, MallocSizeOf)]
pub struct MicrotaskQueue {
/// The list of enqueued microtasks that will be invoked at the next microtask checkpoint.
microtask_queue: DomRefCell<Vec<Microtask>>,
/// <https://html.spec.whatwg.org/multipage/#performing-a-microtask-checkpoint>
performing_a_microtask_checkpoint: Cell<bool>,
}
#[derive(JSTraceable, MallocSizeOf)]
pub enum Microtask {
Promise(EnqueuedPromiseCallback),
User(UserMicrotask),
MediaElement(MediaElementMicrotask),
ImageElement(ImageElementMicrotask),
CustomElementReaction,
NotifyMutationObservers,
}
pub trait MicrotaskRunnable {
fn handler(&self) {}
}
/// A promise callback scheduled to run during the next microtask checkpoint (#4283).
#[derive(JSTraceable, MallocSizeOf)]
pub struct EnqueuedPromiseCallback {
#[ignore_malloc_size_of = "Rc has unclear ownership"]
pub callback: Rc<PromiseJobCallback>,
pub pipeline: PipelineId,
}
/// A microtask that comes from a queueMicrotask() Javascript call,
/// identical to EnqueuedPromiseCallback once it's on the queue
#[derive(JSTraceable, MallocSizeOf)]
pub struct UserMicrotask {
#[ignore_malloc_size_of = "Rc has unclear ownership"]
pub callback: Rc<VoidFunction>,
pub pipeline: PipelineId,
}
impl MicrotaskQueue {
/// Add a new microtask to this queue. It will be invoked as part of the next
/// microtask checkpoint.
#[allow(unsafe_code)]
pub fn enqueue(&self, job: Microtask, cx: JSContext) {
self.microtask_queue.borrow_mut().push(job);
unsafe { JobQueueMayNotBeEmpty(*cx) };
}
/// <https://html.spec.whatwg.org/multipage/#perform-a-microtask-checkpoint>
/// Perform a microtask checkpoint, executing all queued microtasks until the queue is empty.<|fim▁hole|> cx: JSContext,
target_provider: F,
globalscopes: Vec<DomRoot<GlobalScope>>,
) where
F: Fn(PipelineId) -> Option<DomRoot<GlobalScope>>,
{
if self.performing_a_microtask_checkpoint.get() {
return;
}
// Step 1
self.performing_a_microtask_checkpoint.set(true);
debug!("Now performing a microtask checkpoint");
// Steps 2
while !self.microtask_queue.borrow().is_empty() {
rooted_vec!(let mut pending_queue);
mem::swap(&mut *pending_queue, &mut *self.microtask_queue.borrow_mut());
for (idx, job) in pending_queue.iter().enumerate() {
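                // Notify the JS engine that the job queue is empty just before
                // running the last job of this batch, provided no new microtasks
                // were enqueued in the meantime.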
if idx == pending_queue.len() - 1 && self.microtask_queue.borrow().is_empty() {
unsafe { JobQueueIsEmpty(*cx) };
}
match *job {
Microtask::Promise(ref job) => {
if let Some(target) = target_provider(job.pipeline) {
let _ = job.callback.Call_(&*target, ExceptionHandling::Report);
}
},
Microtask::User(ref job) => {
if let Some(target) = target_provider(job.pipeline) {
let _ = job.callback.Call_(&*target, ExceptionHandling::Report);
}
},
Microtask::MediaElement(ref task) => {
task.handler();
},
Microtask::ImageElement(ref task) => {
task.handler();
},
Microtask::CustomElementReaction => {
ScriptThread::invoke_backup_element_queue();
},
Microtask::NotifyMutationObservers => {
MutationObserver::notify_mutation_observers();
},
}
}
}
// Step 3
for global in globalscopes.into_iter() {
notify_about_rejected_promises(&global);
}
// TODO: Step 4 - Cleanup Indexed Database transactions.
// Step 5
self.performing_a_microtask_checkpoint.set(false);
}
pub fn empty(&self) -> bool {
self.microtask_queue.borrow().is_empty()
}
}<|fim▁end|>
|
#[allow(unsafe_code)]
pub fn checkpoint<F>(
&self,
|
<|file_name|>rdflib-stample-pg-extension-0.1.1.js<|end_file_name|><|fim▁begin|>(function(root, undef) {
///////////////////////////////////////////////////////////////////////////////////////////////
// pg.js, part of rdflib-pg-extension.js made by Stample
// see https://github.com/stample/rdflib.js
///////////////////////////////////////////////////////////////////////////////////////////////
$rdf.PG = {
createNewStore: function(fetcherTimeout) {
var store = new $rdf.IndexedFormula();
// this makes "store.fetcher" variable available
$rdf.fetcher(store, fetcherTimeout, true);
return store;
}
}
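// Illustrative usage (a sketch; the URL is hypothetical):
//   var store = $rdf.PG.createNewStore(5000); // 5s fetcher timeout
//   store.fetcher.fetch("http://example.org/card#me")
//       .then(function(pg) { console.log(pg.printSummary()); });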
/**
* Some common and useful namespaces already declared for you
*/
$rdf.PG.Namespaces = {
LINK: $rdf.Namespace("http://www.w3.org/2007/ont/link#"),
HTTP: $rdf.Namespace("http://www.w3.org/2007/ont/http#"),
HTTPH: $rdf.Namespace("http://www.w3.org/2007/ont/httph#"),
RDF: $rdf.Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
RDFS: $rdf.Namespace("http://www.w3.org/2000/01/rdf-schema#"),
OWL: $rdf.Namespace("http://www.w3.org/2002/07/owl#"),
RSS: $rdf.Namespace("http://purl.org/rss/1.0/"),
XSD: $rdf.Namespace("http://www.w3.org/TR/2004/REC-xmlschema-2-20041028/#dt-"),
IANA: $rdf.Namespace("http://www.iana.org/assignments/link-relations/#"),
CERT: $rdf.Namespace("http://www.w3.org/ns/auth/cert"),
WAC: $rdf.Namespace("http://www.w3.org/ns/auth/acl#"),
LDP: $rdf.Namespace("http://www.w3.org/ns/ldp#"),
SIOC: $rdf.Namespace("http://rdfs.org/sioc/ns#"),
DC: $rdf.Namespace("http://purl.org/dc/elements/1.1/"),
FOAF: $rdf.Namespace("http://xmlns.com/foaf/0.1/"),
CONTACT: $rdf.Namespace("http://www.w3.org/2000/10/swap/pim/contact#"),
STAT: $rdf.Namespace("http://www.w3.org/ns/posix/stat#"),
GEOLOC: $rdf.Namespace("http://www.w3.org/2003/01/geo/wgs84_pos#")
}
/**
* Permits to get metadata about a pointed graph.
* Like request headers and response headers.
 * RDFLib puts these in the store as triples and it's not always easy to know where it puts the info.
 * This helper makes it easier to find this metadata.
*/
$rdf.PG.MetadataHelper = {
assertSingleStatement: function(stmts,msg) {
if ( !stmts || stmts.length != 1 ) {
throw new Error(msg + " - Expected exactly one statement. Found: "+stmts);
}
},
    getRequestNode: function(store, pg) {
var fetchUriAsLit = $rdf.lit(pg.why().uri);
var stmts = store.statementsMatching(undefined, $rdf.PG.Namespaces.LINK("requestedURI"), fetchUriAsLit, store.fetcher.appNode);
this.assertSingleStatement(stmts,"There should be exactly one request node");
var stmt = stmts[0];
return stmt.subject;
},
    getResponseNode: function(store, requestNode) {
var stmts = store.statementsMatching(requestNode, $rdf.PG.Namespaces.LINK("response"), undefined);
this.assertSingleStatement(stmts,"There should be exactly one response node");
var stmt = stmts[0];
return stmt.object;
},
    getResponseHeaderValue: function(store, responseNode, headerName) {
var headerSym = $rdf.PG.Namespaces.HTTPH(headerName.toLowerCase());
var stmts = store.statementsMatching(responseNode, headerSym, undefined, responseNode);
if ( !stmts || stmts.length == 0 ) return undefined;
var stmt = stmts[0];
return stmt.object;
},
    getResponseStatus: function(store, responseNode) {
var statusSym = $rdf.PG.Namespaces.HTTP("status");
var stmts = store.statementsMatching(responseNode, statusSym, undefined, responseNode);
this.assertSingleStatement(stmts,"There should be exactly one response node");
var stmt = stmts[0];
return stmt.object;
},
    getResponseStatusText: function(store, responseNode) {
var statusSym = $rdf.PG.Namespaces.HTTP("statusText");
var stmts = store.statementsMatching(responseNode, statusSym, undefined, responseNode);
this.assertSingleStatement(stmts,"There should be exactly one response node");
var stmt = stmts[0];
return stmt.object;
},
/**
 * Returns a helper that is bound to the given pointed graph and permits to get metadata related
 * to the underlying document / resource / named graph.
*
* Note that you can only use this if the underlying document of the pg was retrieved through the fetcher.
 * If the data was added to the store manually then the request/response metadata is not present in the store
 * unless you have added it yourself.
*/
    forPointedGraph: function(pg) {
        var self = this;
        // The helpers read request/response metadata triples from the pointed
        // graph's own store, which is threaded through explicitly.
        var store = pg.store;
        var requestNode = this.getRequestNode(store, pg);
        var responseNode = this.getResponseNode(store, requestNode);
return {
getRequestNode: function() {
return requestNode;
},
getResponseNode: function() {
return responseNode;
},
getResponseStatus: function() {
                return self.getResponseStatus(store, responseNode);
},
getResponseStatusText: function() {
                return self.getResponseStatusText(store, responseNode);
},
getResponseHeaderValue: function(headerName) {
                return self.getResponseHeaderValue(store, responseNode, headerName);
}
}
}
}
$rdf.PG.Utils = {
/**
* Just a little helper method to verify preconditions and fail fast.
* See http://en.wikipedia.org/wiki/Precondition
* See http://en.wikipedia.org/wiki/Fail-fast
* @param condition
* @param message
*/
checkArgument: function(condition, message) {
if (!condition) {
throw Error('IllegalArgumentException: ' + (message || 'No description'));
}
},
/**
* remove hash from URL - this gets the document location
* @param url
* @returns {*}
*/
fragmentless: function(url) {
return url.split('#')[0];
},
isFragmentless: function(url) {
return url.indexOf('#') == -1;
},
isFragmentlessSymbol: function(node) {
return this.isSymbolNode(node) && this.isFragmentless(this.symbolNodeToUrl(node));
},
getTermType: function(node) {
if ( node && node.termType ) {
return node.termType
} else {
throw new Error("Can't get termtype on this object. Probably not an RDFlib node: "+node);
}
},
isLiteralNode: function(node) {
return this.getTermType(node) == 'literal';
},
isSymbolNode: function(node) {
return this.getTermType(node) == 'symbol';
},
isBlankNode: function(node) {
return this.getTermType(node) == 'bnode';
},
literalNodeToValue: function(node) {
this.checkArgument(this.isLiteralNode(node), "Node is not a literal node:"+node);
return node.value;
},
symbolNodeToUrl: function(node) {
this.checkArgument(this.isSymbolNode(node), "Node is not a symbol node:"+node);
return node.uri;
},
/**
* Get the nodes for a given relation symbol
* @param pg
* @param relSym
* @returns => List[Nodes]
*/
getNodes: function(pg, relSym) {
return _.chain( pg.rels(relSym) )
.map(function(pg) {
return pg.pointer;
}).value();
},
getLiteralNodes: function(pg, relSym) {
return _.chain($rdf.PG.Utils.getNodes(pg,relSym))
.filter($rdf.PG.Utils.isLiteralNode)
.value();
},
getSymbolNodes: function(pg, relSym) {
return _.chain($rdf.PG.Utils.getNodes(pg,relSym))
.filter($rdf.PG.Utils.isSymbolNode)
.value();
},
getBlankNodes: function(pg, relSym) {
return _.chain($rdf.PG.Utils.getNodes(pg,relSym))
.filter($rdf.PG.Utils.isBlankNode)
.value();
},
/**
*
* @param pgList
* @returns {*}
*/
getLiteralValues: function(pgList) {
        var rels = Array.prototype.slice.call(arguments, 1);
var res = _.chain(pgList)
.map(function (pg) {
return pg.getLiteral(rels);
})
.flatten()
.value();
return res;
}
}
$rdf.PG.Utils.Rx = {
/**
* Permits to create an RxJs observable based on a list of promises
* @param promiseList the list of promise you want to convert as an RxJs Observable
* @param subject the type of Rx Subject you want to use (default to ReplaySubject)
* @param onError, an optional callback for handling errors
* @return {*}
*/
promiseListToObservable: function(promiseList, subject, onError) {
if ( promiseList.length == 0 ) {
return Rx.Observable.empty();
}
// Default to ReplaySubject
var subject = subject || new Rx.ReplaySubject();
// Default to non-blocking error logging
var onError = onError || function(error) {
console.debug("Promise error catched in promiseListToObservable: ",error);
// true means the stream won't continue.
return false;
};
var i = 0;
promiseList.map(function(promise) {
promise.then(
function (promiseValue) {
subject.onNext(promiseValue);
i++;
if ( i == promiseList.length ) {
subject.onCompleted();
}
},
function (error) {
var doStop = onError(error);
if ( doStop ) {
subject.onError(error);
}
else {
i++;
if ( i == promiseList.length ) {
subject.onCompleted();
}
}
}
)
});
return subject.asObservable();
}
}
$rdf.PG.Filters = {
isLiteralPointer: function(pg) {
return pg.isLiteralPointer();
},
isBlankNodePointer: function(pg) {
return pg.isBlankNodePointer();
},
isSymbolPointer: function(pg) {
return pg.isSymbolPointer();
}
}
$rdf.PG.Transformers = {
literalPointerToValue: function(pg) {
return $rdf.PG.Utils.literalNodeToValue(pg.pointer);
},
symbolPointerToValue: function(pg) {
return $rdf.PG.Utils.symbolNodeToUrl(pg.pointer);
},
tripleToSubject: function(triple) {
return triple.subject;
},
tripleToPredicate: function(triple) {
return triple.predicate;
},
tripleToObject: function(triple) {
return triple.object;
}
}
///////////////////////////////////////////////////////////////////////////////////////////////
// pointedGraph.js, part of rdflib-pg-extension.js made by Stample
// see https://github.com/stample/rdflib.js
///////////////////////////////////////////////////////////////////////////////////////////////
/**
* A pointed graph is a pointer in a named graph.
* A named graph is an http resource/document which contains an RDF graph.
* A pointer is a particular node in this graph.
*
* This PointedGraph implementation provides methods to navigate from one node to another in the current namedGraph,
* but it also permits to jump from one namedGraph to another (firing http requests) if a pointer points to a remote node.
*
* @param {$rdf.store} store - Quad Store
* @param {$rdf.node} pointer: point in the current graph. Type: Literal, Bnode, or URI
* @param {$rdf.sym} namedGraphUrl: the URL of the current RDF graph.
* @return {$rdf.PointedGraph}
*/
$rdf.pointedGraph = function(store, pointer, namedGraphUrl) {
return new $rdf.PointedGraph(store, pointer, namedGraphUrl);
};
$rdf.PointedGraph = function() {
$rdf.PointedGraph = function(store, pointer, namedGraphUrl) {
// TODO assert the pointer is a node
$rdf.PG.Utils.checkArgument( $rdf.PG.Utils.isFragmentlessSymbol(namedGraphUrl),"The namedGraphUrl should be a fragmentless symbol! -> "+namedGraphUrl);
this.store = store;
this.pointer = pointer;
this.namedGraphUrl = namedGraphUrl;
// The namedGraphFetchUrl is the namedGraphUrl that may or not be proxified.
        // We need this because we kind of hacked RDFLib and unfortunately if there's a cors proxy enabled,
// rdflib will only remember the proxified version of the url in the store
this.namedGraphFetchUrl = store.fetcher.proxifySymbolIfNeeded(namedGraphUrl);
};
$rdf.PointedGraph.prototype.constructor = $rdf.PointedGraph;
// TODO this logging stuff must be moved somewhere else :(
// Logs.
var logLevels = $rdf.PointedGraph.logLevels = {
nologs: 0,
debug: 1,
info: 2,
warning: 3,
error: 4
};
// Default is no logs.
$rdf.PointedGraph.logLevel = logLevels.nologs;
// To change the level of logs<|fim▁hole|> $rdf.PointedGraph.logLevel = (logLevels[level] == null ? logLevels.info : logLevels[level]);
}
var doLog = function(level, consoleLogFunction ,messageArray) {
var loggingEnabled = ($rdf.PointedGraph.logLevel !== logLevels.nologs);
if ( loggingEnabled ) {
var shouldLog = ( (logLevels[level] || logLevels.debug) >= $rdf.PointedGraph.logLevel );
if ( shouldLog ) {
// TODO maybe it may be cool to prefix the log with the current pg infos
consoleLogFunction.apply(console,messageArray);
}
}
}
// Specific functions for each level of logs.
var debug = function() { doLog('debug', console.debug, _.toArray(arguments)) };
var info = function() { doLog('info', console.info, _.toArray(arguments)) };
var warning = function() { doLog('warning', console.warn, _.toArray(arguments)) };
var error = function() { doLog('error', console.error, _.toArray(arguments)) };
// Utils.
function sparqlPatch(uri, query) {
var promise = $.ajax({
type: "PATCH",
url: uri,
contentType: 'application/sparql-update',
dataType: 'text',
processData:false,
data: query
}).promise();
return promise;
}
function sparqlPut(uri, query) {
var promise = $.ajax({
type: "PUT",
url: uri,
contentType: 'application/sparql-update',
dataType: 'text',
processData:false,
data: query
}).promise();
return promise;
}
/**
 * From the pointer, this follows a predicate/symbol/rel and gives a list of pointers in the same graph/document.
* @param {$rdf.sym} rel the relation from this node
* @returns {[PointedGraph]} of PointedGraphs with the same graph name in the same store
*/
$rdf.PointedGraph.prototype.rel = function (rel) {
$rdf.PG.Utils.checkArgument( $rdf.PG.Utils.isSymbolNode(rel) , "The argument should be a symbol:"+rel);
var self = this;
var resList = this.getCurrentDocumentTriplesMatching(this.pointer, rel, undefined, false);
return _.map(resList, function (triple) {
return new $rdf.PointedGraph(self.store, triple.object, self.namedGraphUrl, self.namedGraphFetchUrl);
});
}
$rdf.PointedGraph.prototype.relFirst = function(relUri) {
var l = this.rel(relUri);
if (l.length > 0) return l[0];
}
/**
* This is the reverse of "rel": this permits to know which PG in the current graph/document points to the given pointer
* @param {$rdf.sym} rel the relation to this node
* @returns {[PointedGraph]} of PointedGraphs with the same graph name in the same store
*/
$rdf.PointedGraph.prototype.rev = function (rel) {
$rdf.PG.Utils.checkArgument( $rdf.PG.Utils.isSymbolNode(rel) , "The argument should be a symbol:"+rel);
var self = this;
var resList = this.getCurrentDocumentTriplesMatching(undefined, rel, this.pointer, false);
return _.map(resList, function (triple) {
return new $rdf.PointedGraph(self.store, triple.subject, self.namedGraphUrl, self.namedGraphFetchUrl);
});
}
$rdf.PointedGraph.prototype.revFirst = function(relUri) {
var l = this.rev(relUri);
if (l.length > 0) return l[0];
}
/**
* Same as "rel" but follow mmultiple predicates/rels
* @returns {*}
*/
// Array[relUri] => Array[Pgs] TODO to rework
$rdf.PointedGraph.prototype.rels = function() {
var self = this;
var pgList = _.chain(arguments)
.map(function(arg) {
return self.rel(arg)
})
.flatten()
.value()
return pgList;
}
/**
* This permits to follow a relation in the local graph and then jump asynchronously.
 * This produces a stream of pointed graphs in the form of an RxJs Observable.
 * @param relUri the relation (predicate symbol) to follow
 * @return {Observable[PointedGraph]}
*/
$rdf.PointedGraph.prototype.jumpRelObservable = function(relUri) {
var pgList = this.rel(relUri);
var pgPromiseList = pgList.map(function(pg) {
return pg.jumpAsync();
});
return $rdf.PG.Utils.Rx.promiseListToObservable(pgPromiseList);
}
/**
* Just an alias for jumpRelPathObservable
 * @param relPath the array of relations/predicates to follow
* @return {*}
*/
$rdf.PointedGraph.prototype.followPath = function(relPath) {
return this.jumpRelPathObservable(relPath);
}
/**
* Permits to follow a relation/predicate path, jumping from one document to another when it's needed
* @param relPath
* @param onJumpErrorCallback optional callback to handle jump errors, because they are not emitted in the stream
* @return {*}
*/
$rdf.PointedGraph.prototype.jumpRelPathObservable = function(relPath) {
$rdf.PG.Utils.checkArgument(relPath && relPath.length > 0,"No relation to follow! "+relPath);
var head = relPath[0];
var tail = relPath.slice(1);
var headStream = this.jumpRelObservable(head);
if ( _.isEmpty(tail) ) {
return headStream;
}
else {
return headStream.flatMap(function(pg) {
var tailStream = pg.jumpRelPathObservable(tail);
return tailStream;
})
}
}
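// Example (a sketch; FOAF comes from $rdf.PG.Namespaces and the path is hypothetical):
//   var FOAF = $rdf.PG.Namespaces.FOAF;
//   pg.followPath([FOAF('knows'), FOAF('name')])
//     .subscribe(function(namePg) { console.log(namePg.pointer.value); });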
/**
* Nearly the same as jumpAsync except it will not fetch remote document but will only use documents
* that are already in the store. This means that you can't jump to a remote document that has not been previously
* loaded in the store or an error will be thrown.
* @returns {$rdf.PointedGraph}
*/
$rdf.PointedGraph.prototype.jump = function() {
if ( this.isLocalPointer() ) {
return this;
}
else {
var pointerDocumentUrl = this.getSymbolPointerDocumentUrl();
var pointerDocumentFetchUrl = this.store.fetcher.proxifyIfNeeded(pointerDocumentUrl);
var uriFetchState = this.store.fetcher.getState(pointerDocumentFetchUrl);
if (uriFetchState == 'fetched') {
return $rdf.pointedGraph(this.store, this.pointer, $rdf.sym(pointerDocumentUrl), $rdf.sym(pointerDocumentFetchUrl) );
} else {
// If this error bothers you, you may need to use jumpAsync
throw new Error("Can't jump because the jump requires ["+pointerDocumentUrl+"] to be already fetched." +
" This resource is not in the store. State="+uriFetchState);
}
}
}
/**
* This permits to jump to the pointer document if the document
* This will return the current PG if the pointer is local (bnode/literal/local symbols...)
* This will return a new PG if the pointer refers to another document.
*
* So, basically
* - (documentUrl - documentUrl#hash ) will return (documentUrl - documentUrl#hash )
* - (documentUrl - documentUrl2#hash ) will return (documentUrl2 - documentUrl2#hash )
*
* @returns {Promise[PointedGraph]}
*/
$rdf.PointedGraph.prototype.jumpAsync = function() {
var originalPG = this;
if ( originalPG.isLocalPointer() ) {
return Q.fcall(function () {
return originalPG;
})
}
else {
return this.jumpFetchRemote();
}
}
/**
* This permits to follow a remote symbol pointer and fetch the remote document.
* This will give you a PG with the same pointer but the underlying document will be
* the remote document instead of the current document.
*
* For exemple, let's suppose:
* - current PG (documentUrl,pointer) is (url1, url1#profile)
* - current document contains triple (url1#profile - foaf:knows - url2#profile)
* - you follow the foaf:knows rel and get PG2 (url1, url2#profile)
* - then you can jumpFetch on PG2 because url2 != url1
* - this will give you PG3 (url2, url2#profile)
* - you'll have the same pointer, but the document is different
*
* @returns {Promise[PointedGraph]}
*/
$rdf.PointedGraph.prototype.jumpFetchRemote = function() {
$rdf.PG.Utils.checkArgument( this.isRemotePointer(),"You are not supposed to jumpFetch if you already have all the data locally. Pointer="+this.pointer);
var pointerUrl = this.getSymbolPointerUrl();
var referrerUrl = $rdf.PG.Utils.symbolNodeToUrl(this.namedGraphUrl);
var force = false;
return this.store.fetcher.fetch(pointerUrl, referrerUrl, force);
}
// relUri => List[Symbol]
$rdf.PointedGraph.prototype.getSymbol = function() {
var rels = _.flatten(arguments); // TODO: WTF WHY DO WE NEED TO FLATTEN!!!
var pgList = this.rels.apply(this, rels);
var symbolValueList =
_.chain(pgList)
.filter($rdf.PG.Filters.isSymbolPointer)
.map($rdf.PG.Transformers.symbolPointerToValue)
.value();
return symbolValueList
}
// relUri => List[Literal]
// TODO change the name
$rdf.PointedGraph.prototype.getLiteral = function () {
var rels = _.flatten(arguments); // TODO: WTF WHY DO WE NEED TO FLATTEN!!!
var pgList = this.rels.apply(this, rels);
var literalValueList = _.chain(pgList)
.filter($rdf.PG.Filters.isLiteralPointer)
.map($rdf.PG.Transformers.literalPointerToValue)
.value();
return literalValueList;
}
// Interaction with the PGs.
$rdf.PointedGraph.prototype.delete = function(relUri, value) {
// TODO to rework? remove hardcoded namespace value
var query = 'PREFIX foaf: <http://xmlns.com/foaf/0.1/> \n' +
'DELETE DATA \n' +
'{' + "<" + this.pointer.value + ">" + relUri + ' "' + value + '"' + '. \n' + '}';
return sparqlPatch(this.pointer.value, query);
}
$rdf.PointedGraph.prototype.insert = function(relUri, value) {
// TODO to rework? remove hardcoded namespace value?
var query = 'PREFIX foaf: <http://xmlns.com/foaf/0.1/> \n' +
'INSERT DATA \n' +
'{' + "<" + this.pointer.value + ">" + relUri + ' "' + value + '"' + '. \n' + '}';
return sparqlPatch(this.pointer.value, query);
}
$rdf.PointedGraph.prototype.update = function (relUri, newValue, oldvalue) {
var query = 'DELETE DATA \n' +
'{' + "<" + this.pointer.value + "> " + relUri + ' "' + oldvalue + '"' + '} ;\n' +
'INSERT DATA \n' +
'{' + "<" + this.pointer.value + "> " + relUri + ' "' + newValue + '"' + '. } ';
return sparqlPatch(this.pointer.value, query);
}
$rdf.PointedGraph.prototype.updateStore = function(relUri, newValue) {
this.store.removeMany(this.pointer, relUri, undefined, this.namedGraphFetchUrl);
this.store.add(this.pointer, relUri, newValue, this.namedGraphFetchUrl);
}
$rdf.PointedGraph.prototype.replaceStatements = function(pg) {
var self = this;
this.store.removeMany(undefined, undefined, undefined, pg.namedGraphFetchUrl);
_.each(pg.store.statements, function(stat) {
self.store.add(stat.subject, stat.predicate, stat.object, pg.namedGraphFetchUrl)
});
}
$rdf.PointedGraph.prototype.addRel = function(rel, object) {
this.store.add( this.pointer, rel, object, this.why() );
}
$rdf.PointedGraph.prototype.removeRel = function(rel, object) {
this.store.removeMany( this.pointer, rel, object, this.why() );
}
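// Example (a sketch): both methods edit triples about the current pointer inside
// the pointer's named graph:
//   pg.addRel($rdf.PG.Namespaces.FOAF('nick'), $rdf.lit("alice"));
//   pg.removeRel($rdf.PG.Namespaces.FOAF('nick'), $rdf.lit("alice"));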
$rdf.PointedGraph.prototype.ajaxPut = function (baseUri, data, success, error, done) {
$.ajax({
type: "PUT",
url: baseUri,
dataType: "text",
contentType: "text/turtle",
processData: false,
data: data,
success: function (data, status, xhr) {
if (success) success(xhr)
},
error: function (xhr, status, err) {
if (error) error(xhr)
}
})
.done(function () {
if (done) done()
});
}
$rdf.PointedGraph.prototype.print = function() {
return this.printSummary() + " = { "+this.printContent() + "}"
}
$rdf.PointedGraph.prototype.printSummary = function() {
return "PG[pointer="+this.pointer+" - NamedGraph="+this.namedGraphUrl+"]";
}
$rdf.PointedGraph.prototype.printContent = function() {
return $rdf.Serializer(this.store).statementsToN3(this.store.statementsMatching(undefined, undefined, undefined, this.namedGraphFetchUrl));
}
$rdf.PointedGraph.prototype.toString = function() {
return this.printSummary();
}
/**
* Return a clone of the current pointed graph in another store.
* This is useful to edit a pointed graph.
* Once the edit is validated it may be nice to merge the small temporary edited store
* to the original big store.
*/
// TODO need better name
$rdf.PointedGraph.prototype.deepCopyOfGraph = function() {
var self = this;
var triples = this.store.statementsMatching(undefined, undefined, undefined, this.namedGraphFetchUrl);
var store = new $rdf.IndexedFormula();
$rdf.fetcher(store, 100000, true); // TODO; deals with timeOut
_.each(triples, function(stat) {
store.add(stat.subject, stat.predicate, stat.object, self.namedGraphFetchUrl)
});
return new $rdf.PointedGraph(store, this.pointer, this.namedGraphUrl, this.namedGraphFetchUrl);
}
$rdf.PointedGraph.prototype.isSymbolPointer = function() {
return $rdf.PG.Utils.isSymbolNode(this.pointer);
}
$rdf.PointedGraph.prototype.isLiteralPointer = function() {
return $rdf.PG.Utils.isLiteralNode(this.pointer);
}
$rdf.PointedGraph.prototype.isBlankNodePointer = function() {
return $rdf.PG.Utils.isBlankNode(this.pointer);
}
/**
* Returns the Url of the pointer.
* The url may contain a fragment.
* Will fail if the pointer is not a symbol because you can't get an url for a blank node or a literal.
*/
$rdf.PointedGraph.prototype.getSymbolPointerUrl = function() {
return $rdf.PG.Utils.symbolNodeToUrl(this.pointer);
}
/**
* Returns the Url of the document in which points the symbol pointer.
* The url is a document URL so it won't contain a fragment.
* Will fail if the pointer is not a symbol because you can't get an url for a blank node or a literal.
*/
$rdf.PointedGraph.prototype.getSymbolPointerDocumentUrl = function() {
var pointerUrl = this.getSymbolPointerUrl();
return $rdf.PG.Utils.fragmentless(pointerUrl);
}
/**
* Returns the current document/namedGraph Url (so it has no fragment)
*/
$rdf.PointedGraph.prototype.getCurrentDocumentUrl = function() {
return $rdf.PG.Utils.symbolNodeToUrl(this.namedGraphUrl);
}
/**
* This permits to find triples in the current document.
* This will not look in the whole store but will only check in the current document/namedGraph
* @param pointer (node)
* @param rel (node)
* @param object (node)
* @param onlyOne: set true if you only want one triple result (for perf reasons for exemple)
* @returns {*}
*/
$rdf.PointedGraph.prototype.getCurrentDocumentTriplesMatching = function (pointer,rel,object,onlyOne) {
var why = this.why();
return this.store.statementsMatching(pointer, rel, object, this.why(), onlyOne);
}
/**
* Builds a metadata helper to get metadatas related to the underlying documment
* @return {*}
*/
$rdf.PointedGraph.prototype.currentDocumentMetadataHelper = function() {
return $rdf.PG.MetadataHelper.forPointedGraph(this);
}
/**
* In the actual version it seems that RDFLib use the fetched url as the "why"
* Maybe it's because we have modified it a little bit to work better with our cors proxy.
* This is why we need to pass the namedGraphFetchUrl and not the namedGraphUrl
*/
$rdf.PointedGraph.prototype.why = function() {
return this.namedGraphFetchUrl;
}
/**
* This permits to find the triples that matches a given rel/predicate and object
* for the current pointer in the current document.
* @param rel
* @param object
* @param onlyOne
*/
$rdf.PointedGraph.prototype.getPointerTriplesMatching = function(rel,object,onlyOne) {
return this.getCurrentDocumentTriplesMatching(this.pointer, rel, object, onlyOne);
}
/**
* Permits to know if there is at least one triple in this graph that matches the pointer, predicate and object
* @param rel
* @param object
* @param onlyOne
* @return {boolean}
*/
$rdf.PointedGraph.prototype.hasPointerTripleMatching = function(rel,object) {
return this.getPointerTriplesMatching(rel,object,true).length > 0;
}
/**
* Returns the Url of the currently pointed document.
* Most of the time it will return the current document url.
* It will return a different url only for non-local symbol nodes.
*
* If you follow a foaf:knows, you will probably get a list of PGs where the pointer document
* URL is not local because your friends will likely describe themselves in different resources.
*/
$rdf.PointedGraph.prototype.getPointerDocumentUrl = function() {
if ( this.isSymbolPointer() ) {
return this.getSymbolPointerDocumentUrl();
} else {
return this.getCurrentDocumentUrl();
}
}
/**
* Permits to know if the pointer is local to the current document.
* This will be the case for blank nodes, literals and local symbol pointers.
* @returns {boolean}
*/
$rdf.PointedGraph.prototype.isLocalPointer = function() {
return this.getPointerDocumentUrl() == this.getCurrentDocumentUrl();
}
$rdf.PointedGraph.prototype.isRemotePointer = function() {
return !this.isLocalPointer();
}
/**
* Permits to "move" to another subject in the given graph
* @param newPointer
* @returns {$rdf.PointedGraph}
*/
$rdf.PointedGraph.prototype.withPointer = function(newPointer) {
return new $rdf.PointedGraph(this.store, newPointer, this.namedGraphUrl, this.namedGraphFetchUrl);
}
/**
* Permits to know if the given pointer have at least one rel that can be followed.
* This means that the current pointer exists in the local graph as a subject in at least one triple.
*/
$rdf.PointedGraph.prototype.hasRels = function() {
return this.getCurrentDocumentTriplesMatching(this.pointer, undefined, undefined, true).length > 0;
}
/**
* Permits to know if the given pointer have at least one rev that can be followed.
* This means that the current pointer exists in the local graph as an object in at least one triple.
*/
$rdf.PointedGraph.prototype.hasRevs = function() {
return this.getCurrentDocumentTriplesMatching(undefined, undefined, this.pointer, true).length > 0;
}
return $rdf.PointedGraph;
}();
///////////////////////////////////////////////////////////////////////////////////////////////
// fetcherWithPromise.js, part of rdflib-pg-extension.js made by Stample
// see https://github.com/stample/rdflib.js
///////////////////////////////////////////////////////////////////////////////////////////////
/*
TODO:
this proxification code is kind of duplicate of RDFLib's "crossSiteProxyTemplate" code.
How can we make this code be integrated in rdflib nicely?
*/
/**
* Permits to know in which conditions we are using a CORS proxy (if one is configured)
* @param uri
*/
$rdf.Fetcher.prototype.requiresProxy = function(url) {
var isCorsProxyConfigured = $rdf.Fetcher.fetcherWithPromiseCrossSiteProxyTemplate;
if ( !isCorsProxyConfigured ) {
return false;
}
else {
// /!\ this may not work with the original version of RDFLib
var isUriAlreadyProxified = (url.indexOf($rdf.Fetcher.fetcherWithPromiseCrossSiteProxyTemplate) == 0);
var isHomeServerUri = (url.indexOf($rdf.Fetcher.homeServer) == 0)
if ( isUriAlreadyProxified || isHomeServerUri ) {
return false;
} else {
return true;
}
}
}
/**
* permits to proxify the URI
* @param uri
* @returns {string}
*/
$rdf.Fetcher.prototype.proxify = function(uri) {
if ( uri && uri.indexOf('#') != -1 ) {
throw new Error("Tit is forbiden to proxify an uri with a fragment:"+uri);
}
if ( uri && uri.indexOf($rdf.Fetcher.fetcherWithPromiseCrossSiteProxyTemplate) == 0 ) {
throw new Error("You are trying to proxify an URL that seems to already been proxified!"+uri);
}
return $rdf.Fetcher.fetcherWithPromiseCrossSiteProxyTemplate + encodeURIComponent(uri);
};
/**
* Permits to proxify an url if RDFLib is configured to be used with a CORS Proxy
* @param url
* @returns {String} the original url or the proxied url
*/
$rdf.Fetcher.prototype.proxifyIfNeeded = function(url) {
if ( this.requiresProxy(url) ) {
return this.proxify(url);
} else {
return url;
}
}
$rdf.Fetcher.prototype.proxifySymbolIfNeeded = function(symbol) {
$rdf.PG.Utils.checkArgument( $rdf.PG.Utils.isSymbolNode(symbol),"This is not a symbol!"+symbol);
var url = $rdf.PG.Utils.symbolNodeToUrl(symbol);
var proxifiedUrl = this.proxifyIfNeeded(url);
return $rdf.sym(proxifiedUrl);
}
/**
* Return the Promise of a pointed graph for a given url
* @param {String} uri to fetch as string. The URI may contain a fragment because it results in a pointedGraph
* @param {String} referringTerm the uri as string. Referring to the requested url
* @param {boolean} force, force fetching of resource even if already in store
* @return {Promise} of a pointedGraph
*/
$rdf.Fetcher.prototype.fetch = function(uri, referringTerm, force) {
var self = this;
var uriSym = $rdf.sym(uri);
var docUri = $rdf.PG.Utils.fragmentless(uri);
var docUriSym = $rdf.sym(docUri);
// The doc uri to fetch is the doc uri that may have been proxyfied
var docUriToFetch = self.proxifyIfNeeded(docUri);
var docUriToFetchSym = $rdf.sym(docUriToFetch);
// if force mode enabled -> we previously unload so that uriFetchState will be "unrequested"
if ( force ) {
self.unload(docUriToFetchSym);
}
var uriFetchState = self.getState(docUriToFetch);
    // if it was already fetched we directly return a pointed graph pointing into it
if (uriFetchState == 'fetched') {
return Q.fcall(function() {
return $rdf.pointedGraph(self.store, uriSym, docUriSym, docUriToFetchSym)
});
}
// if it was already fetched and there was an error we do not try again
// notice you can call "unload(symbol)" if you want a failed request to be fetched again if needed
else if ( uriFetchState == 'failed') {
return Q.fcall(function() {
throw new Error("Previous fetch has failed for"+docUriToFetch+" -> Will try to fetch it again");
});
}
// else maybe a request for this uri is already pending, or maybe we will have to fire a request
// in both case we are interested in the answer
else if ( uriFetchState == 'requested' || uriFetchState == 'unrequested' ) {
if ( uriFetchState == 'requested') {
console.debug("A request is already being done for",docUriToFetch," -> will wait for that response");
}
var deferred = Q.defer();
self.addCallback('done', function fetchDoneCallback(uriFetched) {
if ( docUriToFetch == uriFetched ) {
deferred.resolve($rdf.pointedGraph(self.store, uriSym, docUriSym, docUriToFetchSym));
return false; // stop
}
return true; // continue
});
self.addCallback('fail', function fetchFailureCallback(uriFetched, statusString, xhr) {
if ( docUriToFetch == uriFetched ) {
deferred.reject(new Error("Async fetch failure [uri="+uri+"][statusCode="+xhr.status+"][reason="+statusString+"]"));
return false; // stop
}
return true; // continue
});
if (uriFetchState == 'unrequested') {
var result = self.requestURI(docUriToFetch, referringTerm, force);
if (result == null) {
// TODO not sure of the effect of this line. This may cause the promise to be resolved twice no?
deferred.resolve($rdf.pointedGraph(self.store, uriSym, docUriSym, docUriToFetchSym));
}
}
return deferred.promise;
}
else {
throw new Error("Unknown and unhandled uriFetchState="+uriFetchState+" - for URI="+uri)
}
}})(this);<|fim▁end|>
|
$rdf.PointedGraph.setLogLevel = function(level) {
|
<|file_name|>basicShape.ts<|end_file_name|><|fim▁begin|>/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals {
export interface BasicShapeDataViewObjects extends DataViewObjects {
general: BasicShapeDataViewObject;
line: LineObject;
fill: FillObject;
lockAspect: LockAspectObject;
rotation: RotationObject;
}
export interface LineObject extends DataViewObject {
lineColor: Fill;
roundEdge: number;
weight: number;
transparency: number;
}
export interface FillObject extends DataViewObject {
transparency: number;
fillColor: Fill;
show: boolean;
}
export interface LockAspectObject extends DataViewObject {
show: boolean;
}
export interface RotationObject extends DataViewObject {
angle: number;
}
export interface BasicShapeDataViewObject extends DataViewObject {
shapeType: string;
shapeSvg: string;
}
export interface BasicShapeData {
shapeType: string;
lineColor: string;
lineTransparency: number;
lineWeight: number;
showFill: boolean;
fillColor: string;
shapeTransparency: number;
lockAspectRatio: boolean;
roundEdge: number;
angle: number;
}
export class BasicShapeVisual implements IVisual {
private currentViewport: IViewport;
private element: JQuery;
private data: BasicShapeData;
private selection: D3.Selection;
public static DefaultShape: string = powerbi.basicShapeType.rectangle;
public static DefaultStrokeColor: string = '#00B8AA';
public static DefaultFillColor: string = '#E6E6E6';
public static DefaultFillTransValue: number = 100;
public static DefaultWeightValue: number = 3;
public static DefaultLineTransValue: number = 100;
public static DefaultRoundEdgeValue: number = 0;
public static DefaultAngle: number = 0;
        /**property for the shape type */
get shapeType(): string {
return this.data ? this.data.shapeType : BasicShapeVisual.DefaultShape;
}
set shapeType(shapeType: string) {
this.data.shapeType = shapeType;
}
/**property for the shape line color */
get lineColor(): string {
return this.data ? this.data.lineColor : BasicShapeVisual.DefaultStrokeColor;
}
set lineColor(color: string) {
this.data.lineColor = color;
}
/**property for the shape line transparency */
get lineTransparency(): number {
return this.data ? this.data.lineTransparency : BasicShapeVisual.DefaultLineTransValue;
}
set lineTransparency(trans: number) {
this.data.lineTransparency = trans;
}
/**property for the shape line weight */
get lineWeight(): number {
return this.data ? this.data.lineWeight : BasicShapeVisual.DefaultWeightValue;
}
set lineWeight(weight: number) {
this.data.lineWeight = weight;
}
/**property for the shape round edge */
get roundEdge(): number {
return this.data ? this.data.roundEdge : BasicShapeVisual.DefaultRoundEdgeValue;
}
set roundEdge(roundEdge: number) {
this.data.roundEdge = roundEdge;
}
/**property for showing the fill properties */
get showFill(): boolean {
return this.data ? this.data.showFill : true;
}
set showFill(show: boolean) {
this.data.showFill = show;
}
        /**property for the shape fill color */
get fillColor(): string {
return this.data ? this.data.fillColor : BasicShapeVisual.DefaultFillColor;
}
set fillColor(color: string) {
this.data.fillColor = color;
}
/**property for the shape fill transparency */
get shapeTransparency(): number {
return this.data ? this.data.shapeTransparency : BasicShapeVisual.DefaultFillTransValue;
}
set shapeTransparency(trans: number) {
this.data.shapeTransparency = trans;
}
/**property for showing the lock aspect ratio */
get lockAspectRatio(): boolean {
return this.data ? this.data.lockAspectRatio : false;
}
set lockAspectRatio(show: boolean) {
this.data.lockAspectRatio = show;
}
/**property for the shape angle */
get angle(): number {
return this.data ? this.data.angle : BasicShapeVisual.DefaultAngle;
}
set angle(angle: number) {
this.data.angle = angle;
}
public init(options: VisualInitOptions) {
this.element = options.element;
this.selection = d3.select(this.element.context);
this.currentViewport = options.viewport;
}
public constructor(options?: VisualInitOptions) {
}
public update(options: VisualUpdateOptions): void {
debug.assertValue(options, 'options');
this.currentViewport = options.viewport;
let dataViews = options.dataViews;
if (!_.isEmpty(dataViews)) {
let dataView = options.dataViews[0];
if (dataView.metadata && dataView.metadata.objects) {
let dataViewObject = <BasicShapeDataViewObjects>options.dataViews[0].metadata.objects;
this.data = this.getDataFromDataView(dataViewObject);
}
}
this.render();
}
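        // Illustrative flow (a sketch): the host calls update() with dataViews whose
        // metadata.objects carry the format-pane settings; getDataFromDataView()
        // merges them over the current defaults before render() redraws the shape.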
private getDataFromDataView(dataViewObject: BasicShapeDataViewObjects): BasicShapeData {
if (dataViewObject) {
return {
shapeType: dataViewObject.general !== undefined && dataViewObject.general.shapeType !== undefined ? dataViewObject.general.shapeType : this.shapeType,
lineColor: dataViewObject.line !== undefined && dataViewObject.line.lineColor !== undefined ? dataViewObject.line.lineColor.solid.color : this.lineColor,
lineTransparency: dataViewObject.line !== undefined && dataViewObject.line.transparency !== undefined ? dataViewObject.line.transparency : this.lineTransparency,
lineWeight: dataViewObject.line !== undefined && dataViewObject.line.weight !== undefined ? dataViewObject.line.weight : this.lineWeight,
roundEdge: dataViewObject.line !== undefined && dataViewObject.line.roundEdge !== undefined ? dataViewObject.line.roundEdge : this.roundEdge,
shapeTransparency: dataViewObject.fill !== undefined && dataViewObject.fill.transparency !== undefined ? dataViewObject.fill.transparency : this.shapeTransparency,
fillColor: dataViewObject.fill !== undefined && dataViewObject.fill.fillColor !== undefined ? dataViewObject.fill.fillColor.solid.color : this.fillColor,
showFill: dataViewObject.fill !== undefined && dataViewObject.fill.show !== undefined ? dataViewObject.fill.show : this.showFill,
lockAspectRatio: dataViewObject.lockAspect !== undefined && dataViewObject.lockAspect.show !== undefined ? dataViewObject.lockAspect.show : this.lockAspectRatio,
angle: dataViewObject.rotation !== undefined && dataViewObject.rotation.angle !== undefined ? dataViewObject.rotation.angle : this.angle
};
}
return null;
}
public enumerateObjectInstances(options: EnumerateVisualObjectInstancesOptions): VisualObjectInstance[] {
let objectInstances: VisualObjectInstance[] = [];
switch (options.objectName) {
case 'line':
let instance: VisualObjectInstance = {
selector: null,
properties: {
lineColor: this.lineColor,
transparency: this.lineTransparency,
weight: this.lineWeight
},
objectName: options.objectName
};
if (this.data.shapeType === powerbi.basicShapeType.rectangle) {
instance.properties['roundEdge'] = this.roundEdge;
}
objectInstances.push(instance);
return objectInstances;
case 'fill':
objectInstances.push({
selector: null,
properties: {
show: this.showFill,
fillColor: this.fillColor,
transparency: this.shapeTransparency
},
objectName: options.objectName
});
return objectInstances;
case 'lockAspect':
objectInstances.push({
selector: null,
properties: {
show: this.lockAspectRatio
},
objectName: options.objectName
});
return objectInstances;
case 'rotation':
objectInstances.push({
selector: null,
properties: {
angle: this.angle
},
objectName: options.objectName
});
return objectInstances;
}
return null;
}
public render(): void {
this.selection.html('');
switch (this.shapeType) {
case powerbi.basicShapeType.rectangle:
shapeFactory.createRectangle(
this.data, this.currentViewport.height, this.currentViewport.width, this.selection, this.angle);
break;
case powerbi.basicShapeType.oval:
shapeFactory.createOval(
this.data, this.currentViewport.height, this.currentViewport.width, this.selection, this.angle);
break;
case powerbi.basicShapeType.line:
shapeFactory.createLine(
this.data, this.currentViewport.height, this.currentViewport.width, this.selection, this.angle);
break;
case powerbi.basicShapeType.arrow:
shapeFactory.createUpArrow(
this.data, this.currentViewport.height, this.currentViewport.width, this.selection, this.angle);
break;
case powerbi.basicShapeType.triangle:
shapeFactory.createTriangle(
this.data, this.currentViewport.height, this.currentViewport.width, this.selection, this.angle);
break;<|fim▁hole|> }
}
}
}<|fim▁end|>
|
default:
break;
|
<|file_name|>androidUnlock.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});<|fim▁hole|><|fim▁end|>
|
var androidUnlock = exports.androidUnlock = { "viewBox": "0 0 512 512", "children": [{ "name": "path", "attribs": { "d": "M376,186h-20v-40c0-55-45-100-100-100S156,91,156,146h37.998c0-34.004,28.003-62.002,62.002-62.002\r\n\tc34.004,0,62.002,27.998,62.002,62.002H318v40H136c-22.002,0-40,17.998-40,40v200c0,22.002,17.998,40,40,40h240\r\n\tc22.002,0,40-17.998,40-40V226C416,203.998,398.002,186,376,186z M256,368c-22.002,0-40-17.998-40-40s17.998-40,40-40\r\n\ts40,17.998,40,40S278.002,368,256,368z" }, "children": [] }] };
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from helusers.models import AbstractUser
<|fim▁hole|>class User(AbstractUser):
pass<|fim▁end|>
| |
<|file_name|>termination.py<|end_file_name|><|fim▁begin|># Copyright 2015-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import struct
from .message import Message
@Message.register
class Termination(Message):
TYPE = Message.TERMINATION
TYPE_STR = 'termination'
reason_codict = {
0: "Session administratively closed. The session might be re-initiated.",
1: "Unspecified reason.",
2: "Out of resources. The router has exhausted resources available for the BMP session.",
3: "Redundant connection. The router has determined\
that this connection is redundant with another one.",
4: "Session permanently administratively closed,\
will not be re-initiated. Monitoring station should reduce\
(potentially to 0) the rate at which it attempts\
reconnection to the monitored router."
}<|fim▁hole|> def unpack(cls, data):
infor_tlv = dict()
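        # Each BMP information TLV is laid out as a 2-byte type and a
        # 2-byte length (both network byte order), followed by
        # `info_len` bytes of value.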
while data:
info_type, info_len = struct.unpack('!HH', data[0:4])
info_value = data[4: 4 + info_len]
if info_type == 0:
infor_tlv['string'] = info_value.decode('ascii')
elif info_type == 1:
infor_tlv['reason'] = cls.reason_codict[struct.unpack('!H', info_value)[0]]
data = data[4 + info_len:]
return cls(value=infor_tlv)<|fim▁end|>
|
@classmethod
|
<|file_name|>brain_subprocess.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 Claudiu Popa <[email protected]>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import textwrap
import six
import astroid
PY33 = sys.version_info >= (3, 3)
PY36 = sys.version_info >= (3, 6)
def _subprocess_transform():
if six.PY3:
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
communicate_signature = 'def communicate(self, input=None, timeout=None)'
if PY36:
init = """
def __init__(self, args, bufsize=0, executable=None,<|fim▁hole|> startupinfo=None, creationflags=0, restore_signals=True,
start_new_session=False, pass_fds=(), *,
encoding=None, errors=None):
pass
"""
else:
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0, restore_signals=True,
start_new_session=False, pass_fds=()):
pass
"""
else:
communicate = ('string', 'string')
communicate_signature = 'def communicate(self, input=None)'
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
pass
"""
if PY33:
wait_signature = 'def wait(self, timeout=None)'
else:
wait_signature = 'def wait(self)'
if six.PY3:
ctx_manager = '''
def __enter__(self): return self
def __exit__(self, *args): pass
'''
else:
ctx_manager = ''
code = textwrap.dedent('''
class Popen(object):
returncode = pid = 0
stdin = stdout = stderr = file()
%(communicate_signature)s:
return %(communicate)r
%(wait_signature)s:
return self.returncode
def poll(self):
return self.returncode
def send_signal(self, signal):
pass
def terminate(self):
pass
def kill(self):
pass
%(ctx_manager)s
''' % {'communicate': communicate,
'communicate_signature': communicate_signature,
'wait_signature': wait_signature,
'ctx_manager': ctx_manager})
init_lines = textwrap.dedent(init).splitlines()
indented_init = '\n'.join([' ' * 4 + line for line in init_lines])
code += indented_init
return astroid.parse(code)
astroid.register_module_extender(astroid.MANAGER, 'subprocess', _subprocess_transform)<|fim▁end|>
|
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
|
<|file_name|>mrp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
        @return: List of product line values created for the production order.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)<|fim▁hole|>
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
production = self.browse(cr, uid, production_id, context=context)
if not production.bom_id and production.state == 'ready':
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
mrp_production()<|fim▁end|>
|
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
|
<|file_name|>struct-field-privacy.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct A {
a: int,
pub b: int,
}
pub struct B {
a: int,
priv b: int,
}<|fim▁end|>
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import containers from './containers'<|fim▁hole|>
module.exports = {...containers, ...ui, App}<|fim▁end|>
|
import ui from './ui'
import App from './App'
|
<|file_name|>TcpdumpWrapper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding: UTF-8
# TcpdumpWrapper
#
# Copyright(c) 2015-2018 Uptime Technologies, LLC.
from datetime import datetime, timedelta, date, time
import copy
import hashlib
import os
import re
import subprocess
import sys
import log
class TcpdumpPacket:
def __init__(self, ts, src, dst, bytes, debug=None):
self.ts = self.string2timestamp(ts)
self.src = src
self.dst = dst
self.debug = debug
self.bytes = bytes
self.messages = []
log.debug("+ " + ts + " " + src + " " + dst)
self.end()
self.session_id = self.get_session_id()
def string2timestamp(self, ts):
t = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
log.debug("ts = " + str(t))
return t
def get_session_id(self):
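        # Hash the sorted endpoint pair so that packets travelling in
        # either direction of a TCP connection map to the same session id.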
src_dst = [self.iph_src + ":" + str(self.tcph_src),
self.iph_dst + ":" + str(self.tcph_dst)]
ss = ""
for s in sorted(src_dst):
ss = ss + s + " "
return hashlib.md5(ss).hexdigest()[0:12]
def get_timestamp(self):
log.debug("get_timestamp: %s" % str(self.ts))
return self.ts
def get_messages(self):
return self.messages
def parse_ip_header(self, data):
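        # IPv4 fixed header layout: version/IHL share byte 0, TOS is
        # byte 1, total length is bytes 2-3, ID is bytes 4-5, the source
        # address is bytes 12-15 and the destination address bytes 16-19.
        # IHL counts 32-bit words, so the header is IHL * 4 bytes long.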
self.iph_version = (data[0] >> 4) & 0b1111
self.iph_header_len = data[0] & 0b1111
self.iph_tos = data[1]
self.iph_dgram_len = (data[2] << 8) + data[3]
self.iph_id = (data[4] << 8) + data[5]
        self.iph_src = "%d.%d.%d.%d" % (data[12], data[13], data[14], data[15])
        self.iph_dst = "%d.%d.%d.%d" % (data[16], data[17], data[18], data[19])
if self.debug is True:
print("version : %d" % self.iph_version)
print("hd len : %d (%d)" % (self.iph_header_len,
self.iph_header_len * 4))
print("tos : %d" % self.iph_tos)
print("dgram len: %d" % self.iph_dgram_len)
print("data len: %d" % (self.iph_dgram_len -
self.iph_header_len*4))
print("id : %d" % self.iph_id)
print("dst : %s" % (self.iph_dst))
print("src : %s" % (self.iph_src))
return self.iph_header_len * 4
def parse_tcp_header(self, data):
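        # TCP header layout: source port in bytes 0-1, destination port
        # in bytes 2-3, sequence number in bytes 4-7; the data offset
        # (header length in 32-bit words) is the high nibble of byte 12.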
self.tcph_src = (data[0] << 8) + data[1]
self.tcph_dst = (data[2] << 8) + data[3]
self.tcph_seq = ((data[4] << 24) + (data[5] << 16) +
(data[6] << 8) + data[7])
self.tcph_offset = (data[12] >> 4) & 0b1111
if self.debug is True:
print("src port : %d" % (self.tcph_src))
print("dst port : %d" % (self.tcph_dst))
print("seq : %d" % (self.tcph_seq))
print("offset : %d (%d)" % (self.tcph_offset,
self.tcph_offset * 4))
return self.tcph_offset * 4
def end(self):<|fim▁hole|> iph_len = self.parse_ip_header(self.bytes[cur:])
cur = cur + iph_len
tcph_len = self.parse_tcp_header(self.bytes[cur:])
cur = cur + tcph_len
self.payload = self.bytes[cur:]
s = ""
for d in self.payload:
s = s + "%02x " % (d)
log.debug("payload: " + s)
if len(self.payload) >= 5:
pos = 0
cont = True
while cont:
if len(self.payload[pos:]) < 5:
cont = False
break
ch = self.read_char(self.payload[pos:])
# if not(ch >= 48 and ch <= 122):
# break
pos = pos + 1
i = self.read_int32(self.payload[pos:])
pos = pos + 4
log.debug("sess: " + self.get_session_id() + ": " +
str(self.ts) + ": %c[%x] len=%d" % (ch, ch, i))
# client to server
if ch == ord('S'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('Q'):
s = self.read_string(self.payload[pos:], i - 4)
log.debug(s)
self.messages.append([chr(ch), s])
elif ch == ord('P'):
s = self.read_string(self.payload[pos:], i - 4)
s1 = s.split('\0')
log.debug("> " + s1[0] + "," + s1[1])
self.messages.append([chr(ch), s1[0], s1[1]])
elif ch == ord('E'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
elif ch == ord('B'):
s = self.read_string(self.payload[pos:], i - 4)
s1 = s.split('\0')
log.debug("> " + s1[0] + "," + s1[1])
self.messages.append([chr(ch), s1[0], s1[1]])
elif ch == ord('X'):
self.messages.append([chr(ch), None])
cont = False
# server to client
elif ch == ord('T'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('D'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('C'):
s = self.read_string(self.payload[pos:], i - 4)
self.messages.append([chr(ch), s])
log.debug(s)
elif ch == ord('1'):
self.messages.append([chr(ch), None])
elif ch == ord('2'):
self.messages.append([chr(ch), None])
elif ch == ord('n'):
self.messages.append([chr(ch), None])
elif ch == ord('Z'):
self.messages.append([chr(ch), None])
cont = False
pos = pos + (i - 4)
def parse(self):
self.pos = 12
while len(self.payload) > self.pos + 5:
c = self.read_char()
log.debug("%02x(%c)" % (c, c))
l = self.read_int32()
log.debug(l)
self.pos = self.pos + l
def read_char(self, data):
ch = data[0]
return ch
def read_int32(self, data):
i = (data[0] << 24) + (data[1] << 16) + (data[2] << 8) + (data[3])
return i
def read_string(self, data, size):
s = ""
i = 0
while i < size:
s = s + "%c" % data[i]
i = i + 1
return s
class TcpdumpWrapper:
pkt = None
tcpdump = None
process = None
line = None
def __init__(self, host=None, port=None, interface=None, inputfile=None,
debug=None):
if debug is True:
log.setLevel(log.DEBUG)
self.host = host
self.port = port
self.iface = interface
self.inputfile = inputfile
self.debug = debug
if self.port is None:
self.port = "5432"
if self.iface is None:
self.iface = "any"
self.tcpdump = "tcpdump -tttt"
if self.inputfile is not None:
self.tcpdump = self.tcpdump + " -r " + self.inputfile
self.tcpdump = (self.tcpdump + " -l -i " + self.iface +
" -s 0 -X -p tcp port " + str(self.port))
if self.host is not None:
self.tcpdump = self.tcpdump + " and host " + self.host
log.info(self.tcpdump)
self.process = subprocess.Popen([self.tcpdump],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
bufsize=0)
# header
self.p1 = re.compile('^([\d-]+) ([\d:\.]+) IP (.*) > (.*): Flags')
# data
self.p2 = re.compile('^\t+0x(.+): (.+) ')
def get_packet(self):
self.data = ""
self.hdr = None
while True:
if self.line is None:
self.line = self.readline()
if self.line is None:
if self.hdr is not None and self.data is not None:
# EOF found.
pkt = TcpdumpPacket(self.hdr[0], self.hdr[1],
self.hdr[2],
self.parse_data(self.data),
self.debug)
self.data = ""
self.hdr = None
return pkt
else:
return None
if self.line[0] != '\t':
if len(self.data) > 0:
pkt = TcpdumpPacket(self.hdr[0], self.hdr[1], self.hdr[2],
self.parse_data(self.data), self.debug)
self.data = ""
self.hdr = None
return pkt
self.hdr = self.parse_header(self.line)
# d: ts, src, dest
log.debug("Header: ts=" + self.hdr[0] + ", src=" +
self.hdr[1] + ", dest=" + self.hdr[2])
else:
# log.debug(">" + self.line[10:50] + "<")
self.data = self.data + self.line[10:50]
self.line = None
def get_last_packet(self):
pkt = None
if self.hdr is not None and self.data is not None:
pkt = TcpdumpPacket(self.hdr[0], self.hdr[1], self.hdr[2],
self.parse_data(self.data), self.debug)
self.data = ""
self.hdr = None
return pkt
def readline(self):
self.process.stdout.flush()
line = self.process.stdout.readline()
if len(line) == 0:
# EOF
self.process.poll()
if self.process.returncode != 0:
log.error("%s" % self.process.stderr.readline())
sys.exit(1)
log.debug("readline: EOF")
return None
return line.rstrip()
def parse_header(self, line):
r1 = self.p1.match(line)
if r1 is not None:
# Header line may look like this:
# 2015-04-30 13:33:27.579311 IP localhost.55641 > \
# localhost.postgres: Flags [.], ack 66, win 664, \
# options [nop,nop,TS val 265008484 ecr 265008484], length 0
ts = r1.group(1) + " " + r1.group(2)
src = r1.group(3)
dest = r1.group(4)
return [ts, src, dest]
return None
def parse_data(self, line):
bytes = []
# line:
# 4500 0039 e080 4000 4006 5c3c 7f00 0001
offset = 0
length = len(line)
log.debug("! " + line)
cur = 0
while cur < length:
if line[cur] == ' ':
cur = cur + 1
continue
# chr to hex
h = int(line[cur:cur+2], 16)
bytes.append(h)
# print(payload[cur:cur+2] + ", " + str(h))
cur = cur + 2
return bytes
def parse(self, line):
# return true when messages avalable.
msg_avail = False
log.debug("parse: " + line)
# packet header info
r1 = self.p1.match(line)
if r1 is not None:
# Header line may look like this:
# 13:33:27.579311 IP localhost.55641 > localhost.postgres: \
# Flags [.], ack 66, win 664, options \
# [nop,nop,TS val 265008484 ecr 265008484], length 0
log.debug("Header: " + line)
ts = r1.group(1)
src = r1.group(2)
dest = r1.group(3)
# close the previous packet
if self.pkt is not None:
self.pkt.end()
# retreive all info/messages in the previous packet.
self.session_id = self.pkt.get_session_id()
self.timestamp = self.pkt.get_timestamp()
self.messages = copy.deepcopy(self.pkt.get_messages())
msg_avail = True
for m in self.pkt.get_messages():
log.debug("ts:%s cmd:%c msg:%s" %
(str(self.pkt.get_timestamp()), m[0], m[1]))
# new packet coming
self.pkt = TcpdumpPacket(ts, src, dest, self.debug)
self.packets.append(self.pkt)
# packet bytes
r2 = self.p2.match(line)
if r2 is not None:
log.debug("matched r2: " + line)
if self.pkt is not None:
log.debug("append_bytes: " + line)
self.pkt.append_bytes(r2.group(2))
return msg_avail
def print_packets(self):
print(self.packets)<|fim▁end|>
|
cur = 0
|
<|file_name|>signal_propagation_shell.py<|end_file_name|><|fim▁begin|>import os
import sys<|fim▁hole|>import signal
import time
import subprocess
WHO = None
def handler(signum, frame):
global WHO
print('Signal handler', signum, WHO, frame)
print('Disable handler', signum, WHO, frame)
signal.signal(signal.SIGINT, signal.SIG_DFL)
def main(argv):
global WHO
WHO = argv[1]
if WHO == 'parent':
signal.signal(signal.SIGINT, handler)
p = subprocess.Popen('python3 signal_propagation.py child',
shell=True)
for index in range(0, 10):
time.sleep(1)
print('Sleep', index, WHO)
if WHO == 'parent':
p.send_signal(signal.SIGINT)
p.communicate()
else:
while True:
time.sleep(1)
print('Sleep 1 infinity')
if __name__ == '__main__':
main(sys.argv)<|fim▁end|>
| |
<|file_name|>timer-impl.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Requires polyfills in immediate side effect.
import '../polyfills';
import {user} from '../log';
import {fromClass} from '../service';
/**
* Helper with all things Timer.
*/
export class Timer {
/**
* @param {!Window} win
*/
constructor(win) {
/** @const {!Window} */
this.win = win;
/** @private @const {!Promise} */
this.resolved_ = Promise.resolve();
this.taskCount_ = 0;
<|fim▁hole|> /** @const {number} */
this.startTime_ = Date.now();
}
/**
* Returns time since start in milliseconds.
* @return {number}
*/
timeSinceStart() {
return Date.now() - this.startTime_;
}
/**
* Runs the provided callback after the specified delay. This uses a micro
* task for 0 or no specified time. This means that the delay will actually
* be close to 0 and this will NOT yield to the event queue.
*
* Returns the timer ID that can be used to cancel the timer (cancel method).
* @param {!function()} callback
* @param {number=} opt_delay
* @return {number|string}
*/
delay(callback, opt_delay) {
if (!opt_delay) {
// For a delay of zero, schedule a promise based micro task since
// they are predictably fast.
const id = 'p' + this.taskCount_++;
this.resolved_.then(() => {
if (this.canceled_[id]) {
delete this.canceled_[id];
return;
}
callback();
});
return id;
}
return this.win.setTimeout(callback, opt_delay);
}
/**
* Cancels the previously scheduled callback.
* @param {number|string|null} timeoutId
*/
cancel(timeoutId) {
if (typeof timeoutId == 'string') {
this.canceled_[timeoutId] = true;
return;
}
this.win.clearTimeout(timeoutId);
}
/**
* Returns a promise that will resolve after the delay. Optionally, the
* resolved value can be provided as opt_result argument.
* @param {number=} opt_delay
* @param {RESULT=} opt_result
* @return {!Promise<RESULT>}
* @template RESULT
*/
promise(opt_delay, opt_result) {
let timerKey = null;
return new Promise((resolve, reject) => {
timerKey = this.delay(() => {
timerKey = -1;
resolve(opt_result);
}, opt_delay);
if (timerKey == -1) {
reject(new Error('Failed to schedule timer.'));
}
}).catch(error => {
// Clear the timer. The most likely reason is "cancel" signal.
if (timerKey != -1) {
this.cancel(timerKey);
}
throw error;
});
}
/**
* Returns a promise that will fail after the specified delay. Optionally,
* this method can take opt_racePromise parameter. In this case, the
* resulting promise will either fail when the specified delay expires or
* will resolve based on the opt_racePromise, whichever happens first.
* @param {number} delay
* @param {!Promise<RESULT>|undefined} opt_racePromise
* @param {string=} opt_message
* @return {!Promise<RESULT>}
* @template RESULT
*/
timeoutPromise(delay, opt_racePromise, opt_message) {
let timerKey = null;
const delayPromise = new Promise((_resolve, reject) => {
timerKey = this.delay(() => {
timerKey = -1;
reject(user().createError(opt_message || 'timeout'));
}, delay);
if (timerKey == -1) {
reject(new Error('Failed to schedule timer.'));
}
}).catch(error => {
// Clear the timer. The most likely reason is "cancel" signal.
if (timerKey != -1) {
this.cancel(timerKey);
}
throw error;
});
if (!opt_racePromise) {
return delayPromise;
}
return Promise.race([delayPromise, opt_racePromise]);
}
}
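// Illustrative usage sketch (fetchPromise and the handlers are hypothetical):
//   const timer = new Timer(window);
//   timer.timeoutPromise(5000, fetchPromise, 'fetch timed out')
//       .then(handleResponse, handleTimeoutOrError);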
/**
* @param {!Window} window
* @return {!Timer}
*/
export function installTimerService(window) {
return fromClass(window, 'timer', Timer);
};<|fim▁end|>
|
this.canceled_ = {};
|
<|file_name|>test_env.py<|end_file_name|><|fim▁begin|>from mock import MagicMock, mock_open, patch
from unittest import TestCase
from warnings import simplefilter
import pconf
from pconf.store.env import Env
TEST_ENV_BASE_VARS = {
"env__var": "result",
"env__var_2": "second_result",
}
TEST_ENV_MATCHED_VARS = {"matched_var": "match"}
TEST_ENV_WHITELIST_VARS = {"whitelisted_var": "whitelist"}
TEST_SEPARATED_VARS = {"env": {"var": "result", "var_2": "second_result"}}
TEST_ENV_VARS = dict(TEST_ENV_WHITELIST_VARS, **TEST_ENV_MATCHED_VARS)
TEST_SEPARATED_VARS = dict(TEST_SEPARATED_VARS, **TEST_ENV_VARS)
TEST_ENV_VARS = dict(TEST_ENV_VARS, **TEST_ENV_BASE_VARS)
TEST_ENV_CONVERTED = {
"env--var": "result",
"env--var-2": "second_result",
"matched-var": "match",
"whitelisted-var": "whitelist",
}
TEST_ENV_CONVERTED_SEPARATED = {
"env": {"var": "result", "var-2": "second_result"},
"matched-var": "match",
"whitelisted-var": "whitelist",
}
TEST_ENV_UPPERCASE = {
"ENV__VAR": "result",
"ENV__VAR_2": "second_result",
"MATCHED_VAR": "match",
"WHITELISTED_VAR": "whitelist",
}
TEST_ENV_TYPED_VARS = {
"key": "value",
"int": "123",
"float": "1.23",
"complex": "1+2j",
"list": "['list1', 'list2', {'dict_in_list': 'value'}]",
"dict": "{'nested_dict': 'nested_value'}",
"tuple": "(123, 'string')",
"bool": "True",
"boolstring": "false",
"string_with_specials": "Test!@#$%^&*()-_=+[]{};:,<.>/?\\'\"`~",
} # noqa: E501
TEST_ENV_TYPED_VARS_PARSED = {
"key": "value",
"int": 123,
"float": 1.23,
"complex": 1 + 2j,
"list": ["list1", "list2", {"dict_in_list": "value"}],
"dict": {"nested_dict": "nested_value"},
"tuple": (123, "string"),
"bool": True,
"boolstring": False,
"string_with_specials": "Test!@#$%^&*()-_=+[]{};:,<.>/?\\'\"`~",
} # noqa: E501
TEST_ENV_DOCKER_SECRETS = {"MY_EXAMPLE_SECRET_FILE": "/run/secrets/my_example_secret"}
TEST_ENV_DOCKER_SECRETS_INVALID_POSTFIX = {
"MY_EXAMPLE_SECRET": "/run/secrets/my_example_secret"
}
TEST_DOCKER_SECRET_CONTENT = "mysecret"
TEST_DOCKER_SECRETS_RESULT = {"MY_EXAMPLE_SECRET": TEST_DOCKER_SECRET_CONTENT}
TEST_SEPARATOR = "__"
TEST_MATCH = r"^matched"
TEST_WHITELIST = ["whitelisted_var", "whitelist2"]
TEST_PARSE_VALUES = True
TEST_TO_LOWER = True
TEST_CONVERT_UNDERSCORES = True
TEST_DOCKER_SECRETS = list(TEST_ENV_DOCKER_SECRETS.keys())
TEST_DOCKER_SECRETS_INVALID_POSTFIX = ["MY_EXAMPLE_SECRET"]
TEST_DOCKER_SECRETS_PATH = str(list(TEST_DOCKER_SECRETS_RESULT.values())[0])
MOCK_OPEN_FUNCTION = "builtins.open"
def throw_ioerror(*args, **kwargs):
raise IOError("test")
class TestEnv(TestCase):
def test_default_params(self):
env_store = Env()
self.assertEqual(env_store.separator, None)
self.assertEqual(env_store.match, None)
self.assertEqual(env_store.whitelist, None)
self.assertEqual(env_store.parse_values, False)
self.assertEqual(env_store.to_lower, False)
self.assertEqual(env_store.convert_underscores, False)
def test_optional_params(self):
env_store = Env(
separator=TEST_SEPARATOR,
match=TEST_MATCH,
whitelist=TEST_WHITELIST,
parse_values=TEST_PARSE_VALUES,
to_lower=TEST_TO_LOWER,
convert_underscores=TEST_CONVERT_UNDERSCORES,
)<|fim▁hole|> self.assertEqual(env_store.whitelist, TEST_WHITELIST)
self.assertEqual(env_store.parse_values, TEST_PARSE_VALUES)
self.assertEqual(env_store.to_lower, TEST_TO_LOWER)
self.assertEqual(env_store.convert_underscores, TEST_CONVERT_UNDERSCORES)
@patch("pconf.store.env.os", new=MagicMock())
def test_get_all_vars(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env()
result = env_store.get()
self.assertEqual(result, TEST_ENV_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_get_idempotent(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env()
result = env_store.get()
self.assertEqual(result, TEST_ENV_VARS)
self.assertIsInstance(result, dict)
pconf.store.env.os.environ = TEST_ENV_BASE_VARS
result = env_store.get()
self.assertEqual(result, TEST_ENV_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_whitelist(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(whitelist=TEST_WHITELIST)
result = env_store.get()
self.assertEqual(result, TEST_ENV_WHITELIST_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_match(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(match=TEST_MATCH)
result = env_store.get()
self.assertEqual(result, TEST_ENV_MATCHED_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_whitelist_and_match(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(match=TEST_MATCH, whitelist=TEST_WHITELIST)
result = env_store.get()
self.assertEqual(result, dict(TEST_ENV_MATCHED_VARS, **TEST_ENV_WHITELIST_VARS))
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_separator(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(separator=TEST_SEPARATOR)
result = env_store.get()
self.assertEqual(result, TEST_SEPARATED_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_parse_values(self):
pconf.store.env.os.environ = TEST_ENV_TYPED_VARS
env_store = Env(parse_values=TEST_PARSE_VALUES)
result = env_store.get()
self.assertEqual(result, TEST_ENV_TYPED_VARS_PARSED)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_lowercase_conversion(self):
pconf.store.env.os.environ = TEST_ENV_UPPERCASE
env_store = Env(to_lower=TEST_TO_LOWER)
result = env_store.get()
self.assertEqual(result, TEST_ENV_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_lowercase_and_separator(self):
pconf.store.env.os.environ = TEST_ENV_UPPERCASE
env_store = Env(separator=TEST_SEPARATOR, to_lower=TEST_TO_LOWER)
result = env_store.get()
self.assertEqual(result, TEST_SEPARATED_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_convert_underscore_replacement(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(convert_underscores=TEST_CONVERT_UNDERSCORES)
result = env_store.get()
self.assertEqual(result, TEST_ENV_CONVERTED)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_convert_underscore_and_separator(self):
pconf.store.env.os.environ = TEST_ENV_VARS
env_store = Env(
separator=TEST_SEPARATOR, convert_underscores=TEST_CONVERT_UNDERSCORES
)
result = env_store.get()
self.assertEqual(result, TEST_ENV_CONVERTED_SEPARATED)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_parse_and_split_order(self):
pconf.store.env.os.environ = TEST_ENV_VARS
try:
env_store = Env(separator=TEST_SEPARATOR, parse_values=TEST_PARSE_VALUES)
except AttributeError:
self.fail("Parsing environment variables raised AttributeError")
result = env_store.get()
self.assertEqual(result, TEST_SEPARATED_VARS)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
@patch(MOCK_OPEN_FUNCTION, mock_open(read_data=TEST_DOCKER_SECRETS_PATH))
def test_docker_secrets(self):
pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS
env_store = Env(docker_secrets=TEST_DOCKER_SECRETS)
result = env_store.get()
self.assertEqual(list(result.keys()), list(TEST_DOCKER_SECRETS_RESULT.keys()))
self.assertEqual(result, TEST_DOCKER_SECRETS_RESULT)
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
def test_docker_secrets_invalid_postfix(self):
pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS_INVALID_POSTFIX
env_store = Env(docker_secrets=TEST_DOCKER_SECRETS_INVALID_POSTFIX)
result = env_store.get()
self.assertEqual(result, {})
self.assertIsInstance(result, dict)
@patch("pconf.store.env.os", new=MagicMock())
@patch(MOCK_OPEN_FUNCTION, side_effect=throw_ioerror)
def test_docker_secrets_nonexistent_file(self, mock_open):
simplefilter("ignore")
pconf.store.env.os.environ = TEST_ENV_DOCKER_SECRETS
env_store = Env(docker_secrets=TEST_DOCKER_SECRETS)
result = env_store.get()
self.assertEqual(result, {})
self.assertIsInstance(result, dict)<|fim▁end|>
|
self.assertEqual(env_store.separator, TEST_SEPARATOR)
self.assertEqual(env_store.match, TEST_MATCH)
|
<|file_name|>SI_DeviceKeyboard.cpp<|end_file_name|><|fim▁begin|>// Copyright 2015 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include "Core/HW/SI/SI_DeviceKeyboard.h"
#include <cstring>
#include "Common/ChunkFile.h"
#include "Common/CommonTypes.h"
#include "Common/Logging/Log.h"
#include "Common/Swap.h"
#include "Core/HW/GCKeyboard.h"
#include "InputCommon/KeyboardStatus.h"
namespace SerialInterface
{
// --- GameCube keyboard ---
CSIDevice_Keyboard::CSIDevice_Keyboard(SIDevices device, int device_number)
: ISIDevice(device, device_number)
{
}
int CSIDevice_Keyboard::RunBuffer(u8* buffer, int request_length)
{
// For debug logging only
ISIDevice::RunBuffer(buffer, request_length);
// Read the command
const auto command = static_cast<EBufferCommands>(buffer[0]);
// Handle it
switch (command)
{
case CMD_RESET:
case CMD_ID:
{
u32 id = Common::swap32(SI_GC_KEYBOARD);
std::memcpy(buffer, &id, sizeof(id));
return sizeof(id);
}
case CMD_DIRECT:
{
INFO_LOG_FMT(SERIALINTERFACE, "Keyboard - Direct (Request Length: {})", request_length);
u32 high, low;
GetData(high, low);
for (int i = 0; i < 4; i++)
{
buffer[i + 0] = (high >> (24 - (i * 8))) & 0xff;
buffer[i + 4] = (low >> (24 - (i * 8))) & 0xff;
}
return sizeof(high) + sizeof(low);
}
default:
{
ERROR_LOG_FMT(SERIALINTERFACE, "Unknown SI command ({:#x})", command);
}
break;
}
return 0;
}
KeyboardStatus CSIDevice_Keyboard::GetKeyboardStatus() const
{
return Keyboard::GetStatus(m_device_number);
}
bool CSIDevice_Keyboard::GetData(u32& hi, u32& low)
{
KeyboardStatus key_status = GetKeyboardStatus();
u8 key[3] = {0x00, 0x00, 0x00};
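  // The response packs a 4-bit poll counter into the high word and the
  // three scancodes plus an XOR checksum into the low word.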
MapKeys(key_status, key);
u8 checksum = key[0] ^ key[1] ^ key[2] ^ m_counter;
hi = m_counter << 24;
low = key[0] << 24 | key[1] << 16 | key[2] << 8 | checksum;
return true;
}
void CSIDevice_Keyboard::SendCommand(u32 command, u8 poll)
{
UCommand keyboard_command(command);
switch (keyboard_command.command)
{
case 0x00:
break;
case CMD_POLL:
{
m_counter++;
m_counter &= 15;
}
break;
default:
{
ERROR_LOG_FMT(SERIALINTERFACE, "Unknown direct command ({:#x})", command);
}
break;
}
}
void CSIDevice_Keyboard::DoState(PointerWrap& p)
{
p.Do(m_counter);
}
void CSIDevice_Keyboard::MapKeys(const KeyboardStatus& key_status, u8* key)
{
u8 keys_held = 0;
const u8 MAX_KEYS_HELD = 3;
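  // The GC keyboard reports at most three simultaneously held keys;
  // scanning stops as soon as three scancodes have been collected.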
if (key_status.key0x & KEYMASK_HOME)
{
key[keys_held++] = KEY_HOME;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_END)
{
key[keys_held++] = KEY_END;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_PGUP)
{
key[keys_held++] = KEY_PGUP;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_PGDN)
{
key[keys_held++] = KEY_PGDN;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_SCROLLLOCK)
{
key[keys_held++] = KEY_SCROLLLOCK;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_A)
{
key[keys_held++] = KEY_A;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_B)
{
key[keys_held++] = KEY_B;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_C)
{
key[keys_held++] = KEY_C;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_D)
{
key[keys_held++] = KEY_D;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_E)
{
key[keys_held++] = KEY_E;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_F)
{
key[keys_held++] = KEY_F;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_G)
{
key[keys_held++] = KEY_G;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_H)
{
key[keys_held++] = KEY_H;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_I)
{
key[keys_held++] = KEY_I;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_J)
{
key[keys_held++] = KEY_J;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key0x & KEYMASK_K)
{
key[keys_held++] = KEY_K;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_L)
{
key[keys_held++] = KEY_L;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_M)
{
key[keys_held++] = KEY_M;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_N)
{
key[keys_held++] = KEY_N;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_O)
{
key[keys_held++] = KEY_O;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_P)
{
key[keys_held++] = KEY_P;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_Q)
{
key[keys_held++] = KEY_Q;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_R)
{
key[keys_held++] = KEY_R;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_S)
{
key[keys_held++] = KEY_S;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_T)
{
key[keys_held++] = KEY_T;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_U)
{
key[keys_held++] = KEY_U;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_V)
{
key[keys_held++] = KEY_V;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_W)
{
key[keys_held++] = KEY_W;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_X)
{
key[keys_held++] = KEY_X;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_Y)
{
key[keys_held++] = KEY_Y;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_Z)
{
key[keys_held++] = KEY_Z;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key1x & KEYMASK_1)
{
key[keys_held++] = KEY_1;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_2)
{
key[keys_held++] = KEY_2;<|fim▁hole|> return;
}
if (key_status.key2x & KEYMASK_3)
{
key[keys_held++] = KEY_3;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_4)
{
key[keys_held++] = KEY_4;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_5)
{
key[keys_held++] = KEY_5;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_6)
{
key[keys_held++] = KEY_6;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_7)
{
key[keys_held++] = KEY_7;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_8)
{
key[keys_held++] = KEY_8;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_9)
{
key[keys_held++] = KEY_9;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_0)
{
key[keys_held++] = KEY_0;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_MINUS)
{
key[keys_held++] = KEY_MINUS;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_PLUS)
{
key[keys_held++] = KEY_PLUS;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_PRINTSCR)
{
key[keys_held++] = KEY_PRINTSCR;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_BRACE_OPEN)
{
key[keys_held++] = KEY_BRACE_OPEN;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_BRACE_CLOSE)
{
key[keys_held++] = KEY_BRACE_CLOSE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_COLON)
{
key[keys_held++] = KEY_COLON;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key2x & KEYMASK_QUOTE)
{
key[keys_held++] = KEY_QUOTE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_HASH)
{
key[keys_held++] = KEY_HASH;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_COMMA)
{
key[keys_held++] = KEY_COMMA;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_PERIOD)
{
key[keys_held++] = KEY_PERIOD;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_QUESTIONMARK)
{
key[keys_held++] = KEY_QUESTIONMARK;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_INTERNATIONAL1)
{
key[keys_held++] = KEY_INTERNATIONAL1;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F1)
{
key[keys_held++] = KEY_F1;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F2)
{
key[keys_held++] = KEY_F2;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F3)
{
key[keys_held++] = KEY_F3;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F4)
{
key[keys_held++] = KEY_F4;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F5)
{
key[keys_held++] = KEY_F5;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F6)
{
key[keys_held++] = KEY_F6;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F7)
{
key[keys_held++] = KEY_F7;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F8)
{
key[keys_held++] = KEY_F8;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F9)
{
key[keys_held++] = KEY_F9;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F10)
{
key[keys_held++] = KEY_F10;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key3x & KEYMASK_F11)
{
key[keys_held++] = KEY_F11;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_F12)
{
key[keys_held++] = KEY_F12;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_ESC)
{
key[keys_held++] = KEY_ESC;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_INSERT)
{
key[keys_held++] = KEY_INSERT;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_DELETE)
{
key[keys_held++] = KEY_DELETE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_TILDE)
{
key[keys_held++] = KEY_TILDE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_BACKSPACE)
{
key[keys_held++] = KEY_BACKSPACE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_TAB)
{
key[keys_held++] = KEY_TAB;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_CAPSLOCK)
{
key[keys_held++] = KEY_CAPSLOCK;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_LEFTSHIFT)
{
key[keys_held++] = KEY_LEFTSHIFT;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_RIGHTSHIFT)
{
key[keys_held++] = KEY_RIGHTSHIFT;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_LEFTCONTROL)
{
key[keys_held++] = KEY_LEFTCONTROL;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_RIGHTALT)
{
key[keys_held++] = KEY_RIGHTALT;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_LEFTWINDOWS)
{
key[keys_held++] = KEY_LEFTWINDOWS;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_SPACE)
{
key[keys_held++] = KEY_SPACE;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_RIGHTWINDOWS)
{
key[keys_held++] = KEY_RIGHTWINDOWS;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key4x & KEYMASK_MENU)
{
key[keys_held++] = KEY_MENU;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key5x & KEYMASK_LEFTARROW)
{
key[keys_held++] = KEY_LEFTARROW;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key5x & KEYMASK_DOWNARROW)
{
key[keys_held++] = KEY_DOWNARROW;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key5x & KEYMASK_UPARROW)
{
key[keys_held++] = KEY_UPARROW;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key5x & KEYMASK_RIGHTARROW)
{
key[keys_held++] = KEY_RIGHTARROW;
if (keys_held >= MAX_KEYS_HELD)
return;
}
if (key_status.key5x & KEYMASK_ENTER)
{
key[keys_held++] = KEY_ENTER;
if (keys_held >= MAX_KEYS_HELD)
return;
}
}
} // namespace SerialInterface<|fim▁end|>
|
if (keys_held >= MAX_KEYS_HELD)
|
<|file_name|>main.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import 'rxjs/add/operator/map';
bootstrap(RedditFeed, [...HTTP_PROVIDERS]).catch(err => console.error(err));<|fim▁end|>
|
import {bootstrap} from 'angular2/platform/browser';
import {RedditFeed} from './components/reddit_feed';
import {HTTP_PROVIDERS} from 'angular2/http';
|
<|file_name|>ngfuse.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import * as Fuse from 'fuse.js';
@Injectable()
export class NgFuseService {
defaults: NgFuseOptions = {
shouldSort: true,
threshold: 0.4,
location: 0,
distance: 100,
maxPatternLength: 32,
minMatchCharLength: 1,
minSearchStringLenght: 1
}
searchOptions: NgFuseOptions = this.defaults;
constructor() { };
search(collection: Array<Object>, searchString: string, options: NgFuseOptions = {}) {
        this.searchOptions = Object.assign({}, this.defaults, options);
let results = []
if (searchString && searchString.length >= this.searchOptions.minSearchStringLenght) {
const fuse = new Fuse(collection, this.searchOptions);
results = fuse.search(searchString);
return results;
} else {
return collection;
}
};
}
export interface NgFuseOptions extends Fuse.FuseOptions {
minSearchStringLenght?: 1;
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>test_project_structure.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the<|fim▁hole|>import ast
import glob
import itertools
import mmap
import os
import unittest
from typing import List
from parameterized import parameterized
ROOT_FOLDER = os.path.realpath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
)
class TestProjectStructure(unittest.TestCase):
def test_reference_to_providers_from_core(self):
for filename in glob.glob(f"{ROOT_FOLDER}/example_dags/**/*.py", recursive=True):
self.assert_file_not_contains(filename, "providers")
def test_deprecated_packages(self):
path_pattern = f"{ROOT_FOLDER}/airflow/contrib/**/*.py"
for filename in glob.glob(path_pattern, recursive=True):
if filename.endswith("/__init__.py"):
self.assert_file_contains(filename, "This package is deprecated.")
else:
self.assert_file_contains(filename, "This module is deprecated.")
def assert_file_not_contains(self, filename: str, pattern: str):
with open(filename, 'rb', 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content:
if content.find(bytes(pattern, 'utf-8')) != -1:
self.fail(f"File {filename} not contains pattern - {pattern}")
def assert_file_contains(self, filename: str, pattern: str):
with open(filename, 'rb', 0) as file, mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as content:
if content.find(bytes(pattern, 'utf-8')) == -1:
self.fail(f"File {filename} contains illegal pattern - {pattern}")
def test_providers_modules_should_have_tests(self):
"""
Assert every module in /airflow/providers has a corresponding test_ file in tests/airflow/providers.
"""
# Deprecated modules that don't have corresponded test
expected_missing_providers_modules = {
(
'airflow/providers/amazon/aws/hooks/aws_dynamodb.py',
'tests/providers/amazon/aws/hooks/test_aws_dynamodb.py',
)
}
# TODO: Should we extend this test to cover other directories?
modules_files = glob.glob(f"{ROOT_FOLDER}/airflow/providers/**/*.py", recursive=True)
# Make path relative
modules_files = (os.path.relpath(f, ROOT_FOLDER) for f in modules_files)
# Exclude example_dags
modules_files = (f for f in modules_files if "/example_dags/" not in f)
# Exclude __init__.py
modules_files = (f for f in modules_files if not f.endswith("__init__.py"))
# Change airflow/ to tests/
expected_test_files = (
f'tests/{f.partition("/")[2]}' for f in modules_files if not f.endswith("__init__.py")
)
# Add test_ prefix to filename
expected_test_files = (
f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}'
for f in expected_test_files
if not f.endswith("__init__.py")
)
current_test_files = glob.glob(f"{ROOT_FOLDER}/tests/providers/**/*.py", recursive=True)
# Make path relative
current_test_files = (os.path.relpath(f, ROOT_FOLDER) for f in current_test_files)
# Exclude __init__.py
current_test_files = (f for f in current_test_files if not f.endswith("__init__.py"))
modules_files = set(modules_files)
expected_test_files = set(expected_test_files)
current_test_files = set(current_test_files)
missing_tests_files = expected_test_files - expected_test_files.intersection(current_test_files)
with self.subTest("Detect missing tests in providers module"):
expected_missing_test_modules = {pair[1] for pair in expected_missing_providers_modules}
missing_tests_files = missing_tests_files - set(expected_missing_test_modules)
assert set() == missing_tests_files
with self.subTest("Verify removed deprecated module also removed from deprecated list"):
expected_missing_modules = {pair[0] for pair in expected_missing_providers_modules}
removed_deprecated_module = expected_missing_modules - modules_files
if removed_deprecated_module:
self.fail(
"You've removed a deprecated module:\n"
f"{removed_deprecated_module}"
"\n"
"Thank you very much.\n"
"Can you remove it from the list of expected missing modules tests, please?"
)
def get_imports_from_file(filepath: str):
with open(filepath) as py_file:
content = py_file.read()
doc_node = ast.parse(content, filepath)
import_names: List[str] = []
for current_node in ast.walk(doc_node):
if not isinstance(current_node, (ast.Import, ast.ImportFrom)):
continue
for alias in current_node.names:
name = alias.name
fullname = f'{current_node.module}.{name}' if isinstance(current_node, ast.ImportFrom) else name
import_names.append(fullname)
return import_names
def filepath_to_module(filepath: str):
filepath = os.path.relpath(os.path.abspath(filepath), ROOT_FOLDER)
return filepath.replace("/", ".")[: -(len('.py'))]
def get_classes_from_file(filepath: str):
with open(filepath) as py_file:
content = py_file.read()
doc_node = ast.parse(content, filepath)
module = filepath_to_module(filepath)
results: List[str] = []
for current_node in ast.walk(doc_node):
if not isinstance(current_node, ast.ClassDef):
continue
name = current_node.name
if not name.endswith("Operator") and not name.endswith("Sensor") and not name.endswith("Operator"):
continue
results.append(f"{module}.{name}")
return results
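# For example, get_classes_from_file("airflow/providers/google/cloud/operators/dataproc.py")
# would return fully-qualified names such as
# "airflow.providers.google.cloud.operators.dataproc.DataprocSubmitJobOperator".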
class TestGoogleProviderProjectStructure(unittest.TestCase):
MISSING_EXAMPLE_DAGS = {
('cloud', 'adls_to_gcs'),
('cloud', 'sql_to_gcs'),
('cloud', 'bigquery_to_mysql'),
('cloud', 'cassandra_to_gcs'),
('cloud', 'mssql_to_gcs'),
('suite', 'drive'),
('ads', 'ads_to_gcs'),
}
# Those operators are deprecated and we do not need examples for them
DEPRECATED_OPERATORS = {
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service'
'.CloudDataTransferServiceS3ToGCSOperator',
'airflow.providers.google.cloud.operators.cloud_storage_transfer_service'
'.CloudDataTransferServiceGCSToGCSOperator',
'airflow.providers.google.cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHadoopJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocScaleClusterOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitHiveJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPigJobOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocSubmitPySparkJobOperator',
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageModelOperator',
'airflow.providers.google.cloud.operators.mlengine.MLEngineManageVersionOperator',
'airflow.providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator',
'airflow.providers.google.cloud.operators.bigquery.BigQueryPatchDatasetOperator',
'airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator',
'airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator',
}
# Those operators should not have examples as they are never used standalone (they are abstract)
BASE_OPERATORS = {
'airflow.providers.google.cloud.operators.compute.ComputeEngineBaseOperator',
'airflow.providers.google.cloud.operators.cloud_sql.CloudSQLBaseOperator',
'airflow.providers.google.cloud.operators.dataproc.DataprocJobBaseOperator',
}
    # Please add the examples to those operators at the earliest convenience :)
MISSING_EXAMPLES_FOR_OPERATORS = {
'airflow.providers.google.cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator',
'airflow.providers.google.cloud.operators.mlengine.MLEngineTrainingCancelJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPReidentifyContentOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPCreateDLPJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListInspectTemplatesOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDLPJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListJobTriggersOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPCancelDLPJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetDLPJobOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPGetInspectTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListInfoTypesOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPListDLPJobsOperator',
'airflow.providers.google.cloud.operators.dlp.CloudDLPRedactImageOperator',
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreDeleteOperationOperator',
'airflow.providers.google.cloud.operators.datastore.CloudDatastoreGetOperationOperator',
'airflow.providers.google.cloud.sensors.gcs.GCSObjectUpdateSensor',
'airflow.providers.google.cloud.sensors.gcs.GCSUploadSessionCompleteSensor',
}
def test_example_dags(self):
operators_modules = itertools.chain(
*(self.find_resource_files(resource_type=d) for d in ["operators", "sensors", "transfers"])
)
example_dags_files = self.find_resource_files(resource_type="example_dags")
# Generate tuple of department and service e.g. ('marketing_platform', 'display_video')
operator_sets = [(f.split("/")[-3], f.split("/")[-1].rsplit(".")[0]) for f in operators_modules]
example_sets = [
(f.split("/")[-3], f.split("/")[-1].rsplit(".")[0].replace("example_", "", 1))
for f in example_dags_files
]
def has_example_dag(operator_set):
for e in example_sets:
if e[0] != operator_set[0]:
continue
if e[1].startswith(operator_set[1]):
return True
return False
with self.subTest("Detect missing example dags"):
missing_example = {s for s in operator_sets if not has_example_dag(s)}
missing_example -= self.MISSING_EXAMPLE_DAGS
assert set() == missing_example
with self.subTest("Keep update missing example dags list"):
new_example_dag = set(example_sets).intersection(set(self.MISSING_EXAMPLE_DAGS))
if new_example_dag:
new_example_dag_text = '\n'.join(str(f) for f in new_example_dag)
self.fail(
"You've added a example dag currently listed as missing:\n"
f"{new_example_dag_text}"
"\n"
"Thank you very much.\n"
"Can you remove it from the list of missing example, please?"
)
with self.subTest("Remove extra elements"):
extra_example_dags = set(self.MISSING_EXAMPLE_DAGS) - set(operator_sets)
if extra_example_dags:
new_example_dag_text = '\n'.join(str(f) for f in extra_example_dags)
self.fail(
"You've added a example dag currently listed as missing:\n"
f"{new_example_dag_text}"
"\n"
"Thank you very much.\n"
"Can you remove it from the list of missing example, please?"
)
def test_missing_example_for_operator(self):
missing_operators = []
for resource_type in ["operators", "sensors", "transfers"]:
operator_files = set(
self.find_resource_files(top_level_directory="airflow", resource_type=resource_type)
)
for filepath in operator_files:
service_name = os.path.basename(filepath)[: -(len(".py"))]
example_dags = list(
glob.glob(
f"{ROOT_FOLDER}/airflow/providers/google/*/example_dags/example_{service_name}*.py"
)
)
if not example_dags:
# Ignore. We have separate tests that detect this.
continue
example_paths = {
path for example_dag in example_dags for path in get_imports_from_file(example_dag)
}
example_paths = {
path for path in example_paths if f'.{resource_type}.{service_name}.' in path
}
print("example_paths=", example_paths)
operators_paths = set(get_classes_from_file(f"{ROOT_FOLDER}/{filepath}"))
missing_operators.extend(operators_paths - example_paths)
full_set = set()
full_set.update(self.MISSING_EXAMPLES_FOR_OPERATORS)
full_set.update(self.DEPRECATED_OPERATORS)
full_set.update(self.BASE_OPERATORS)
assert set(missing_operators) == full_set
@parameterized.expand(
itertools.product(["_system.py", "_system_helper.py"], ["operators", "sensors", "transfers"])
)
def test_detect_invalid_system_tests(self, resource_type, filename_suffix):
operators_tests = self.find_resource_files(top_level_directory="tests", resource_type=resource_type)
operators_files = self.find_resource_files(top_level_directory="airflow", resource_type=resource_type)
files = {f for f in operators_tests if f.endswith(filename_suffix)}
expected_files = (f"tests/{f[8:]}" for f in operators_files)
expected_files = (f.replace(".py", filename_suffix).replace("/test_", "/") for f in expected_files)
expected_files = {f'{f.rpartition("/")[0]}/test_{f.rpartition("/")[2]}' for f in expected_files}
assert set() == files - expected_files
@staticmethod
def find_resource_files(
top_level_directory: str = "airflow",
department: str = "*",
resource_type: str = "*",
service: str = "*",
):
python_files = glob.glob(
f"{ROOT_FOLDER}/{top_level_directory}/providers/google/{department}/{resource_type}/{service}.py"
)
# Make path relative
resource_files = (os.path.relpath(f, ROOT_FOLDER) for f in python_files)
# Exclude __init__.py and pycache
resource_files = (f for f in resource_files if not f.endswith("__init__.py"))
return resource_files
class TestOperatorsHooks(unittest.TestCase):
def test_no_illegal_suffixes(self):
illegal_suffixes = ["_operator.py", "_hook.py", "_sensor.py"]
files = itertools.chain(
*(
glob.glob(f"{ROOT_FOLDER}/{part}/providers/**/{resource_type}/*.py", recursive=True)
for resource_type in ["operators", "hooks", "sensors", "example_dags"]
for part in ["airflow", "tests"]
)
)
invalid_files = [f for f in files if any(f.endswith(suffix) for suffix in illegal_suffixes)]
assert [] == invalid_files<|fim▁end|>
|
# specific language governing permissions and limitations
# under the License.
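
The structure tests above rely on two helpers, get_imports_from_file and get_classes_from_file, which are defined elsewhere in the test module and do not appear in this excerpt. As a rough sketch of what they plausibly do (the names come from the tests, but these bodies are assumptions; the real Airflow implementations may differ, notably in how file paths are mapped to module paths relative to the repository root):

import ast
import os


def get_imports_from_file(filepath):
    # Collect dotted import paths such as
    # "airflow.providers.google.cloud.operators.x.FooOperator".
    with open(filepath, encoding="utf-8") as source:
        tree = ast.parse(source.read())
    imports = set()
    for node in ast.walk(tree):
        if isinstance(node, ast.ImportFrom) and node.module:
            imports.update(f"{node.module}.{alias.name}" for alias in node.names)
        elif isinstance(node, ast.Import):
            imports.update(alias.name for alias in node.names)
    return imports


def get_classes_from_file(filepath):
    # Map top-level class definitions to dotted paths derived from the file's location.
    with open(filepath, encoding="utf-8") as source:
        tree = ast.parse(source.read())
    module = os.path.splitext(filepath)[0].replace(os.sep, ".")
    return {f"{module}.{node.name}" for node in tree.body if isinstance(node, ast.ClassDef)}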
|
<|file_name|>augmented-assignments.rs<|end_file_name|><|fim▁begin|>use std::ops::AddAssign;
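// Compile-fail UI test: the `//~ ERROR/NOTE/HELP` annotations below mark the
// diagnostics that rustc is expected to emit on those lines.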
struct Int(i32);
impl AddAssign for Int {
fn add_assign(&mut self, _: Int) {
unimplemented!()
}
}
fn main() {
let mut x = Int(1);
x
//~^ NOTE borrow of `x` occurs here
+=
x;
//~^ ERROR cannot move out of `x` because it is borrowed
//~| move out of `x` occurs here
let y = Int(2);
//~^ HELP consider changing this to be mutable
//~| SUGGESTION mut y
y //~ ERROR cannot borrow `y` as mutable, as it is not declared as mutable
//~| cannot borrow as mutable
+=
Int(1);<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>create_pi.py<|end_file_name|><|fim▁begin|>from random import random
def ran_pi():
<|fim▁hole|> temp=[]
for j in range(dim[1]):
temp.append(random())
sm=sum(temp)
out.append([temp[i]/sm for i in range(dim[1])])
return out
pi={i:[[1.0,0.0],[0.0,1.0]] for i in range(5)}
temp={i:[[0.0,1.0],[1.0,0.0]] for i in range(5,10)}
pi.update(temp)<|fim▁end|>
|
dim=[2,3]
out=[]
for i in range(dim[0]):
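
Spliced back together in source order (prompt plus completion), the create_pi.py example reads as follows; the only changes are whitespace and the added comments:

from random import random


def ran_pi():
    dim = [2, 3]
    out = []
    for i in range(dim[0]):
        temp = []
        for j in range(dim[1]):
            temp.append(random())
        sm = sum(temp)
        # Normalize so each row sums to 1.0. Note that the comprehension's `i`
        # shadows the outer loop variable (legal in Python 3, where
        # comprehensions get their own scope, but easy to misread).
        out.append([temp[i] / sm for i in range(dim[1])])
    return out


pi = {i: [[1.0, 0.0], [0.0, 1.0]] for i in range(5)}
temp = {i: [[0.0, 1.0], [1.0, 0.0]] for i in range(5, 10)}
pi.update(temp)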
|
<|file_name|>user-resource.go<|end_file_name|><|fim▁begin|>package main
import (
"log"
"net/http"
"github.com/emicklei/go-restful"
restfulspec "github.com/emicklei/go-restful-openapi"
"github.com/go-openapi/spec"
)
type UserResource struct {
// normally one would use DAO (data access object)
users map[string]User
}
func (u UserResource) WebService() *restful.WebService {
ws := new(restful.WebService)
ws.
Path("/users").
Consumes(restful.MIME_XML, restful.MIME_JSON).
Produces(restful.MIME_JSON, restful.MIME_XML) // you can specify this per route as well
tags := []string{"users"}
ws.Route(ws.GET("/").To(u.findAllUsers).
// docs
Doc("get all users").
Metadata(restfulspec.KeyOpenAPITags, tags).
Writes([]User{}).
Returns(200, "OK", []User{}))
ws.Route(ws.GET("/{user-id}").To(u.findUser).
// docs
Doc("get a user").
Param(ws.PathParameter("user-id", "identifier of the user").DataType("integer").DefaultValue("1")).
Metadata(restfulspec.KeyOpenAPITags, tags).
Writes(User{}). // on the response
Returns(200, "OK", User{}).
Returns(404, "Not Found", nil))
ws.Route(ws.PUT("/{user-id}").To(u.updateUser).
// docs
Doc("update a user").
Param(ws.PathParameter("user-id", "identifier of the user").DataType("string")).
Metadata(restfulspec.KeyOpenAPITags, tags).
Reads(User{})) // from the request
ws.Route(ws.PUT("").To(u.createUser).
// docs
Doc("create a user").
Metadata(restfulspec.KeyOpenAPITags, tags).
Reads(User{})) // from the request
ws.Route(ws.DELETE("/{user-id}").To(u.removeUser).
// docs
Doc("delete a user").
Metadata(restfulspec.KeyOpenAPITags, tags).
Param(ws.PathParameter("user-id", "identifier of the user").DataType("string")))
return ws
}
// GET http://localhost:8080/users
//
func (u UserResource) findAllUsers(request *restful.Request, response *restful.Response) {
list := []User{}
for _, each := range u.users {
list = append(list, each)
}
response.WriteEntity(list)
}
// GET http://localhost:8080/users/1
//
func (u UserResource) findUser(request *restful.Request, response *restful.Response) {
id := request.PathParameter("user-id")
usr := u.users[id]
if len(usr.ID) == 0 {
response.WriteErrorString(http.StatusNotFound, "User could not be found.")
} else {
response.WriteEntity(usr)
}
}
// PUT http://localhost:8080/users/1
// <User><Id>1</Id><Name>Melissa Raspberry</Name></User>
//
func (u *UserResource) updateUser(request *restful.Request, response *restful.Response) {
usr := new(User)
err := request.ReadEntity(&usr)
if err == nil {
u.users[usr.ID] = *usr
response.WriteEntity(usr)
} else {
response.WriteError(http.StatusInternalServerError, err)
}
}
// PUT http://localhost:8080/users
// <User><Id>1</Id><Name>Melissa</Name></User>
//
func (u *UserResource) createUser(request *restful.Request, response *restful.Response) {
usr := User{ID: request.PathParameter("user-id")}
err := request.ReadEntity(&usr)
if err == nil {
u.users[usr.ID] = usr
response.WriteHeaderAndEntity(http.StatusCreated, usr)
} else {
response.WriteError(http.StatusInternalServerError, err)
}
}
// DELETE http://localhost:8080/users/1
//
func (u *UserResource) removeUser(request *restful.Request, response *restful.Response) {
id := request.PathParameter("user-id")
delete(u.users, id)
}<|fim▁hole|> u := UserResource{map[string]User{}}
restful.DefaultContainer.Add(u.WebService())
config := restfulspec.Config{
WebServices: restful.RegisteredWebServices(), // you control what services are visible
APIPath: "/apidocs.json",
PostBuildSwaggerObjectHandler: enrichSwaggerObject}
restful.DefaultContainer.Add(restfulspec.NewOpenAPIService(config))
// Optionally, you can install the Swagger Service which provides a nice Web UI on your REST API
// You need to download the Swagger HTML5 assets and change the FilePath location in the config below.
// Open http://localhost:8080/apidocs/?url=http://localhost:8080/apidocs.json
http.Handle("/apidocs/", http.StripPrefix("/apidocs/", http.FileServer(http.Dir("/Users/emicklei/Projects/swagger-ui/dist"))))
// Optionally, you may need to enable CORS for the UI to work.
cors := restful.CrossOriginResourceSharing{
AllowedHeaders: []string{"Content-Type", "Accept"},
AllowedMethods: []string{"GET", "POST", "PUT", "DELETE"},
CookiesAllowed: false,
Container: restful.DefaultContainer}
restful.DefaultContainer.Filter(cors.Filter)
log.Printf("Get the API using http://localhost:8080/apidocs.json")
log.Printf("Open Swagger UI using http://localhost:8080/apidocs/?url=http://localhost:8080/apidocs.json")
log.Fatal(http.ListenAndServe(":8080", nil))
}
func enrichSwaggerObject(swo *spec.Swagger) {
swo.Info = &spec.Info{
InfoProps: spec.InfoProps{
Title: "UserService",
Description: "Resource for managing Users",
Contact: &spec.ContactInfo{
Name: "john",
Email: "[email protected]",
URL: "http://johndoe.org",
},
License: &spec.License{
Name: "MIT",
URL: "http://mit.org",
},
Version: "1.0.0",
},
}
swo.Tags = []spec.Tag{spec.Tag{TagProps: spec.TagProps{
Name: "users",
Description: "Managing users"}}}
}
// User is just a sample type
type User struct {
ID string `json:"id" description:"identifier of the user"`
Name string `json:"name" description:"name of the user" default:"john"`
Age int `json:"age" description:"age of the user" default:"21"`
}<|fim▁end|>
|
func main() {
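
The route comments in user-resource.go double as a usage guide. As an illustrative client-side sketch (assuming the server above is running on localhost:8080 and that the third-party requests package is installed; none of this client code comes from the example itself):

import requests

BASE = "http://localhost:8080/users"
HEADERS = {"Accept": "application/json"}

# PUT /users creates a user: the service registers createUser on PUT "".
resp = requests.put(BASE, json={"id": "1", "name": "Melissa", "age": 21}, headers=HEADERS)
print(resp.status_code)  # expect 201 Created

# GET /users/1 fetches it back, serialized as JSON because of the Accept header.
print(requests.get(f"{BASE}/1", headers=HEADERS).json())

# DELETE /users/1 removes it again.
requests.delete(f"{BASE}/1")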
|