repo_name (stringlengths 4–116) | path (stringlengths 4–379) | size (stringlengths 1–7) | content (stringlengths 3–1.05M) | license (stringclasses, 15 values)
---|---|---|---|---|
endlessm/chromium-browser | third_party/llvm/llvm/unittests/Analysis/ValueLatticeTest.cpp | 9059 | //===- ValueLatticeTest.cpp - ValueLattice unit tests ------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/ValueLattice.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/ConstantRange.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "gtest/gtest.h"
namespace llvm {
namespace {
// Test fixture providing an LLVMContext used to build the types and
// constants exercised by the ValueLattice tests.
class ValueLatticeTest : public testing::Test {
protected:
LLVMContext Context;
};
TEST_F(ValueLatticeTest, ValueLatticeGetters) {
auto I32Ty = IntegerType::get(Context, 32);
auto *C1 = ConstantInt::get(I32Ty, 1);
EXPECT_TRUE(ValueLatticeElement::get(C1).isConstantRange());
EXPECT_TRUE(
ValueLatticeElement::getRange({C1->getValue()}).isConstantRange());
EXPECT_TRUE(ValueLatticeElement::getOverdefined().isOverdefined());
auto FloatTy = Type::getFloatTy(Context);
auto *C2 = ConstantFP::get(FloatTy, 1.1);
EXPECT_TRUE(ValueLatticeElement::get(C2).isConstant());
EXPECT_TRUE(ValueLatticeElement::getNot(C2).isNotConstant());
}
TEST_F(ValueLatticeTest, MarkConstantRange) {
auto LV1 =
ValueLatticeElement::getRange({APInt(32, 10, true), APInt(32, 20, true)});
// Test markConstantRange() with an equal range.
EXPECT_FALSE(
LV1.markConstantRange({APInt(32, 10, true), APInt(32, 20, true)}));
// Test markConstantRange() with supersets of existing range.
EXPECT_TRUE(LV1.markConstantRange({APInt(32, 5, true), APInt(32, 20, true)}));
EXPECT_EQ(LV1.getConstantRange().getLower().getLimitedValue(), 5U);
EXPECT_EQ(LV1.getConstantRange().getUpper().getLimitedValue(), 20U);
EXPECT_TRUE(LV1.markConstantRange({APInt(32, 5, true), APInt(32, 23, true)}));
EXPECT_EQ(LV1.getConstantRange().getLower().getLimitedValue(), 5U);
EXPECT_EQ(LV1.getConstantRange().getUpper().getLimitedValue(), 23U);
}
TEST_F(ValueLatticeTest, MergeIn) {
auto I32Ty = IntegerType::get(Context, 32);
auto *C1 = ConstantInt::get(I32Ty, 1);
  // Merge two lattice values with equal integer constants.
auto LV1 = ValueLatticeElement::get(C1);
EXPECT_FALSE(LV1.mergeIn(ValueLatticeElement::get(C1)));
EXPECT_TRUE(LV1.isConstantRange());
EXPECT_EQ(LV1.asConstantInteger().getValue().getLimitedValue(), 1U);
// Merge LV1 with different integer constant.
EXPECT_TRUE(
LV1.mergeIn(ValueLatticeElement::get(ConstantInt::get(I32Ty, 99))));
EXPECT_TRUE(LV1.isConstantRange());
EXPECT_EQ(LV1.getConstantRange().getLower().getLimitedValue(), 1U);
EXPECT_EQ(LV1.getConstantRange().getUpper().getLimitedValue(), 100U);
// Merge constant range with same constant range.
EXPECT_FALSE(LV1.mergeIn(LV1));
EXPECT_TRUE(LV1.isConstantRange());
EXPECT_EQ(LV1.getConstantRange().getLower().getLimitedValue(), 1U);
EXPECT_EQ(LV1.getConstantRange().getUpper().getLimitedValue(), 100U);
// Merge LV1 in undefined value.
ValueLatticeElement LV2;
EXPECT_TRUE(LV2.mergeIn(LV1));
EXPECT_TRUE(LV1.isConstantRange());
EXPECT_EQ(LV1.getConstantRange().getLower().getLimitedValue(), 1U);
EXPECT_EQ(LV1.getConstantRange().getUpper().getLimitedValue(), 100U);
EXPECT_TRUE(LV2.isConstantRange());
EXPECT_EQ(LV2.getConstantRange().getLower().getLimitedValue(), 1U);
EXPECT_EQ(LV2.getConstantRange().getUpper().getLimitedValue(), 100U);
// Merge LV1 with overdefined.
EXPECT_TRUE(LV1.mergeIn(ValueLatticeElement::getOverdefined()));
EXPECT_TRUE(LV1.isOverdefined());
// Merge overdefined with overdefined.
EXPECT_FALSE(LV1.mergeIn(ValueLatticeElement::getOverdefined()));
EXPECT_TRUE(LV1.isOverdefined());
}
TEST_F(ValueLatticeTest, getCompareIntegers) {
auto *I32Ty = IntegerType::get(Context, 32);
auto *I1Ty = IntegerType::get(Context, 1);
auto *C1 = ConstantInt::get(I32Ty, 1);
auto LV1 = ValueLatticeElement::get(C1);
// Check getCompare for equal integer constants.
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_EQ, I1Ty, LV1)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SGE, I1Ty, LV1)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SLE, I1Ty, LV1)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_NE, I1Ty, LV1)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SLT, I1Ty, LV1)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SGT, I1Ty, LV1)->isZeroValue());
auto LV2 =
ValueLatticeElement::getRange({APInt(32, 10, true), APInt(32, 20, true)});
// Check getCompare with distinct integer ranges.
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SLT, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SLE, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_NE, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_EQ, I1Ty, LV2)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SGE, I1Ty, LV2)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::ICMP_SGT, I1Ty, LV2)->isZeroValue());
auto LV3 =
ValueLatticeElement::getRange({APInt(32, 15, true), APInt(32, 19, true)});
// Check getCompare with a subset integer ranges.
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_SLT, I1Ty, LV3), nullptr);
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_SLE, I1Ty, LV3), nullptr);
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_NE, I1Ty, LV3), nullptr);
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_EQ, I1Ty, LV3), nullptr);
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_SGE, I1Ty, LV3), nullptr);
EXPECT_EQ(LV2.getCompare(CmpInst::ICMP_SGT, I1Ty, LV3), nullptr);
auto LV4 =
ValueLatticeElement::getRange({APInt(32, 15, true), APInt(32, 25, true)});
// Check getCompare with overlapping integer ranges.
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_SLT, I1Ty, LV4), nullptr);
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_SLE, I1Ty, LV4), nullptr);
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_NE, I1Ty, LV4), nullptr);
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_EQ, I1Ty, LV4), nullptr);
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_SGE, I1Ty, LV4), nullptr);
EXPECT_EQ(LV3.getCompare(CmpInst::ICMP_SGT, I1Ty, LV4), nullptr);
}
TEST_F(ValueLatticeTest, getCompareFloat) {
  auto *FloatTy = Type::getFloatTy(Context);
auto *I1Ty = IntegerType::get(Context, 1);
auto *C1 = ConstantFP::get(FloatTy, 1.0);
auto LV1 = ValueLatticeElement::get(C1);
auto LV2 = ValueLatticeElement::get(C1);
// Check getCompare for equal floating point constants.
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_OEQ, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_OGE, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_OLE, I1Ty, LV2)->isOneValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_ONE, I1Ty, LV2)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_OLT, I1Ty, LV2)->isZeroValue());
EXPECT_TRUE(LV1.getCompare(CmpInst::FCMP_OGT, I1Ty, LV2)->isZeroValue());
EXPECT_TRUE(
LV1.mergeIn(ValueLatticeElement::get(ConstantFP::get(FloatTy, 2.2))));
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_OEQ, I1Ty, LV2), nullptr);
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_OGE, I1Ty, LV2), nullptr);
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_OLE, I1Ty, LV2), nullptr);
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_ONE, I1Ty, LV2), nullptr);
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_OLT, I1Ty, LV2), nullptr);
EXPECT_EQ(LV1.getCompare(CmpInst::FCMP_OGT, I1Ty, LV2), nullptr);
}
TEST_F(ValueLatticeTest, getCompareUndef) {
auto *I32Ty = IntegerType::get(Context, 32);
auto *I1Ty = IntegerType::get(Context, 1);
auto LV1 = ValueLatticeElement::get(UndefValue::get(I32Ty));
auto LV2 =
ValueLatticeElement::getRange({APInt(32, 10, true), APInt(32, 20, true)});
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_SLT, I1Ty, LV2)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_SLE, I1Ty, LV2)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_NE, I1Ty, LV2)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_EQ, I1Ty, LV2)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_SGE, I1Ty, LV2)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::ICMP_SGT, I1Ty, LV2)));
  auto *FloatTy = Type::getFloatTy(Context);
auto LV3 = ValueLatticeElement::get(ConstantFP::get(FloatTy, 1.0));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_OEQ, I1Ty, LV3)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_OGE, I1Ty, LV3)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_OLE, I1Ty, LV3)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_ONE, I1Ty, LV3)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_OLT, I1Ty, LV3)));
EXPECT_TRUE(isa<UndefValue>(LV1.getCompare(CmpInst::FCMP_OGT, I1Ty, LV3)));
}
} // end anonymous namespace
} // end namespace llvm
| bsd-3-clause |
glennq/scikit-learn | sklearn/mixture/bayesian_mixture.py | 32737 | """Bayesian Gaussian Mixture Model."""
# Author: Wei Xue <[email protected]>
# Thierry Guillemot <[email protected]>
# License: BSD 3 clause
import math
import numpy as np
from scipy.special import betaln, digamma, gammaln
from .base import BaseMixture, _check_shape
from .gaussian_mixture import _check_precision_matrix
from .gaussian_mixture import _check_precision_positivity
from .gaussian_mixture import _compute_log_det_cholesky
from .gaussian_mixture import _compute_precision_cholesky
from .gaussian_mixture import _estimate_gaussian_parameters
from .gaussian_mixture import _estimate_log_gaussian_prob
from ..utils import check_array
from ..utils.validation import check_is_fitted
def _log_dirichlet_norm(dirichlet_concentration):
"""Compute the log of the Dirichlet distribution normalization term.
Parameters
----------
    dirichlet_concentration : array-like, shape (n_components,)
The parameters values of the Dirichlet distribution.
Returns
-------
log_dirichlet_norm : float
The log normalization of the Dirichlet distribution.
"""
return (gammaln(np.sum(dirichlet_concentration)) -
np.sum(gammaln(dirichlet_concentration)))
def _log_wishart_norm(degrees_of_freedom, log_det_precisions_chol, n_features):
"""Compute the log of the Wishart distribution normalization term.
Parameters
----------
degrees_of_freedom : array-like, shape (n_components,)
The number of degrees of freedom on the covariance Wishart
distributions.
    log_det_precisions_chol : array-like, shape (n_components,)
        The log-determinant of the Cholesky factor of the precision matrix
        for each component.
n_features : int
The number of features.
    Returns
    -------
log_wishart_norm : array-like, shape (n_components,)
The log normalization of the Wishart distribution.
"""
# To simplify the computation we have removed the np.log(np.pi) term
return -(degrees_of_freedom * log_det_precisions_chol +
degrees_of_freedom * n_features * .5 * math.log(2.) +
np.sum(gammaln(.5 * (degrees_of_freedom -
np.arange(n_features)[:, np.newaxis])), 0))
class BayesianGaussianMixture(BaseMixture):
"""Variational Bayesian estimation of a Gaussian mixture.
    This class allows inference of an approximate posterior distribution over
    the parameters of a Gaussian mixture distribution. The effective number of
    components can be inferred from the data.
    This class implements two types of prior for the weights distribution: a
    finite mixture model with a Dirichlet distribution and an infinite mixture
    model with the Dirichlet Process. In practice the Dirichlet Process
    inference algorithm is approximated and uses a truncated distribution with
    a fixed maximum number of components (called the Stick-breaking
    representation). The number of components actually used almost always
    depends on the data.
.. versionadded:: 0.18
*BayesianGaussianMixture*.
Read more in the :ref:`User Guide <bgmm>`.
Parameters
----------
n_components : int, defaults to 1.
The number of mixture components. Depending on the data and the value
of the `weight_concentration_prior` the model can decide to not use
all the components by setting some component `weights_` to values very
close to zero. The number of effective components is therefore smaller
than n_components.
covariance_type : {'full', 'tied', 'diag', 'spherical'}, defaults to 'full'
String describing the type of covariance parameters to use.
Must be one of::
'full' (each component has its own general covariance matrix),
'tied' (all components share the same general covariance matrix),
'diag' (each component has its own diagonal covariance matrix),
'spherical' (each component has its own single variance).
tol : float, defaults to 1e-3.
The convergence threshold. EM iterations will stop when the
lower bound average gain on the likelihood (of the training data with
respect to the model) is below this threshold.
reg_covar : float, defaults to 1e-6.
Non-negative regularization added to the diagonal of covariance.
        Ensures that the covariance matrices are all positive.
max_iter : int, defaults to 100.
The number of EM iterations to perform.
n_init : int, defaults to 1.
The number of initializations to perform. The result with the highest
lower bound value on the likelihood is kept.
init_params : {'kmeans', 'random'}, defaults to 'kmeans'.
The method used to initialize the weights, the means and the
covariances.
Must be one of::
'kmeans' : responsibilities are initialized using kmeans.
'random' : responsibilities are initialized randomly.
weight_concentration_prior_type : str, defaults to 'dirichlet_process'.
String describing the type of the weight concentration prior.
Must be one of::
'dirichlet_process' (using the Stick-breaking representation),
'dirichlet_distribution' (can favor more uniform weights).
weight_concentration_prior : float | None, optional.
The dirichlet concentration of each component on the weight
        distribution (Dirichlet). A higher concentration puts more mass in
        the center and will lead to more components being active, while a
        lower concentration parameter will lead to more mass at the edge of
        the mixture weights simplex. The value of the parameter must be
        greater than 0. If it is None, it's set to ``1. / n_components``.
mean_precision_prior : float | None, optional.
The precision prior on the mean distribution (Gaussian).
        Controls the extent to which means can be placed away from
        `mean_prior`. Larger values concentrate the means of each cluster
        around `mean_prior`.
The value of the parameter must be greater than 0.
If it is None, it's set to 1.
mean_prior : array-like, shape (n_features,), optional
The prior on the mean distribution (Gaussian).
If it is None, it's set to the mean of X.
degrees_of_freedom_prior : float | None, optional.
The prior of the number of degrees of freedom on the covariance
distributions (Wishart). If it is None, it's set to `n_features`.
covariance_prior : float or array-like, optional
The prior on the covariance distribution (Wishart).
        If it is None, the empirical covariance prior is initialized using the
covariance of X. The shape depends on `covariance_type`::
(n_features, n_features) if 'full',
(n_features, n_features) if 'tied',
(n_features) if 'diag',
float if 'spherical'
    random_state : RandomState or an int seed, defaults to None.
A random number generator instance.
warm_start : bool, default to False.
If 'warm_start' is True, the solution of the last fitting is used as
initialization for the next call of fit(). This can speed up
        convergence when fit is called several times on similar problems.
verbose : int, default to 0.
Enable verbose output. If 1 then it prints the current
initialization and each iteration step. If greater than 1 then
it prints also the log probability and the time needed
for each step.
verbose_interval : int, default to 10.
        Number of iterations done before the next print.
Attributes
----------
weights_ : array-like, shape (n_components,)
        The weights of each mixture component.
means_ : array-like, shape (n_components, n_features)
The mean of each mixture component.
covariances_ : array-like
The covariance of each mixture component.
The shape depends on `covariance_type`::
(n_components,) if 'spherical',
(n_features, n_features) if 'tied',
(n_components, n_features) if 'diag',
(n_components, n_features, n_features) if 'full'
precisions_ : array-like
The precision matrices for each component in the mixture. A precision
matrix is the inverse of a covariance matrix. A covariance matrix is
symmetric positive definite so the mixture of Gaussian can be
equivalently parameterized by the precision matrices. Storing the
precision matrices instead of the covariance matrices makes it more
efficient to compute the log-likelihood of new samples at test time.
The shape depends on ``covariance_type``::
(n_components,) if 'spherical',
(n_features, n_features) if 'tied',
(n_components, n_features) if 'diag',
(n_components, n_features, n_features) if 'full'
precisions_cholesky_ : array-like
The cholesky decomposition of the precision matrices of each mixture
component. A precision matrix is the inverse of a covariance matrix.
A covariance matrix is symmetric positive definite so the mixture of
Gaussian can be equivalently parameterized by the precision matrices.
Storing the precision matrices instead of the covariance matrices makes
it more efficient to compute the log-likelihood of new samples at test
time. The shape depends on ``covariance_type``::
(n_components,) if 'spherical',
(n_features, n_features) if 'tied',
(n_components, n_features) if 'diag',
(n_components, n_features, n_features) if 'full'
converged_ : bool
True when convergence was reached in fit(), False otherwise.
n_iter_ : int
        Number of steps used by the best fit of inference to reach
        convergence.
lower_bound_ : float
Lower bound value on the likelihood (of the training data with
respect to the model) of the best fit of inference.
weight_concentration_prior_ : tuple or float
The dirichlet concentration of each component on the weight
distribution (Dirichlet). The type depends on
``weight_concentration_prior_type``::
(float, float) if 'dirichlet_process' (Beta parameters),
float if 'dirichlet_distribution' (Dirichlet parameters).
        A higher concentration puts more mass in the center and will lead to
        more components being active, while a lower concentration parameter
        will lead to more mass at the edge of the simplex.
weight_concentration_ : array-like, shape (n_components,)
The dirichlet concentration of each component on the weight
distribution (Dirichlet).
mean_precision_prior : float
The precision prior on the mean distribution (Gaussian).
        Controls the extent to which means can be placed away from
        `mean_prior`. Larger values concentrate the means of each cluster
        around `mean_prior`.
mean_precision_ : array-like, shape (n_components,)
        The precision of each component on the mean distribution (Gaussian).
    mean_prior_ : array-like, shape (n_features,)
The prior on the mean distribution (Gaussian).
degrees_of_freedom_prior_ : float
The prior of the number of degrees of freedom on the covariance
distributions (Wishart).
degrees_of_freedom_ : array-like, shape (n_components,)
        The number of degrees of freedom of each component in the model.
covariance_prior_ : float or array-like
The prior on the covariance distribution (Wishart).
The shape depends on `covariance_type`::
(n_features, n_features) if 'full',
(n_features, n_features) if 'tied',
(n_features) if 'diag',
float if 'spherical'
See Also
--------
GaussianMixture : Finite Gaussian mixture fit with EM.
References
----------
.. [1] `Bishop, Christopher M. (2006). "Pattern recognition and machine
learning". Vol. 4 No. 4. New York: Springer.
<http://www.springer.com/kr/book/9780387310732>`_
.. [2] `Hagai Attias. (2000). "A Variational Bayesian Framework for
Graphical Models". In Advances in Neural Information Processing
Systems 12.
<http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.36.2841&rep=rep1&type=pdf>`_
.. [3] `Blei, David M. and Michael I. Jordan. (2006). "Variational
inference for Dirichlet process mixtures". Bayesian analysis 1.1
<http://www.cs.princeton.edu/courses/archive/fall11/cos597C/reading/BleiJordan2005.pdf>`_
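    Examples
    --------
    A minimal illustrative sketch (editorial addition, not part of the
    original docstring; the toy data and random_state are made up):
    >>> import numpy as np
    >>> from sklearn.mixture import BayesianGaussianMixture
    >>> X = np.array([[1., 2.], [1., 4.], [1., 0.],
    ...               [10., 2.], [10., 4.], [10., 0.]])
    >>> bgm = BayesianGaussianMixture(n_components=2, random_state=42).fit(X)
    >>> bgm.means_.shape
    (2, 2)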
"""
def __init__(self, n_components=1, covariance_type='full', tol=1e-3,
reg_covar=1e-6, max_iter=100, n_init=1, init_params='kmeans',
weight_concentration_prior_type='dirichlet_process',
weight_concentration_prior=None,
mean_precision_prior=None, mean_prior=None,
degrees_of_freedom_prior=None, covariance_prior=None,
random_state=None, warm_start=False, verbose=0,
verbose_interval=10):
super(BayesianGaussianMixture, self).__init__(
n_components=n_components, tol=tol, reg_covar=reg_covar,
max_iter=max_iter, n_init=n_init, init_params=init_params,
random_state=random_state, warm_start=warm_start,
verbose=verbose, verbose_interval=verbose_interval)
self.covariance_type = covariance_type
self.weight_concentration_prior_type = weight_concentration_prior_type
self.weight_concentration_prior = weight_concentration_prior
self.mean_precision_prior = mean_precision_prior
self.mean_prior = mean_prior
self.degrees_of_freedom_prior = degrees_of_freedom_prior
self.covariance_prior = covariance_prior
def _check_parameters(self, X):
"""Check that the parameters are well defined.
Parameters
----------
X : array-like, shape (n_samples, n_features)
"""
if self.covariance_type not in ['spherical', 'tied', 'diag', 'full']:
raise ValueError("Invalid value for 'covariance_type': %s "
"'covariance_type' should be in "
"['spherical', 'tied', 'diag', 'full']"
% self.covariance_type)
if (self.weight_concentration_prior_type not in
['dirichlet_process', 'dirichlet_distribution']):
raise ValueError(
"Invalid value for 'weight_concentration_prior_type': %s "
"'weight_concentration_prior_type' should be in "
"['dirichlet_process', 'dirichlet_distribution']"
% self.weight_concentration_prior_type)
self._check_weights_parameters()
self._check_means_parameters(X)
self._check_precision_parameters(X)
self._checkcovariance_prior_parameter(X)
def _check_weights_parameters(self):
"""Check the parameter of the Dirichlet distribution."""
if self.weight_concentration_prior is None:
self.weight_concentration_prior_ = 1. / self.n_components
elif self.weight_concentration_prior > 0.:
self.weight_concentration_prior_ = (
self.weight_concentration_prior)
else:
raise ValueError("The parameter 'weight_concentration_prior' "
"should be greater than 0., but got %.3f."
% self.weight_concentration_prior)
def _check_means_parameters(self, X):
"""Check the parameters of the Gaussian distribution.
Parameters
----------
X : array-like, shape (n_samples, n_features)
"""
_, n_features = X.shape
if self.mean_precision_prior is None:
self.mean_precision_prior_ = 1.
elif self.mean_precision_prior > 0.:
self.mean_precision_prior_ = self.mean_precision_prior
else:
raise ValueError("The parameter 'mean_precision_prior' should be "
"greater than 0., but got %.3f."
% self.mean_precision_prior)
if self.mean_prior is None:
self.mean_prior_ = X.mean(axis=0)
else:
self.mean_prior_ = check_array(self.mean_prior,
dtype=[np.float64, np.float32],
ensure_2d=False)
_check_shape(self.mean_prior_, (n_features, ), 'means')
def _check_precision_parameters(self, X):
"""Check the prior parameters of the precision distribution.
Parameters
----------
X : array-like, shape (n_samples, n_features)
"""
_, n_features = X.shape
if self.degrees_of_freedom_prior is None:
self.degrees_of_freedom_prior_ = n_features
elif self.degrees_of_freedom_prior > n_features - 1.:
self.degrees_of_freedom_prior_ = self.degrees_of_freedom_prior
else:
raise ValueError("The parameter 'degrees_of_freedom_prior' "
"should be greater than %d, but got %.3f."
% (n_features - 1, self.degrees_of_freedom_prior))
def _checkcovariance_prior_parameter(self, X):
"""Check the `covariance_prior_`.
Parameters
----------
X : array-like, shape (n_samples, n_features)
"""
_, n_features = X.shape
if self.covariance_prior is None:
self.covariance_prior_ = {
'full': np.atleast_2d(np.cov(X.T)),
'tied': np.atleast_2d(np.cov(X.T)),
'diag': np.var(X, axis=0, ddof=1),
'spherical': np.var(X, axis=0, ddof=1).mean()
}[self.covariance_type]
elif self.covariance_type in ['full', 'tied']:
self.covariance_prior_ = check_array(
self.covariance_prior, dtype=[np.float64, np.float32],
ensure_2d=False)
_check_shape(self.covariance_prior_, (n_features, n_features),
'%s covariance_prior' % self.covariance_type)
_check_precision_matrix(self.covariance_prior_,
self.covariance_type)
elif self.covariance_type == 'diag':
self.covariance_prior_ = check_array(
self.covariance_prior, dtype=[np.float64, np.float32],
ensure_2d=False)
_check_shape(self.covariance_prior_, (n_features,),
'%s covariance_prior' % self.covariance_type)
_check_precision_positivity(self.covariance_prior_,
self.covariance_type)
# spherical case
elif self.covariance_prior > 0.:
self.covariance_prior_ = self.covariance_prior
else:
raise ValueError("The parameter 'spherical covariance_prior' "
"should be greater than 0., but got %.3f."
% self.covariance_prior)
def _initialize(self, X, resp):
"""Initialization of the mixture parameters.
Parameters
----------
X : array-like, shape (n_samples, n_features)
resp : array-like, shape (n_samples, n_components)
"""
nk, xk, sk = _estimate_gaussian_parameters(X, resp, self.reg_covar,
self.covariance_type)
self._estimate_weights(nk)
self._estimate_means(nk, xk)
self._estimate_precisions(nk, xk, sk)
def _estimate_weights(self, nk):
"""Estimate the parameters of the Dirichlet distribution.
Parameters
----------
nk : array-like, shape (n_components,)
"""
if self.weight_concentration_prior_type == 'dirichlet_process':
# For dirichlet process weight_concentration will be a tuple
# containing the two parameters of the beta distribution
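            # For component k these are gamma_1k = 1 + N_k and
            # gamma_2k = alpha_0 + sum_{j > k} N_j, i.e. the truncated
            # stick-breaking update of Blei & Jordan (2006), reference [3].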
self.weight_concentration_ = (
1. + nk,
(self.weight_concentration_prior_ +
np.hstack((np.cumsum(nk[::-1])[-2::-1], 0))))
else:
            # case of a variational Gaussian mixture with a Dirichlet distribution
self.weight_concentration_ = self.weight_concentration_prior_ + nk
def _estimate_means(self, nk, xk):
"""Estimate the parameters of the Gaussian distribution.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
"""
self.mean_precision_ = self.mean_precision_prior_ + nk
self.means_ = ((self.mean_precision_prior_ * self.mean_prior_ +
nk[:, np.newaxis] * xk) /
self.mean_precision_[:, np.newaxis])
def _estimate_precisions(self, nk, xk, sk):
"""Estimate the precisions parameters of the precision distribution.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
sk : array-like
The shape depends of `covariance_type`:
'full' : (n_components, n_features, n_features)
'tied' : (n_features, n_features)
'diag' : (n_components, n_features)
'spherical' : (n_components,)
"""
{"full": self._estimate_wishart_full,
"tied": self._estimate_wishart_tied,
"diag": self._estimate_wishart_diag,
"spherical": self._estimate_wishart_spherical
}[self.covariance_type](nk, xk, sk)
self.precisions_cholesky_ = _compute_precision_cholesky(
self.covariances_, self.covariance_type)
def _estimate_wishart_full(self, nk, xk, sk):
"""Estimate the full Wishart distribution parameters.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
sk : array-like, shape (n_components, n_features, n_features)
"""
_, n_features = xk.shape
        # Warning: some printings of Bishop's book have a typo in formula
        # 10.63; `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` is the
        # correct formula.
self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk
self.covariances_ = np.empty((self.n_components, n_features,
n_features))
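        # Bishop 10.62, with beta_0 = mean_precision_prior_ and
        # beta_k = mean_precision_[k] = beta_0 + N_k:
        #   W_k^{-1} = W_0^{-1} + N_k * S_k
        #       + (beta_0 * N_k / beta_k) * (xbar_k - m_0)(xbar_k - m_0)^T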
for k in range(self.n_components):
diff = xk[k] - self.mean_prior_
self.covariances_[k] = (self.covariance_prior_ + nk[k] * sk[k] +
nk[k] * self.mean_precision_prior_ /
self.mean_precision_[k] * np.outer(diff,
diff))
        # Contrary to the original Bishop book, we normalize the covariances
self.covariances_ /= (
self.degrees_of_freedom_[:, np.newaxis, np.newaxis])
def _estimate_wishart_tied(self, nk, xk, sk):
"""Estimate the tied Wishart distribution parameters.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
sk : array-like, shape (n_features, n_features)
"""
_, n_features = xk.shape
        # Warning: some printings of Bishop's book have a typo in formula
        # 10.63; `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` is the
        # correct formula.
self.degrees_of_freedom_ = (
self.degrees_of_freedom_prior_ + nk.sum() / self.n_components)
diff = xk - self.mean_prior_
self.covariances_ = (
self.covariance_prior_ + sk * nk.sum() / self.n_components +
self.mean_precision_prior_ / self.n_components * np.dot(
(nk / self.mean_precision_) * diff.T, diff))
        # Contrary to the original Bishop book, we normalize the covariances
self.covariances_ /= self.degrees_of_freedom_
def _estimate_wishart_diag(self, nk, xk, sk):
"""Estimate the diag Wishart distribution parameters.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
sk : array-like, shape (n_components, n_features)
"""
_, n_features = xk.shape
        # Warning: some printings of Bishop's book have a typo in formula
        # 10.63; `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` is the
        # correct formula.
self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk
diff = xk - self.mean_prior_
self.covariances_ = (
self.covariance_prior_ + nk[:, np.newaxis] * (
sk + (self.mean_precision_prior_ /
self.mean_precision_)[:, np.newaxis] * np.square(diff)))
        # Contrary to the original Bishop book, we normalize the covariances
self.covariances_ /= self.degrees_of_freedom_[:, np.newaxis]
def _estimate_wishart_spherical(self, nk, xk, sk):
"""Estimate the spherical Wishart distribution parameters.
Parameters
----------
nk : array-like, shape (n_components,)
xk : array-like, shape (n_components, n_features)
sk : array-like, shape (n_components,)
"""
_, n_features = xk.shape
        # Warning: some printings of Bishop's book have a typo in formula
        # 10.63; `degrees_of_freedom_k = degrees_of_freedom_0 + Nk` is the
        # correct formula.
self.degrees_of_freedom_ = self.degrees_of_freedom_prior_ + nk
diff = xk - self.mean_prior_
self.covariances_ = (
self.covariance_prior_ + nk * (
sk + self.mean_precision_prior_ / self.mean_precision_ *
np.mean(np.square(diff), 1)))
        # Contrary to the original Bishop book, we normalize the covariances
self.covariances_ /= self.degrees_of_freedom_
def _check_is_fitted(self):
check_is_fitted(self, ['weight_concentration_', 'mean_precision_',
'means_', 'degrees_of_freedom_',
'covariances_', 'precisions_',
'precisions_cholesky_'])
def _m_step(self, X, log_resp):
"""M step.
Parameters
----------
X : array-like, shape (n_samples, n_features)
log_resp : array-like, shape (n_samples, n_components)
Logarithm of the posterior probabilities (or responsibilities) of
            each sample in X.
"""
n_samples, _ = X.shape
nk, xk, sk = _estimate_gaussian_parameters(
X, np.exp(log_resp), self.reg_covar, self.covariance_type)
self._estimate_weights(nk)
self._estimate_means(nk, xk)
self._estimate_precisions(nk, xk, sk)
def _estimate_log_weights(self):
if self.weight_concentration_prior_type == 'dirichlet_process':
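            # E[ln pi_k] under the stick-breaking representation:
            #   E[ln v_k] + sum_{j < k} E[ln(1 - v_j)], with
            #   E[ln v_k] = psi(gamma_1k) - psi(gamma_1k + gamma_2k).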
digamma_sum = digamma(self.weight_concentration_[0] +
self.weight_concentration_[1])
digamma_a = digamma(self.weight_concentration_[0])
digamma_b = digamma(self.weight_concentration_[1])
return (digamma_a - digamma_sum +
np.hstack((0, np.cumsum(digamma_b - digamma_sum)[:-1])))
else:
            # case of a variational Gaussian mixture with a Dirichlet distribution
return (digamma(self.weight_concentration_) -
digamma(np.sum(self.weight_concentration_)))
def _estimate_log_prob(self, X):
_, n_features = X.shape
# We remove `n_features * np.log(self.degrees_of_freedom_)` because
# the precision matrix is normalized
log_gauss = (_estimate_log_gaussian_prob(
X, self.means_, self.precisions_cholesky_, self.covariance_type) -
.5 * n_features * np.log(self.degrees_of_freedom_))
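        # log_lambda below is, roughly, E[ln|Lambda_k|] (Bishop 10.65) without
        # the ln|W_k| term, which is already carried by precisions_cholesky_
        # inside log_gauss above (editorial note).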
log_lambda = n_features * np.log(2.) + np.sum(digamma(
.5 * (self.degrees_of_freedom_ -
np.arange(0, n_features)[:, np.newaxis])), 0)
return log_gauss + .5 * (log_lambda -
n_features / self.mean_precision_)
def _compute_lower_bound(self, log_resp, log_prob_norm):
"""Estimate the lower bound of the model.
        The lower bound on the likelihood (of the training data with respect
        to the model) is used to detect convergence and has to increase at
        each iteration.
        Parameters
        ----------
log_resp : array, shape (n_samples, n_components)
Logarithm of the posterior probabilities (or responsibilities) of
            each sample in X.
log_prob_norm : float
Logarithm of the probability of each sample in X.
Returns
-------
lower_bound : float
"""
# Contrary to the original formula, we have done some simplification
# and removed all the constant terms.
n_features, = self.mean_prior_.shape
# We removed `.5 * n_features * np.log(self.degrees_of_freedom_)`
# because the precision matrix is normalized.
log_det_precisions_chol = (_compute_log_det_cholesky(
self.precisions_cholesky_, self.covariance_type, n_features) -
.5 * n_features * np.log(self.degrees_of_freedom_))
if self.covariance_type == 'tied':
log_wishart = self.n_components * np.float64(_log_wishart_norm(
self.degrees_of_freedom_, log_det_precisions_chol, n_features))
else:
log_wishart = np.sum(_log_wishart_norm(
self.degrees_of_freedom_, log_det_precisions_chol, n_features))
if self.weight_concentration_prior_type == 'dirichlet_process':
log_norm_weight = -np.sum(betaln(self.weight_concentration_[0],
self.weight_concentration_[1]))
else:
log_norm_weight = _log_dirichlet_norm(self.weight_concentration_)
return (-np.sum(np.exp(log_resp) * log_resp) -
log_wishart - log_norm_weight -
0.5 * n_features * np.sum(np.log(self.mean_precision_)))
def _get_parameters(self):
return (self.weight_concentration_,
self.mean_precision_, self.means_,
self.degrees_of_freedom_, self.covariances_,
self.precisions_cholesky_)
def _set_parameters(self, params):
(self.weight_concentration_, self.mean_precision_, self.means_,
self.degrees_of_freedom_, self.covariances_,
self.precisions_cholesky_) = params
# Weights computation
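        # Expected mixture weights under the truncated stick-breaking
        # representation: E[v_k] = gamma_1k / (gamma_1k + gamma_2k) and
        # weights_k = E[v_k] * prod_{j < k} (1 - E[v_j]), renormalized below.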
if self.weight_concentration_prior_type == "dirichlet_process":
weight_dirichlet_sum = (self.weight_concentration_[0] +
self.weight_concentration_[1])
tmp = self.weight_concentration_[1] / weight_dirichlet_sum
self.weights_ = (
self.weight_concentration_[0] / weight_dirichlet_sum *
np.hstack((1, np.cumprod(tmp[:-1]))))
self.weights_ /= np.sum(self.weights_)
else:
            self.weights_ = (self.weight_concentration_ /
np.sum(self.weight_concentration_))
# Precisions matrices computation
if self.covariance_type == 'full':
self.precisions_ = np.array([
np.dot(prec_chol, prec_chol.T)
for prec_chol in self.precisions_cholesky_])
elif self.covariance_type == 'tied':
self.precisions_ = np.dot(self.precisions_cholesky_,
self.precisions_cholesky_.T)
else:
self.precisions_ = self.precisions_cholesky_ ** 2
| bsd-3-clause |
guybedford/babel | packages/babel-core/test/fixtures/config/config-files/babelrc-js/.babelrc.js | 40 | module.exports = {
comments: true,
};
| mit |
vebin/odata.net | test/FunctionalTests/Taupo/Source/Taupo.Query/Contracts/IQueryTypeWithProperties.cs | 780 | //---------------------------------------------------------------------
// <copyright file="IQueryTypeWithProperties.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.Taupo.Query.Contracts
{
using System.Collections.Generic;
/// <summary>
/// Query type with properties that can be projected in query
/// </summary>
public interface IQueryTypeWithProperties
{
/// <summary>
/// Gets the collection of type properties.
/// </summary>
IList<QueryProperty> Properties { get; }
}
}
| mit |
sergeybykov/orleans | test/Grains/TestGrains/MultipleConstructorsSimpleGrain.cs | 790 | using Microsoft.Extensions.Logging;
using UnitTests.GrainInterfaces;
namespace UnitTests.Grains
{
public class MultipleConstructorsSimpleGrain : SimpleGrain, ISimpleGrain
{
public const string MultipleConstructorsSimpleGrainPrefix = "UnitTests.Grains.MultipleConstructorsS";
public const int ValueUsedByParameterlessConstructor = 42;
public MultipleConstructorsSimpleGrain(ILoggerFactory loggerFactory)
: this(loggerFactory, ValueUsedByParameterlessConstructor)
{
// orleans will use this constructor when DI is not configured
}
public MultipleConstructorsSimpleGrain(ILoggerFactory loggerFactory, int initialValueofA) : base(loggerFactory)
{
base.A = initialValueofA;
}
}
}
| mit |
hotchandanisagar/odata.net | test/FunctionalTests/Service/Microsoft/OData/Service/ServiceActionRights.cs | 754 | //---------------------------------------------------------------------
// <copyright file="ServiceActionRights.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.OData.Service
{
using System;
/// <summary> Access rights for service actions. </summary>
[Flags]
public enum ServiceActionRights
{
/// <summary>Specifies no rights on this service action.</summary>
None = 0,
/// <summary>Specifies the right to execute the service action.</summary>
Invoke = 1,
}
}
| mit |
hotchandanisagar/odata.net | test/FunctionalTests/Tests/DataOData/Common/OData/Json/TextAnnotations/JsonStartObjectTextAnnotation.cs | 1172 | //---------------------------------------------------------------------
// <copyright file="JsonStartObjectTextAnnotation.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------
namespace Microsoft.Test.Taupo.OData.Json.TextAnnotations
{
#region Namespaces
using System.IO;
using Microsoft.Test.Taupo.Astoria.Contracts.Json;
#endregion Namespaces
/// <summary>
/// Annotation for start of the object - stores the start curly bracket
/// </summary>
public class JsonStartObjectTextAnnotation : JsonTextAnnotation
{
/// <summary>
/// Returns the default text annotation for the start object.
/// </summary>
/// <returns>The default text representation of the start object.</returns>
public static JsonStartObjectTextAnnotation GetDefault(TextWriter textWriter)
{
return new JsonStartObjectTextAnnotation() { Text = "{" + textWriter.NewLine };
}
}
}
| mit |
akaspin/soil | vendor/github.com/hashicorp/consul/agent/consul/state/state_store_test.go | 4873 | package state
import (
crand "crypto/rand"
"fmt"
"testing"
"time"
"github.com/hashicorp/consul/agent/structs"
"github.com/hashicorp/consul/types"
"github.com/hashicorp/go-memdb"
)
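// testUUID returns a random 16-byte value formatted as a UUID-style string;
// it is only used to generate unique identifiers for test fixtures.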
func testUUID() string {
buf := make([]byte, 16)
if _, err := crand.Read(buf); err != nil {
panic(fmt.Errorf("failed to read random bytes: %v", err))
}
return fmt.Sprintf("%08x-%04x-%04x-%04x-%12x",
buf[0:4],
buf[4:6],
buf[6:8],
buf[8:10],
buf[10:16])
}
func testStateStore(t *testing.T) *Store {
s, err := NewStateStore(nil)
if err != nil {
t.Fatalf("err: %s", err)
}
if s == nil {
t.Fatalf("missing state store")
}
return s
}
func testRegisterNode(t *testing.T, s *Store, idx uint64, nodeID string) {
testRegisterNodeWithMeta(t, s, idx, nodeID, nil)
}
func testRegisterNodeWithMeta(t *testing.T, s *Store, idx uint64, nodeID string, meta map[string]string) {
node := &structs.Node{Node: nodeID, Meta: meta}
if err := s.EnsureNode(idx, node); err != nil {
t.Fatalf("err: %s", err)
}
tx := s.db.Txn(false)
defer tx.Abort()
n, err := tx.First("nodes", "id", nodeID)
if err != nil {
t.Fatalf("err: %s", err)
}
if result, ok := n.(*structs.Node); !ok || result.Node != nodeID {
t.Fatalf("bad node: %#v", result)
}
}
func testRegisterService(t *testing.T, s *Store, idx uint64, nodeID, serviceID string) {
svc := &structs.NodeService{
ID: serviceID,
Service: serviceID,
Address: "1.1.1.1",
Port: 1111,
}
if err := s.EnsureService(idx, nodeID, svc); err != nil {
t.Fatalf("err: %s", err)
}
tx := s.db.Txn(false)
defer tx.Abort()
service, err := tx.First("services", "id", nodeID, serviceID)
if err != nil {
t.Fatalf("err: %s", err)
}
if result, ok := service.(*structs.ServiceNode); !ok ||
result.Node != nodeID ||
result.ServiceID != serviceID {
t.Fatalf("bad service: %#v", result)
}
}
func testRegisterCheck(t *testing.T, s *Store, idx uint64,
nodeID string, serviceID string, checkID types.CheckID, state string) {
chk := &structs.HealthCheck{
Node: nodeID,
CheckID: checkID,
ServiceID: serviceID,
Status: state,
}
if err := s.EnsureCheck(idx, chk); err != nil {
t.Fatalf("err: %s", err)
}
tx := s.db.Txn(false)
defer tx.Abort()
c, err := tx.First("checks", "id", nodeID, string(checkID))
if err != nil {
t.Fatalf("err: %s", err)
}
if result, ok := c.(*structs.HealthCheck); !ok ||
result.Node != nodeID ||
result.ServiceID != serviceID ||
result.CheckID != checkID {
t.Fatalf("bad check: %#v", result)
}
}
func testSetKey(t *testing.T, s *Store, idx uint64, key, value string) {
entry := &structs.DirEntry{Key: key, Value: []byte(value)}
if err := s.KVSSet(idx, entry); err != nil {
t.Fatalf("err: %s", err)
}
tx := s.db.Txn(false)
defer tx.Abort()
e, err := tx.First("kvs", "id", key)
if err != nil {
t.Fatalf("err: %s", err)
}
if result, ok := e.(*structs.DirEntry); !ok || result.Key != key {
t.Fatalf("bad kvs entry: %#v", result)
}
}
// watchFired is a helper for unit tests that returns if the given watch set
// fired (it doesn't care which watch actually fired). This uses a fixed
// timeout since we already expect the event happened before calling this and
// just need to distinguish a fire from a timeout. We do need a little time to
// allow the watch to set up any goroutines, though.
func watchFired(ws memdb.WatchSet) bool {
timedOut := ws.Watch(time.After(50 * time.Millisecond))
return !timedOut
}
func TestStateStore_Restore_Abort(t *testing.T) {
s := testStateStore(t)
// The detailed restore functions are tested below, this just checks
// that abort works.
restore := s.Restore()
entry := &structs.DirEntry{
Key: "foo",
Value: []byte("bar"),
RaftIndex: structs.RaftIndex{
ModifyIndex: 5,
},
}
if err := restore.KVS(entry); err != nil {
t.Fatalf("err: %s", err)
}
restore.Abort()
idx, entries, err := s.KVSList(nil, "")
if err != nil {
t.Fatalf("err: %s", err)
}
if idx != 0 {
t.Fatalf("bad index: %d", idx)
}
if len(entries) != 0 {
t.Fatalf("bad: %#v", entries)
}
}
func TestStateStore_Abandon(t *testing.T) {
s := testStateStore(t)
abandonCh := s.AbandonCh()
s.Abandon()
select {
case <-abandonCh:
default:
t.Fatalf("bad")
}
}
func TestStateStore_maxIndex(t *testing.T) {
s := testStateStore(t)
testRegisterNode(t, s, 0, "foo")
testRegisterNode(t, s, 1, "bar")
testRegisterService(t, s, 2, "foo", "consul")
if max := s.maxIndex("nodes", "services"); max != 2 {
t.Fatalf("bad max: %d", max)
}
}
func TestStateStore_indexUpdateMaxTxn(t *testing.T) {
s := testStateStore(t)
testRegisterNode(t, s, 0, "foo")
testRegisterNode(t, s, 1, "bar")
tx := s.db.Txn(true)
if err := indexUpdateMaxTxn(tx, 3, "nodes"); err != nil {
t.Fatalf("err: %s", err)
}
tx.Commit()
if max := s.maxIndex("nodes"); max != 3 {
t.Fatalf("bad max: %d", max)
}
}
| mit |
ikkosatrio/nexapp | assets/js/pages/uploader_bootstrap.js | 9588 | /* ------------------------------------------------------------------------------
*
* # Bootstrap multiple file uploader
*
* Specific JS code additions for uploader_bootstrap.html page
*
* Version: 1.2
* Latest update: Aug 10, 2016
*
* ---------------------------------------------------------------------------- */
$(function() {
//
// Define variables
//
// Modal template
var modalTemplate = '<div class="modal-dialog modal-lg" role="document">\n' +
' <div class="modal-content">\n' +
' <div class="modal-header">\n' +
' <div class="kv-zoom-actions btn-group">{toggleheader}{fullscreen}{borderless}{close}</div>\n' +
' <h6 class="modal-title">{heading} <small><span class="kv-zoom-title"></span></small></h6>\n' +
' </div>\n' +
' <div class="modal-body">\n' +
' <div class="floating-buttons btn-group"></div>\n' +
' <div class="kv-zoom-body file-zoom-content"></div>\n' + '{prev} {next}\n' +
' </div>\n' +
' </div>\n' +
'</div>\n';
// Buttons inside zoom modal
var previewZoomButtonClasses = {
toggleheader: 'btn btn-default btn-icon btn-xs btn-header-toggle',
fullscreen: 'btn btn-default btn-icon btn-xs',
borderless: 'btn btn-default btn-icon btn-xs',
close: 'btn btn-default btn-icon btn-xs'
};
// Icons inside zoom modal classes
var previewZoomButtonIcons = {
prev: '<i class="icon-arrow-left32"></i>',
next: '<i class="icon-arrow-right32"></i>',
toggleheader: '<i class="icon-menu-open"></i>',
fullscreen: '<i class="icon-screen-full"></i>',
borderless: '<i class="icon-alignment-unalign"></i>',
close: '<i class="icon-cross3"></i>'
};
// File actions
var fileActionSettings = {
zoomClass: 'btn btn-link btn-xs btn-icon',
zoomIcon: '<i class="icon-zoomin3"></i>',
dragClass: 'btn btn-link btn-xs btn-icon',
dragIcon: '<i class="icon-three-bars"></i>',
removeClass: 'btn btn-link btn-icon btn-xs',
removeIcon: '<i class="icon-trash"></i>',
indicatorNew: '<i class="icon-file-plus text-slate"></i>',
indicatorSuccess: '<i class="icon-checkmark3 file-icon-large text-success"></i>',
indicatorError: '<i class="icon-cross2 text-danger"></i>',
indicatorLoading: '<i class="icon-spinner2 spinner text-muted"></i>'
};
//
// Basic example
//
$('.file-input').fileinput({
browseLabel: 'Browse',
browseIcon: '<i class="icon-file-plus"></i>',
uploadIcon: '<i class="icon-file-upload2"></i>',
removeIcon: '<i class="icon-cross3"></i>',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
initialCaption: "No file selected",
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// Custom layout
//
$('.file-input-custom').fileinput({
previewFileType: 'image',
browseLabel: 'Select',
browseClass: 'btn bg-slate-700',
browseIcon: '<i class="icon-image2 position-left"></i> ',
removeLabel: 'Remove',
removeClass: 'btn btn-danger',
removeIcon: '<i class="icon-cancel-square position-left"></i> ',
uploadClass: 'btn bg-teal-400',
uploadIcon: '<i class="icon-file-upload position-left"></i> ',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
initialCaption: "Please select image",
mainClass: 'input-group',
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// Template modifications
//
$('.file-input-advanced').fileinput({
browseLabel: 'Browse',
browseClass: 'btn btn-default',
removeClass: 'btn btn-default',
uploadClass: 'btn bg-success-400',
browseIcon: '<i class="icon-file-plus"></i>',
uploadIcon: '<i class="icon-file-upload2"></i>',
removeIcon: '<i class="icon-cross3"></i>',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
main1: "{preview}\n" +
"<div class='input-group {class}'>\n" +
" <div class='input-group-btn'>\n" +
" {browse}\n" +
" </div>\n" +
" {caption}\n" +
" <div class='input-group-btn'>\n" +
" {upload}\n" +
" {remove}\n" +
" </div>\n" +
"</div>",
modal: modalTemplate
},
initialCaption: "No file selected",
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// Custom file extensions
//
$(".file-input-extensions").fileinput({
browseLabel: 'Browse',
browseClass: 'btn btn-primary',
uploadClass: 'btn btn-default',
browseIcon: '<i class="icon-file-plus"></i>',
uploadIcon: '<i class="icon-file-upload2"></i>',
removeIcon: '<i class="icon-cross3"></i>',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
maxFilesNum: 10,
allowedFileExtensions: ["jpg", "gif", "png", "txt"],
initialCaption: "No file selected",
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// Always display preview
//
$(".file-input-preview").fileinput({
browseLabel: 'Browse',
browseIcon: '<i class="icon-file-plus"></i>',
uploadIcon: '<i class="icon-file-upload2"></i>',
removeIcon: '<i class="icon-cross3"></i>',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
initialPreview: [
"assets/images/demo/images/1.png",
"assets/images/demo/images/2.png",
],
initialPreviewConfig: [
{caption: "Jane.jpg", size: 930321, key: 1, showDrag: false},
{caption: "Anna.jpg", size: 1218822, key: 2, showDrag: false}
],
initialPreviewAsData: true,
overwriteInitial: false,
maxFileSize: 100,
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// Display preview on load
//
$(".file-input-overwrite").fileinput({
browseLabel: 'Browse',
browseIcon: '<i class="icon-file-plus"></i>',
uploadIcon: '<i class="icon-file-upload2"></i>',
removeIcon: '<i class="icon-cross3"></i>',
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
initialPreview: [
"assets/images/demo/images/1.png",
"assets/images/demo/images/2.png"
],
initialPreviewConfig: [
{caption: "Jane.jpg", size: 930321, key: 1, showDrag: false},
{caption: "Anna.jpg", size: 1218822, key: 2, showDrag: false}
],
initialPreviewAsData: true,
overwriteInitial: true,
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons,
fileActionSettings: fileActionSettings
});
//
// AJAX upload
//
$(".file-input-ajax").fileinput({
uploadUrl: "http://localhost", // server upload action
uploadAsync: true,
maxFileCount: 5,
initialPreview: [],
fileActionSettings: {
removeIcon: '<i class="icon-bin"></i>',
removeClass: 'btn btn-link btn-xs btn-icon',
uploadIcon: '<i class="icon-upload"></i>',
uploadClass: 'btn btn-link btn-xs btn-icon',
indicatorNew: '<i class="icon-file-plus text-slate"></i>',
indicatorSuccess: '<i class="icon-checkmark3 file-icon-large text-success"></i>',
indicatorError: '<i class="icon-cross2 text-danger"></i>',
indicatorLoading: '<i class="icon-spinner2 spinner text-muted"></i>',
},
layoutTemplates: {
icon: '<i class="icon-file-check"></i>',
modal: modalTemplate
},
initialCaption: "No file selected",
previewZoomButtonClasses: previewZoomButtonClasses,
previewZoomButtonIcons: previewZoomButtonIcons
});
//
// Misc
//
// Disable/enable button
$("#btn-modify").on("click", function() {
$btn = $(this);
if ($btn.text() == "Disable file input") {
$("#file-input-methods").fileinput("disable");
$btn.html("Enable file input");
alert("Hurray! I have disabled the input and hidden the upload button.");
}
else {
$("#file-input-methods").fileinput("enable");
$btn.html("Disable file input");
alert("Hurray! I have reverted back the input to enabled with the upload button.");
}
});
});
| mit |
PanJ/SimplerCityGlide | node_modules/lodash-es/nthArg.js | 715 | import baseNth from './_baseNth.js';
import rest from './rest.js';
import toInteger from './toInteger.js';
/**
* Creates a function that gets the argument at index `n`. If `n` is negative,
* the nth argument from the end is returned.
*
* @static
* @memberOf _
* @since 4.0.0
* @category Util
* @param {number} [n=0] The index of the argument to return.
* @returns {Function} Returns the new pass-thru function.
* @example
*
* var func = _.nthArg(1);
* func('a', 'b', 'c', 'd');
* // => 'b'
*
* var func = _.nthArg(-2);
* func('a', 'b', 'c', 'd');
* // => 'c'
*/
function nthArg(n) {
n = toInteger(n);
return rest(function(args) {
return baseNth(args, n);
});
}
export default nthArg;
| mit |
Microsoft/CodeContracts | Microsoft.Research/Contracts/MsCorlib/Sources/System.Security.Principal.IdentityReference.cs | 2911 | // CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Security.Principal.IdentityReference.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Security.Principal
{
abstract public partial class IdentityReference
{
#region Methods and constructors
public static bool operator != (IdentityReference left, IdentityReference right)
{
Contract.Ensures(Contract.Result<bool>() == ((left.Equals(right)) == false));
return default(bool);
}
public static bool operator == (IdentityReference left, IdentityReference right)
{
return default(bool);
}
public abstract override bool Equals(Object o);
public abstract override int GetHashCode();
internal IdentityReference()
{
}
public abstract bool IsValidTargetType(Type targetType);
public abstract override string ToString();
public abstract IdentityReference Translate(Type targetType);
#endregion
#region Properties and indexers
public abstract string Value
{
get;
}
#endregion
}
}
| mit |
pygillier/bolt | src/Storage/QuerySet.php | 1208 | <?php
namespace Bolt\Storage;
use Doctrine\DBAL\Query\QueryBuilder;
/**
* This class works keeps a set of queries that will eventually
* be executed sequentially.
*/
class QuerySet extends \ArrayIterator
{
/**
* @param QueryBuilder $qb A QueryBuilder instance
*/
public function append($qb)
{
if (!$qb instanceof QueryBuilder) {
throw new \InvalidArgumentException("QuerySet will only accept QueryBuilder instances", 1);
}
parent::append($qb);
}
/**
* Execute function, iterate the queries, and execute them sequentially
*
* @throws \Exception
*
* @return \Doctrine\DBAL\Driver\Statement|int|null
*/
public function execute()
{
$result = null;
// Only return the result of the primary query
foreach ($this as $query) {
/** @var QueryBuilder $query */
try {
if ($result === null) {
$result = $query->execute();
} else {
$query->execute();
}
} catch (\Exception $e) {
throw $e;
}
}
return $result;
}
}
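/*
 * Example usage (an illustrative sketch only; assumes an already configured
 * Doctrine DBAL connection in $conn and hypothetical table names):
 *
 *   $set = new QuerySet();
 *   $set->append($conn->createQueryBuilder()->delete('bolt_log_change'));
 *   $set->append($conn->createQueryBuilder()->delete('bolt_log_system'));
 *   $result = $set->execute(); // only the first (primary) query's result is returned
 */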
| mit |
r1k/vlc | modules/demux/adaptive/playlist/BaseAdaptationSet.cpp | 3324 | /*
* BaseAdaptationSet.cpp
*****************************************************************************
* Copyright (C) 2010 - 2011 Klagenfurt University
*
* Created on: Aug 10, 2010
* Authors: Christopher Mueller <[email protected]>
* Christian Timmerer <[email protected]>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301, USA.
*****************************************************************************/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "BaseAdaptationSet.h"
#include "BaseRepresentation.h"
#include <vlc_common.h>
#include <vlc_arrays.h>
#include "SegmentTemplate.h"
#include "BasePeriod.h"
#include "Inheritables.hpp"
#include <algorithm>
using namespace adaptive;
using namespace adaptive::playlist;
BaseAdaptationSet::BaseAdaptationSet(BasePeriod *period) :
CommonAttributesElements(),
SegmentInformation( period ),
isBitstreamSwitching( false )
{
}
BaseAdaptationSet::~BaseAdaptationSet ()
{
vlc_delete_all( representations );
childs.clear();
}
StreamFormat BaseAdaptationSet::getStreamFormat() const
{
if (!representations.empty())
return representations.front()->getStreamFormat();
else
return StreamFormat();
}
std::vector<BaseRepresentation*>& BaseAdaptationSet::getRepresentations()
{
return representations;
}
BaseRepresentation * BaseAdaptationSet::getRepresentationByID(const ID &id)
{
std::vector<BaseRepresentation *>::const_iterator it;
for(it = representations.begin(); it != representations.end(); ++it)
{
if((*it)->getID() == id)
return *it;
}
return NULL;
}
void BaseAdaptationSet::addRepresentation(BaseRepresentation *rep)
{
representations.insert(std::upper_bound(representations.begin(),
representations.end(),
rep,
BaseRepresentation::bwCompare),
rep);
childs.push_back(rep);
}
void BaseAdaptationSet::setSwitchPolicy (bool value)
{
this->isBitstreamSwitching = value;
}
bool BaseAdaptationSet::getBitstreamSwitching () const
{
return this->isBitstreamSwitching;
}
void BaseAdaptationSet::debug(vlc_object_t *obj, int indent) const
{
std::string text(indent, ' ');
text.append("BaseAdaptationSet ");
text.append(id.str());
msg_Dbg(obj, "%s", text.c_str());
std::vector<BaseRepresentation *>::const_iterator k;
for(k = representations.begin(); k != representations.end(); ++k)
(*k)->debug(obj, indent + 1);
}
| gpl-2.0 |
Gargamel1989/ThreesandUO | Scripts/Engines/Virtues/Justice.cs | 8300 | using System;
using System.Collections;
using Server;
using Server.Items;
using Server.Gumps;
using Server.Mobiles;
using Server.Targeting;
namespace Server
{
public class JusticeVirtue
{
private static TimeSpan LossDelay = TimeSpan.FromDays( 7.0 );
private const int LossAmount = 950;
public static void Initialize()
{
VirtueGump.Register( 109, new OnVirtueUsed( OnVirtueUsed ) );
}
public static bool CheckMapRegion( Mobile first, Mobile second )
{
Map map = first.Map;
if ( second.Map != map )
return false;
return GetMapRegion( map, first.Location ) == GetMapRegion( map, second.Location );
}
public static int GetMapRegion( Map map, Point3D loc )
{
if ( map == null || map.MapID >= 2 )
return 0;
if ( loc.X < 5120 )
return 0;
if ( loc.Y < 2304 )
return 1;
return 2;
}
public static void OnVirtueUsed( Mobile from )
{
if ( !from.CheckAlive() )
return;
PlayerMobile protector = from as PlayerMobile;
if ( protector == null )
return;
if ( !VirtueHelper.IsSeeker( protector, VirtueName.Justice ) )
{
protector.SendLocalizedMessage( 1049610 ); // You must reach the first path in this virtue to invoke it.
}
else if ( !protector.CanBeginAction( typeof( JusticeVirtue ) ) )
{
protector.SendLocalizedMessage( 1049370 ); // You must wait a while before offering your protection again.
}
else if ( protector.JusticeProtectors.Count > 0 )
{
protector.SendLocalizedMessage( 1049542 ); // You cannot protect someone while being protected.
}
else if ( protector.Map != Map.Felucca )
{
protector.SendLocalizedMessage( 1049372 ); // You cannot use this ability here.
}
else
{
protector.BeginTarget( 14, false, TargetFlags.None, new TargetCallback( OnVirtueTargeted ) );
protector.SendLocalizedMessage( 1049366 ); // Choose the player you wish to protect.
}
}
public static void OnVirtueTargeted( Mobile from, object obj )
{
PlayerMobile protector = from as PlayerMobile;
PlayerMobile pm = obj as PlayerMobile;
if ( protector == null )
return;
if ( !VirtueHelper.IsSeeker( protector, VirtueName.Justice ) )
protector.SendLocalizedMessage( 1049610 ); // You must reach the first path in this virtue to invoke it.
else if ( !protector.CanBeginAction( typeof( JusticeVirtue ) ) )
protector.SendLocalizedMessage( 1049370 ); // You must wait a while before offering your protection again.
else if ( protector.JusticeProtectors.Count > 0 )
protector.SendLocalizedMessage( 1049542 ); // You cannot protect someone while being protected.
else if ( protector.Map != Map.Felucca )
protector.SendLocalizedMessage( 1049372 ); // You cannot use this ability here.
else if ( pm == null )
protector.SendLocalizedMessage( 1049678 ); // Only players can be protected.
else if ( pm.Map != Map.Felucca )
protector.SendLocalizedMessage( 1049372 ); // You cannot use this ability here.
else if ( pm == protector || pm.Criminal || pm.Kills >= 5 )
protector.SendLocalizedMessage( 1049436 ); // That player cannot be protected.
else if ( pm.JusticeProtectors.Count > 0 )
protector.SendLocalizedMessage( 1049369 ); // You cannot protect that player right now.
else if ( pm.HasGump( typeof( AcceptProtectorGump ) ) )
protector.SendLocalizedMessage( 1049369 ); // You cannot protect that player right now.
else
pm.SendGump( new AcceptProtectorGump( protector, pm ) );
}
public static void OnVirtueAccepted( PlayerMobile protector, PlayerMobile protectee )
{
if ( !VirtueHelper.IsSeeker( protector, VirtueName.Justice ) )
{
protector.SendLocalizedMessage( 1049610 ); // You must reach the first path in this virtue to invoke it.
}
else if ( !protector.CanBeginAction( typeof( JusticeVirtue ) ) )
{
protector.SendLocalizedMessage( 1049370 ); // You must wait a while before offering your protection again.
}
else if ( protector.JusticeProtectors.Count > 0 )
{
protector.SendLocalizedMessage( 1049542 ); // You cannot protect someone while being protected.
}
else if ( protector.Map != Map.Felucca )
{
protector.SendLocalizedMessage( 1049372 ); // You cannot use this ability here.
}
else if ( protectee.Map != Map.Felucca )
{
protector.SendLocalizedMessage( 1049372 ); // You cannot use this ability here.
}
else if ( protectee == protector || protectee.Criminal || protectee.Kills >= 5 )
{
protector.SendLocalizedMessage( 1049436 ); // That player cannot be protected.
}
else if ( protectee.JusticeProtectors.Count > 0 )
{
protector.SendLocalizedMessage( 1049369 ); // You cannot protect that player right now.
}
else
{
protectee.JusticeProtectors.Add( protector );
string args = String.Format( "{0}\t{1}", protector.Name, protectee.Name );
protectee.SendLocalizedMessage( 1049451, args ); // You are now being protected by ~1_NAME~.
protector.SendLocalizedMessage( 1049452, args ); // You are now protecting ~2_NAME~.
}
}
public static void OnVirtueRejected( PlayerMobile protector, PlayerMobile protectee )
{
string args = String.Format( "{0}\t{1}", protector.Name, protectee.Name );
protectee.SendLocalizedMessage( 1049453, args ); // You have declined protection from ~1_NAME~.
protector.SendLocalizedMessage( 1049454, args ); // ~2_NAME~ has declined your protection.
if ( protector.BeginAction( typeof( JusticeVirtue ) ) )
Timer.DelayCall( TimeSpan.FromMinutes( 15.0 ), new TimerStateCallback( RejectDelay_Callback ), protector );
}
public static void RejectDelay_Callback( object state )
{
Mobile m = state as Mobile;
if ( m != null )
m.EndAction( typeof( JusticeVirtue ) );
}
public static void CheckAtrophy( Mobile from )
{
PlayerMobile pm = from as PlayerMobile;
if ( pm == null )
return;
try
{
if ( (pm.LastJusticeLoss + LossDelay) < DateTime.UtcNow )
{
if ( VirtueHelper.Atrophy( from, VirtueName.Justice, LossAmount ) )
from.SendLocalizedMessage( 1049373 ); // You have lost some Justice.
pm.LastJusticeLoss = DateTime.UtcNow;
}
}
catch
{
}
}
}
public class AcceptProtectorGump : Gump
{
private PlayerMobile m_Protector;
private PlayerMobile m_Protectee;
public AcceptProtectorGump( PlayerMobile protector, PlayerMobile protectee ) : base( 150, 50 )
{
m_Protector = protector;
m_Protectee = protectee;
Closable = false;
AddPage( 0 );
AddBackground( 0, 0, 396, 218, 3600 );
AddImageTiled( 15, 15, 365, 190, 2624 );
AddAlphaRegion( 15, 15, 365, 190 );
AddHtmlLocalized( 30, 20, 360, 25, 1049365, 0x7FFF, false, false ); // Another player is offering you their <a href="?ForceTopic88">protection</a>:
AddLabel( 90, 55, 1153, protector.Name );
AddImage( 50, 45, 9005 );
AddImageTiled( 80, 80, 200, 1, 9107 );
AddImageTiled( 95, 82, 200, 1, 9157 );
AddRadio( 30, 110, 9727, 9730, true, 1 );
AddHtmlLocalized( 65, 115, 300, 25, 1049444, 0x7FFF, false, false ); // Yes, I would like their protection.
AddRadio( 30, 145, 9727, 9730, false, 0 );
AddHtmlLocalized( 65, 148, 300, 25, 1049445, 0x7FFF, false, false ); // No thanks, I can take care of myself.
AddButton( 160, 175, 247, 248, 2, GumpButtonType.Reply, 0 );
AddImage( 215, 0, 50581 );
AddImageTiled( 15, 14, 365, 1, 9107 );
AddImageTiled( 380, 14, 1, 190, 9105 );
AddImageTiled( 15, 205, 365, 1, 9107 );
AddImageTiled( 15, 14, 1, 190, 9105 );
AddImageTiled( 0, 0, 395, 1, 9157 );
AddImageTiled( 394, 0, 1, 217, 9155 );
AddImageTiled( 0, 216, 395, 1, 9157 );
AddImageTiled( 0, 0, 1, 217, 9155 );
}
public override void OnResponse( Server.Network.NetState sender, RelayInfo info )
{
if ( info.ButtonID == 2 )
{
bool okay = info.IsSwitched( 1 );
if ( okay )
JusticeVirtue.OnVirtueAccepted( m_Protector, m_Protectee );
else
JusticeVirtue.OnVirtueRejected( m_Protector, m_Protectee );
}
}
}
} | gpl-2.0 |
pedroresende/ezpublish-kernel | eZ/Publish/Core/FieldType/RichText/InternalLinkValidator.php | 2116 | <?php
/**
* File containing the eZ\Publish\Core\FieldType\RichText\InternalLinkValidator class.
*
* @copyright Copyright (C) eZ Systems AS. All rights reserved.
* @license For full copyright and license information view LICENSE file distributed with this source code.
*
* @version //autogentag//
*/
namespace eZ\Publish\Core\FieldType\RichText;
use eZ\Publish\API\Repository\ContentService;
use eZ\Publish\API\Repository\LocationService;
use eZ\Publish\API\Repository\Exceptions\NotFoundException;
use eZ\Publish\Core\Base\Exceptions\InvalidArgumentException;
/**
* Validator for RichText internal format links.
*/
class InternalLinkValidator
{
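    /** @var \eZ\Publish\API\Repository\ContentService */
    private $contentService;
    /** @var \eZ\Publish\API\Repository\LocationService */
    private $locationService;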
/**
* @param \eZ\Publish\API\Repository\ContentService $contentService
* @param \eZ\Publish\API\Repository\LocationService $locationService
*/
public function __construct(ContentService $contentService, LocationService $locationService)
{
$this->contentService = $contentService;
$this->locationService = $locationService;
}
/**
* Validates following link formats: 'ezcontent://<contentId>', 'ezremote://<contentRemoteId>', 'ezlocation://<locationId>'.
*
* @throws \eZ\Publish\API\Repository\Exceptions\InvalidArgumentException If given $scheme is not supported.
*
* @param string $scheme
* @param string $id
*
* @return bool
*/
public function validate($scheme, $id)
{
try {
switch ($scheme) {
case 'ezcontent':
$this->contentService->loadContentInfo($id);
break;
case 'ezremote':
$this->contentService->loadContentByRemoteId($id);
break;
case 'ezlocation':
$this->locationService->loadLocation($id);
break;
default:
throw new InvalidArgumentException($scheme, "Given scheme '{$scheme}' is not supported.");
}
} catch (NotFoundException $e) {
return false;
}
return true;
}
}
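/*
 * Illustrative usage sketch: the two repository services are assumed to come
 * from the service container, and the ids are made up.
 *
 *     $validator = new InternalLinkValidator($contentService, $locationService);
 *     $validator->validate('ezcontent', 42);     // true if content 42 exists
 *     $validator->validate('ezremote', 'abc12'); // true if that remote id exists
 *     $validator->validate('ezlocation', 2);     // true if location 2 exists
 *     $validator->validate('mailto', 'x');       // throws InvalidArgumentException
 */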
| gpl-2.0 |
johndj/cineallo | modules/contrib/multiversion/src/Tests/CommentStatisticsTest.php | 3980 | <?php
namespace Drupal\multiversion\Tests;
use Drupal\comment\Entity\Comment;
use Drupal\node\Entity\Node;
/**
* Tests comment statistics.
*
* @group multiversion
*/
class CommentStatisticsTest extends MultiversionWebTestBase {
/**
* The profile to install as a basis for testing.
*
* @var string
*/
protected $profile = 'standard';
/**
* Modules to enable.
*
* @var array
*/
public static $modules = ['multiversion', 'comment', 'node'];
/**
* A test node to which comments will be posted.
*
* @var \Drupal\node\NodeInterface
*/
protected $node;
/**
* {@inheritdoc}
*/
protected function setUp() {
parent::setUp();
$this->adminUser = $this->drupalCreateUser([
'administer content types',
'administer blocks',
'administer comments',
'administer comment types',
'post comments',
'create article content',
'access administration pages',
'access comments',
'access content',
]);
$this->drupalLogin($this->adminUser);
$this->drupalPlaceBlock('local_tasks_block');
$this->node = Node::create([
'type' => 'article',
'title' => 'New node',
'promote' => 1,
'uid' => $this->adminUser->id()
]);
$this->node->save();
}
/**
* Tests the node comment statistics.
*/
function testCommentNodeCommentStatistics() {
$node_storage = $this->container->get('entity.manager')->getStorage('node');
$this->drupalGet('<front>');
$this->assertNoLink(t('1 comment'));
$this->assertEqual($this->node->get('comment')->comment_count, 0, 'The number of comments for the node is correct (0 comments)');
// Test comment statistic when creating comments.
$comment1 = Comment::create([
'entity_type' => 'node',
'field_name' => 'comment',
'subject' => 'How much wood would a woodchuck chuck',
'comment_body' => $this->randomMachineName(128),
'entity_id' => $this->node->id(),
]);
$comment1->save();
$node_storage->resetCache([$this->node->id()]);
$node = $node_storage->load($this->node->id());
$this->assertEqual($node->get('comment')->comment_count, 1, 'The number of comments for the node is correct (1 comment)');
$this->drupalGet('<front>');
$this->assertLink(t('1 comment'));
$comment2 = Comment::create([
'entity_type' => 'node',
'field_name' => 'comment',
'subject' => 'A big black bug bit a big black dog',
'comment_body' => $this->randomMachineName(128),
'entity_id' => $this->node->id(),
]);
$comment2->save();
$comment3 = Comment::create([
'entity_type' => 'node',
'field_name' => 'comment',
'subject' => 'How much pot, could a pot roast roast',
'comment_body' => $this->randomMachineName(128),
'entity_id' => $this->node->id(),
]);
$comment3->save();
$node_storage->resetCache([$this->node->id()]);
$node = $node_storage->load($this->node->id());
$this->assertEqual($node->get('comment')->comment_count, 3, 'The number of comments for the node is correct (3 comments)');
$this->drupalGet('<front>');
$this->assertLink(t('3 comments'));
// Test comment statistic when deleting comments.
$comment1->delete();
$comment2->delete();
$node_storage->resetCache([$this->node->id()]);
$node = $node_storage->load($this->node->id());
$this->assertEqual($node->get('comment')->comment_count, 1, 'The number of comments for the node is correct (1 comment)');
$this->drupalGet('<front>');
$this->assertLink(t('1 comment'));
$comment3->delete();
$node_storage->resetCache([$this->node->id()]);
$node = $node_storage->load($this->node->id());
$this->assertEqual($node->get('comment')->comment_count, 0, 'The number of comments for the node is correct (0 comments)');
$this->drupalGet('<front>');
$this->assertNoLink(t('1 comment'));
$this->assertNoLink(t('comments'));
}
}
| gpl-2.0 |
lzpfmh/oceanbase | oceanbase_0.4/src/common/ob_libeasy_mem_pool.cpp | 747 | /* (C) 2010-2012 Alibaba Group Holding Limited.
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* version 2 as published by the Free Software Foundation.
*
* Version: 0.1
*
* Authors:
* Wu Di <[email protected]>
*/
#include "common/ob_libeasy_mem_pool.h"
using namespace oceanbase;
using namespace common;
void* common::ob_easy_realloc(void *ptr, size_t size)
{
void *ret = NULL;
if (size)
{
ret = ob_tc_realloc(ptr, size, ObModIds::LIBEASY);
if (ret == NULL)
{
TBSYS_LOG(WARN, "ob_tc_realloc failed, ptr:%p, size:%lu", ptr, size);
}
}
else if (ptr)
{
ob_tc_free(ptr, ObModIds::LIBEASY);
}
return ret;
}
| gpl-2.0 |
dmlloyd/openjdk-modules | hotspot/test/compiler/jvmci/errors/TestInvalidOopMap.java | 4170 | /*
* Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @requires vm.jvmci
* @modules jdk.internal.vm.ci/jdk.vm.ci.hotspot
* jdk.internal.vm.ci/jdk.vm.ci.code
* jdk.internal.vm.ci/jdk.vm.ci.code.site
* jdk.internal.vm.ci/jdk.vm.ci.meta
* jdk.internal.vm.ci/jdk.vm.ci.runtime
* jdk.internal.vm.ci/jdk.vm.ci.common
* @compile CodeInstallerTest.java
* @run junit/othervm -da:jdk.vm.ci... -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI
* -Djvmci.Compiler=null compiler.jvmci.errors.TestInvalidOopMap
*/
package compiler.jvmci.errors;
import jdk.vm.ci.code.BytecodePosition;
import jdk.vm.ci.code.DebugInfo;
import jdk.vm.ci.code.Location;
import jdk.vm.ci.code.ReferenceMap;
import jdk.vm.ci.code.Register;
import jdk.vm.ci.code.StackSlot;
import jdk.vm.ci.code.site.DataPatch;
import jdk.vm.ci.code.site.Infopoint;
import jdk.vm.ci.code.site.InfopointReason;
import jdk.vm.ci.code.site.Site;
import jdk.vm.ci.common.JVMCIError;
import jdk.vm.ci.hotspot.HotSpotCompiledCode.Comment;
import jdk.vm.ci.hotspot.HotSpotReferenceMap;
import jdk.vm.ci.meta.Assumptions.Assumption;
import jdk.vm.ci.meta.JavaKind;
import jdk.vm.ci.meta.PlatformKind;
import org.junit.Test;
/**
* Tests for errors in oop maps.
*/
public class TestInvalidOopMap extends CodeInstallerTest {
private static class InvalidReferenceMap extends ReferenceMap {
}
private void test(ReferenceMap refMap) {
BytecodePosition pos = new BytecodePosition(null, dummyMethod, 0);
DebugInfo info = new DebugInfo(pos);
info.setReferenceMap(refMap);
installEmptyCode(new Site[]{new Infopoint(0, info, InfopointReason.SAFEPOINT)}, new Assumption[0], new Comment[0], 16, new DataPatch[0], StackSlot.get(null, 0, true));
}
@Test(expected = NullPointerException.class)
public void testMissingReferenceMap() {
test(null);
}
@Test(expected = JVMCIError.class)
public void testInvalidReferenceMap() {
test(new InvalidReferenceMap());
}
@Test(expected = NullPointerException.class)
public void testNullOops() {
test(new HotSpotReferenceMap(null, new Location[0], new int[0], 8));
}
@Test(expected = NullPointerException.class)
public void testNullBase() {
test(new HotSpotReferenceMap(new Location[0], null, new int[0], 8));
}
@Test(expected = NullPointerException.class)
public void testNullSize() {
test(new HotSpotReferenceMap(new Location[0], new Location[0], null, 8));
}
@Test(expected = JVMCIError.class)
public void testInvalidLength() {
test(new HotSpotReferenceMap(new Location[1], new Location[2], new int[3], 8));
}
@Test(expected = JVMCIError.class)
public void testInvalidShortOop() {
PlatformKind kind = arch.getPlatformKind(JavaKind.Short);
Register reg = getRegister(kind, 0);
Location[] oops = new Location[]{Location.register(reg)};
Location[] base = new Location[]{null};
int[] size = new int[]{kind.getSizeInBytes()};
test(new HotSpotReferenceMap(oops, base, size, 8));
}
}
| gpl-2.0 |
JohnsonYuan/MuseScore | libmscore/pitchspelling.cpp | 31477 | //=============================================================================
// MuseScore
// Music Composition & Notation
//
// Copyright (C) 2007-2011 Werner Schweer
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 2
// as published by the Free Software Foundation and appearing in
// the file LICENCE.GPL
//=============================================================================
// This file contains the implementation of a pitch spelling
// algorithm by Emilios Cambouropoulos, as published in:
// "Automatic Pitch Spelling: From Numbers to Sharps and Flats"
#include "synthesizer/event.h"
#include "note.h"
#include "key.h"
#include "pitchspelling.h"
#include "staff.h"
#include "chord.h"
#include "score.h"
#include "part.h"
#include "utils.h"
namespace Ms {
//---------------------------------------------------------
// tpcIsValid
//---------------------------------------------------------
bool tpcIsValid(int val)
{
return val >= Tpc::TPC_MIN && val <= Tpc::TPC_MAX;
}
//---------------------------------------------------------
// step2tpc
//---------------------------------------------------------
int step2tpc(int step, AccidentalVal alter)
{
// TPC - tonal pitch classes
      // "line of fifths" (LOF)
static const int spellings[] = {
// bb b - # ##
0, 7, 14, 21, 28, // C
2, 9, 16, 23, 30, // D
4, 11, 18, 25, 32, // E
-1, 6, 13, 20, 27, // F
1, 8, 15, 22, 29, // G
3, 10, 17, 24, 31, // A
5, 12, 19, 26, 33, // B
};
int i = step*5 + int(alter)+2;
Q_ASSERT(i >= 0 && (i < int(sizeof(spellings)/sizeof(*spellings))));
return spellings[i];
}
static const int tpcByStepAndKey[int(Key::NUM_OF)][STEP_DELTA_OCTAVE] = {
// step C D E F G A B Key
{ Tpc::TPC_C_B, Tpc::TPC_D_B, Tpc::TPC_E_B, Tpc::TPC_F_B, Tpc::TPC_G_B, Tpc::TPC_A_B, Tpc::TPC_B_B}, // Cb
{ Tpc::TPC_C_B, Tpc::TPC_D_B, Tpc::TPC_E_B, Tpc::TPC_F, Tpc::TPC_G_B, Tpc::TPC_A_B, Tpc::TPC_B_B}, // Gb
      { Tpc::TPC_C,   Tpc::TPC_D_B, Tpc::TPC_E_B, Tpc::TPC_F,   Tpc::TPC_G_B, Tpc::TPC_A_B, Tpc::TPC_B_B},  // Db
{ Tpc::TPC_C, Tpc::TPC_D_B, Tpc::TPC_E_B, Tpc::TPC_F, Tpc::TPC_G, Tpc::TPC_A_B, Tpc::TPC_B_B}, // Ab
{ Tpc::TPC_C, Tpc::TPC_D, Tpc::TPC_E_B, Tpc::TPC_F, Tpc::TPC_G, Tpc::TPC_A_B, Tpc::TPC_B_B}, // Eb
{ Tpc::TPC_C, Tpc::TPC_D, Tpc::TPC_E_B, Tpc::TPC_F, Tpc::TPC_G, Tpc::TPC_A, Tpc::TPC_B_B}, // B
{ Tpc::TPC_C, Tpc::TPC_D, Tpc::TPC_E, Tpc::TPC_F, Tpc::TPC_G, Tpc::TPC_A, Tpc::TPC_B_B}, // F
{ Tpc::TPC_C, Tpc::TPC_D, Tpc::TPC_E, Tpc::TPC_F, Tpc::TPC_G, Tpc::TPC_A, Tpc::TPC_B}, // C
{ Tpc::TPC_C, Tpc::TPC_D, Tpc::TPC_E, Tpc::TPC_F_S, Tpc::TPC_G, Tpc::TPC_A, Tpc::TPC_B}, // G
{ Tpc::TPC_C_S, Tpc::TPC_D, Tpc::TPC_E, Tpc::TPC_F_S, Tpc::TPC_G, Tpc::TPC_A, Tpc::TPC_B}, // D
{ Tpc::TPC_C_S, Tpc::TPC_D, Tpc::TPC_E, Tpc::TPC_F_S, Tpc::TPC_G_S, Tpc::TPC_A, Tpc::TPC_B}, // A
{ Tpc::TPC_C_S, Tpc::TPC_D_S, Tpc::TPC_E, Tpc::TPC_F_S, Tpc::TPC_G_S, Tpc::TPC_A, Tpc::TPC_B}, // E
{ Tpc::TPC_C_S, Tpc::TPC_D_S, Tpc::TPC_E, Tpc::TPC_F_S, Tpc::TPC_G_S, Tpc::TPC_A_S, Tpc::TPC_B}, // H
{ Tpc::TPC_C_S, Tpc::TPC_D_S, Tpc::TPC_E_S, Tpc::TPC_F_S, Tpc::TPC_G_S, Tpc::TPC_A_S, Tpc::TPC_B}, // F#
{ Tpc::TPC_C_S, Tpc::TPC_D_S, Tpc::TPC_E_S, Tpc::TPC_F_S, Tpc::TPC_G_S, Tpc::TPC_A_S, Tpc::TPC_B_S}, // C#
};
int step2tpcByKey(int step, Key key)
{
while (step < 0)
step += STEP_DELTA_OCTAVE;
while (key < Key::MIN)
key += Key::DELTA_ENHARMONIC;
while (key > Key::MAX)
key -= Key::DELTA_ENHARMONIC;
return tpcByStepAndKey[int(key) - int(Key::MIN)][step % STEP_DELTA_OCTAVE];
}
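// Illustrative example (values taken from the table above): step2tpcByKey(3 /* F */, Key::G)
// reads row int(Key::G) - int(Key::MIN) = 8 and column 3, yielding Tpc::TPC_F_S --
// the F degree of G major is F sharp.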
//---------------------------------------------------------
// tpc2step
//---------------------------------------------------------
int tpc2step(int tpc)
{
// 14 - C
// 15 % 7 = 1
// f c g d a e b
static const int steps[STEP_DELTA_OCTAVE] = { 3, 0, 4, 1, 5, 2, 6 };
// TODO: optimize -TCP_MIN
return steps[(tpc-Tpc::TPC_MIN) % STEP_DELTA_OCTAVE];
// without a table, could also be rendered as:
// return ((tpc-Tpc::TPC_MIN) * STEP_DELTA_TPC) / STEP_DELTA_OCTAVE + TPC_FIRST_STEP;
}
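// Illustrative example: tpc2step(Tpc::TPC_C /* 14 */) computes (14 - Tpc::TPC_MIN) % 7
// = 15 % 7 = 1 and returns steps[1] = 0, i.e. the step C, matching the terse
// "14 - C" / "15 % 7 = 1" notes above.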
//---------------------------------------------------------
// tpc2stepByKey
//---------------------------------------------------------
int tpc2stepByKey(int tpc, Key key, int* pAlter)
{
if (pAlter)
*pAlter = tpc2alterByKey(tpc, key);
return tpc2step(tpc);
}
//---------------------------------------------------------
// step2tpc
//---------------------------------------------------------
int step2tpc(const QString& stepName, AccidentalVal alter)
{
if (stepName.isEmpty())
return Tpc::TPC_INVALID;
int r;
switch (stepName[0].toLower().toLatin1()) {
case 'c': r = 0; break;
case 'd': r = 1; break;
case 'e': r = 2; break;
case 'f': r = 3; break;
case 'g': r = 4; break;
case 'a': r = 5; break;
case 'b': r = 6; break;
default:
return Tpc::TPC_INVALID;
}
return step2tpc(r, alter);
}
//---------------------------------------------------------
// step2deltaPitchByKey
//
// returns the delta pitch above natural C for the given step in the given key
// step: 0 - 6
// key: -7 - +7
//---------------------------------------------------------
static const int pitchByStepAndKey[int(Key::NUM_OF)][STEP_DELTA_OCTAVE] = {
// step C D E F G A B Key
{ -1, 1, 3, 4, 6, 8, 10}, // Cb
{ -1, 1, 3, 5, 6, 8, 10}, // Gb
{ 0, 1, 3, 5, 6, 8, 10}, // Db
{ 0, 1, 3, 5, 7, 8, 10}, // Ab
{ 0, 2, 3, 5, 7, 8, 10}, // Eb
{ 0, 2, 3, 5, 7, 9, 10}, // B
{ 0, 2, 4, 5, 7, 9, 10}, // F
{ 0, 2, 4, 5, 7, 9, 11}, // C
{ 0, 2, 4, 6, 7, 9, 11}, // G
{ 1, 2, 4, 6, 7, 9, 11}, // D
{ 1, 2, 4, 6, 8, 9, 11}, // A
{ 1, 3, 4, 6, 8, 9, 11}, // E
{ 1, 3, 4, 6, 8, 10, 11}, // H
{ 1, 3, 5, 6, 8, 10, 11}, // F#
{ 1, 3, 5, 6, 8, 10, 12}, // C#
};
int step2deltaPitchByKey(int step, Key key)
{
while (step < 0)
step+= STEP_DELTA_OCTAVE;
while (key < Key::MIN)
key += Key::DELTA_ENHARMONIC;
while (key > Key::MAX)
key -= Key::DELTA_ENHARMONIC;
return pitchByStepAndKey[int(key)-int(Key::MIN)][step % STEP_DELTA_OCTAVE];
}
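// Illustrative example: step2deltaPitchByKey(6 /* B */, Key::F) reads row
// int(Key::F) - int(Key::MIN) = 6 ("F") and column 6 of the table above, giving 10:
// the B degree of F major (Bb) lies ten semitones above natural C.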
//---------------------------------------------------------
// tpc2pitch
//---------------------------------------------------------
int tpc2pitch(int tpc)
{
Q_ASSERT(tpcIsValid(tpc));
static int pitches[] = {
//step: F C G D A E B
3, -2, 5, 0, 7, 2, 9, // bb
4, -1, 6, 1, 8, 3, 10, // b
5, 0, 7, 2, 9, 4, 11, // -
6, 1, 8, 3, 10, 5, 12, // #
7, 2, 9, 4, 11, 6, 13 // ##
};
return pitches[tpc + 1];
}
//---------------------------------------------------------
// tpc2alterByKey
//
// returns the alteration (-3 to 3) of a given tpc in the given key
// to understand the formula:
//    in the highest key (C#Maj), each of the first 7 tpcs (Fbb to Bbb; tpc-Tpc::TPC_MIN: 0 to 6)
//    is 3 semitones below its key degree (alter = -3)
//    the second 7 tpcs (Fb to Bb; tpc-Tpc::TPC_MIN: 7 to 13) are 2 semitones below (alter = -2) and so on up to 1
// thus, for C#Maj:
//    (1) (tpc-Tpc::TPC_MIN) - 0 = 0 to 34 (for tpc-Tpc::TPC_MIN from 0 to 34)
//    (2) ((tpc-Tpc::TPC_MIN) - 0) / 7 = 0 to 4 (for each septuple of tpcs) and finally
//    (3) ((tpc-Tpc::TPC_MIN) - 0) / 7 - 3 = -3 to 1 (for each septuple of tpcs)
// where 0 = Key::C_S - Key::MAX
// for each previous key, the result of (1) increases by 1 and the classes of alter are shifted 1 TPC 'up':
// F#Maj: Fbb-Ebb => -3, Bbb to Eb => -2 and so on
// BMaj: Fbb-Abb => -3, Ebb to Ab => -2 and so on
// and so on
// thus, for any 'key', the formula is:
//    ((tpc-Tpc::TPC_MIN) - (key-Key::MAX)) / TPC_DELTA_SEMITONE - 3
//---------------------------------------------------------
int tpc2alterByKey(int tpc, Key key) {
return (tpc - int(key) - int(Tpc::TPC_MIN) + int(Key::MAX)) / TPC_DELTA_SEMITONE - 3;
}
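// Illustrative examples for key = Key::C (0), using Tpc::TPC_MIN = -1 and Key::MAX = 7
// (cf. the tables and comments above):
//    tpc = Tpc::TPC_F   (13): (13 - 0 + 1 + 7) / 7 - 3 = 3 - 3 = 0  (F is diatonic in C major)
//    tpc = Tpc::TPC_F_S (20): (20 - 0 + 1 + 7) / 7 - 3 = 4 - 3 = 1  (F# is one semitone above the F degree)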
//---------------------------------------------------------
// tpc2name
// return note name
//---------------------------------------------------------
QString tpc2name(int tpc, NoteSpellingType noteSpelling, NoteCaseType noteCase, bool explicitAccidental)
{
QString s;
QString acc;
tpc2name(tpc, noteSpelling, noteCase, s, acc, explicitAccidental);
return s + (explicitAccidental ? " " : "") + acc;
}
//---------------------------------------------------------
// tpc2name
//---------------------------------------------------------
void tpc2name(int tpc, NoteSpellingType noteSpelling, NoteCaseType noteCase, QString& s, QString& acc, bool explicitAccidental)
{
int n;
tpc2name(tpc, noteSpelling, noteCase, s, n);
switch (n) {
case -2:
if (explicitAccidental) {
acc = QObject::tr("double flat");
}
else if (noteSpelling == NoteSpellingType::GERMAN_PURE) {
switch (tpc) {
case TPC_A_BB: acc = "sas"; break;
case TPC_E_BB: acc = "ses"; break;
default: acc = "eses";
}
}
else {
acc = "bb";
}
break;
case -1:
if (explicitAccidental)
acc = QObject::tr("flat");
else if (noteSpelling == NoteSpellingType::GERMAN_PURE)
acc = (tpc == TPC_A_B || tpc == TPC_E_B) ? "s" : "es";
else
acc = "b";
break;
case 0: acc = ""; break;
case 1:
if (explicitAccidental)
acc = QObject::tr("sharp");
else
acc = (noteSpelling == NoteSpellingType::GERMAN_PURE) ? "is" : "#";
break;
case 2:
if (explicitAccidental)
acc = QObject::tr("double sharp");
else
acc = (noteSpelling == NoteSpellingType::GERMAN_PURE) ? "isis" : "##";
break;
default:
qDebug("tpc2name(%d): acc %d", tpc, n);
acc = "";
break;
}
}
//---------------------------------------------------------
// tpc2name
//---------------------------------------------------------
void tpc2name(int tpc, NoteSpellingType noteSpelling, NoteCaseType noteCase, QString& s, int& acc)
{
const char names[] = "FCGDAEB";
const char gnames[] = "FCGDAEH";
const QString snames[] = { "Fa", "Do", "Sol", "Re", "La", "Mi", "Si" };
acc = ((tpc+1) / 7) - 2;
int idx = (tpc + 1) % 7;
switch (noteSpelling) {
case NoteSpellingType::GERMAN:
case NoteSpellingType::GERMAN_PURE:
s = gnames[idx];
if (s == "H" && acc == -1) {
s = "B";
if (noteSpelling == NoteSpellingType::GERMAN_PURE)
acc = 0;
}
break;
case NoteSpellingType::SOLFEGGIO:
s = snames[idx];
break;
case NoteSpellingType::FRENCH:
s = snames[idx];
if (s == "Re")
s = "Ré";
break;
default:
s = names[idx];
break;
}
switch (noteCase) {
case NoteCaseType::LOWER: s = s.toLower(); break;
case NoteCaseType::UPPER: s = s.toUpper(); break;
case NoteCaseType::CAPITAL:
case NoteCaseType::AUTO:
default:
break;
}
}
//---------------------------------------------------------
// tpc2stepName
//---------------------------------------------------------
QString tpc2stepName(int tpc)
{
const char names[] = "FCGDAEB";
return QString(names[(tpc + 1) % 7]);
}
// table of alternative spellings for one octave
// each entry is the TPC of the note
// tab1 does not contain double sharps
// tab2 does not contain double flats
static const int tab1[24] = {
14, 2, // 60 C Dbb
21, 9, // 61 C# Db
16, 4, // 62 D Ebb
23, 11, // 63 D# Eb
18, 6, // 64 E Fb
13, 1, // 65 F Gbb
20, 8, // 66 F# Gb
15, 3, // 67 G Abb
22, 10, // 68 G# Ab
17, 5, // 69 A Bbb
24, 12, // 70 A# Bb
19, 7, // 71 B Cb
};
static const int tab2[24] = {
26, 14, // 60 B# C
21, 9, // 61 C# Db
28, 16, // 62 C## D
23, 11, // 63 D# Eb
30, 18, // 64 D## E
25, 13, // 65 E# F
20, 8, // 66 F# Gb
27, 15, // 67 F## G
22, 10, // 68 G# Ab
29, 17, // 69 G## A
24, 12, // 70 A# Bb
31, 19, // 71 A## B
};
int intervalPenalty[13] = {
0, 0, 0, 0, 0, 0, 1, 3, 1, 1, 1, 3, 3
};
//---------------------------------------------------------
// enharmonicSpelling
//---------------------------------------------------------
static const int enharmonicSpelling[15][34] = {
{
//Ces f c g d a e b
1, 1, 1, 1, 1, 1, // bb
0, 0, 0, 0, 0, 0, 0, // b
1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//Ges f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 0, 0, 0, 0, 0, 0, // b
0, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//Des f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//As f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//Es f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 0, 0, 1, 1, 1,
0, 0, 1, 1, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//Bb f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 0, 0, 0, 1, 1,
1, 0, 0, 1, 1, 1, 1, // # // (ws) penalty for f#
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//F f c g d a e b // extra penalty for a# b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 0, 0, 0, 0, 1,
0, 0, 0, 0, 1, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//C f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b
0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//G f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 0, 0, 0, 0, // b
1, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//D f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 1, 0, 0, 0, // b
1, 1, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//A f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 1, 1, 0, 0, // b
1, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//E f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 1, 1, 1, 0, // b
1, 1, 1, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
0, 0, 1, 1, 1, 1, 1 // ##
},
{
//H f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 1, 1, 1, 1, // b
1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 1, 1, // #
1, 1, 1, 1, 1, 1, 1 // ##
},
{
//Fis f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 1, 1, 1, 1, 1, // b
100, 1, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, // #
0, 1, 1, 1, 1, 1, 1 // ##
},
{
//Cis f c g d a e b
1, 1, 1, 1, 1, 1, // bb
1, 1, 0, 0, 0, 0, 0, // b //Fis
100, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, // #
0, 0, 1, 1, 1, 1, 1 // ##
}
};
//---------------------------------------------------------
// penalty
//---------------------------------------------------------
static int penalty(int lof1, int lof2, int k)
{
if (k < 0 || k >= 15)
qFatal("Illegal key %d >= 15", k);
Q_ASSERT(lof1 >= 0 && lof1 < 34);
Q_ASSERT(lof2 >= 0 && lof2 < 34);
int penalty = enharmonicSpelling[k][lof1] * 4 + enharmonicSpelling[k][lof2] * 4;
int distance = lof2 > lof1 ? lof2 - lof1 : lof1 - lof2;
if (distance > 12)
penalty += 3;
else
penalty += intervalPenalty[distance];
return penalty;
}
static const int WINDOW = 9;
#if 0 // yet(?) unused
static const int WINDOW_SHIFT = 3;
static const int ASIZE = 1024; // 2 ** WINDOW
#endif
//---------------------------------------------------------
// computeWindow
//---------------------------------------------------------
static int computeWindow(const QList<Event>& notes, int start, int end, int keyIdx)
{
int p = 10000;
int idx = -1;
int pitch[10];
Q_ASSERT((end-start) < 10 && start != end);
int i = start;
int k = 0;
while (i < end)
pitch[k++] = notes[i++].dataA() % 12;
for (; k < 10; ++k)
pitch[k] = pitch[k-1];
for (int i = 0; i < 512; ++i) {
int pa = 0;
int pb = 0;
int l = pitch[0] * 2 + (i & 1);
Q_ASSERT((l >= 0) && (l < int(sizeof(tab1)/sizeof(*tab1))));
int lof1a = tab1[l];
int lof1b = tab2[l];
for (int k = 1; k < 10; ++k) {
int l = pitch[k] * 2 + ((i & (1 << k)) >> k);
Q_ASSERT((l >= 0) && (l < int(sizeof(tab1)/sizeof(*tab1))));
int lof2a = tab1[l];
int lof2b = tab2[l];
pa += penalty(lof1a, lof2a, keyIdx);
pb += penalty(lof1b, lof2b, keyIdx);
lof1a = lof2a;
lof1b = lof2b;
}
if (pa < pb) {
if (pa < p) {
p = pa;
idx = i;
}
}
else {
if (pb < p) {
p = pb;
idx = i * -1;
}
}
}
return idx;
}
//---------------------------------------------------------
// tpc
//---------------------------------------------------------
int tpc(int idx, int pitch, int opt)
{
const int* tab;
if (opt < 0) {
tab = tab2;
opt *= -1;
}
else
tab = tab1;
int i = (pitch % 12) * 2 + ((opt & (1 << idx)) >> idx);
Q_ASSERT(i >= 0 && i < 24);
return tab[i];
}
//---------------------------------------------------------
// computeWindow
//---------------------------------------------------------
int computeWindow(const QList<Note*>& notes, int start, int end)
{
int p = 10000;
int idx = -1;
int pitch[10];
int key[10];
int i = start;
int k = 0;
while (i < end) {
pitch[k] = notes[i]->pitch() % 12;
int tick = notes[i]->chord()->tick();
key[k] = int(notes[i]->staff()->key(tick)) + 7;
if (key[k] < 0 || key[k] > 14) {
qDebug("illegal key at tick %d: %d, window %d-%d",
tick, key[k] - 7, start, end);
return 0;
// abort();
}
++k;
++i;
}
for (; k < 10; ++k) {
pitch[k] = pitch[k-1];
key[k] = key[k-1];
}
for (int i = 0; i < 512; ++i) {
int pa = 0;
int pb = 0;
int l = pitch[0] * 2 + (i & 1);
      Q_ASSERT(l >= 0 && l < (int)(sizeof(tab1)/sizeof(*tab1)));
int lof1a = tab1[l];
int lof1b = tab2[l];
for (int k = 1; k < 10; ++k) {
int l = pitch[k] * 2 + ((i & (1 << k)) >> k);
            Q_ASSERT(l >= 0 && l < (int)(sizeof(tab1)/sizeof(*tab1)));
int lof2a = tab1[l];
int lof2b = tab2[l];
pa += penalty(lof1a, lof2a, key[k]);
pb += penalty(lof1b, lof2b, key[k]);
lof1a = lof2a;
lof1b = lof2b;
}
if (pa < pb) {
if (pa < p) {
p = pa;
idx = i;
}
}
else {
if (pb < p) {
p = pb;
idx = i * -1;
}
}
}
/* qDebug("compute window\n ");
for (int i = 0; i < 10; ++i)
qDebug("%2d ", pitch[i]);
qDebug("\n ");
for (int i = 0; i < 10; ++i)
qDebug("%2d ", key[i]);
qDebug("\n ");
for (int i = 0; i < 10; ++i)
qDebug("%2d ", tpc(i, pitch[i], idx));
*/
return idx;
}
//---------------------------------------------------------
// spell
//---------------------------------------------------------
void spell(QList<Event>& notes, int key)
{
key += 7;
int n = notes.size();
if (n == 0)
return;
int start = 0;
while (start < n) {
int end = start + WINDOW;
if (end > n)
end = n;
int opt = computeWindow(notes, start, end, key);
const int* tab;
if (opt < 0) {
tab = tab2;
opt *= -1;
}
else
tab = tab1;
if (start == 0) {
notes[0].setTpc(tab[(notes[0].dataA() % 12) * 2 + (opt & 1)]);
if (n > 1)
notes[1].setTpc(tab[(notes[1].dataA() % 12) * 2 + ((opt & 2)>>1)]);
if (n > 2)
notes[2].setTpc(tab[(notes[2].dataA() % 12) * 2 + ((opt & 4)>>2)]);
}
if ((end - start) >= 6) {
notes[start+3].setTpc(tab[(notes[start+3].dataA() % 12) * 2 + ((opt & 8) >> 3)]);
notes[start+4].setTpc(tab[(notes[start+4].dataA() % 12) * 2 + ((opt & 16) >> 4)]);
notes[start+5].setTpc(tab[(notes[start+5].dataA() % 12) * 2 + ((opt & 32) >> 5)]);
}
if (end == n) {
int n = end - start;
int k;
switch(n - 6) {
case 3:
k = end - start - 3;
notes[end-3].setTpc(tab[(notes[end-3].dataA() % 12) * 2 + ((opt & (1<<k)) >> k)]);
case 2:
k = end - start - 2;
notes[end-2].setTpc(tab[(notes[end-2].dataA() % 12) * 2 + ((opt & (1<<k)) >> k)]);
case 1:
k = end - start - 1;
notes[end-1].setTpc(tab[(notes[end-1].dataA() % 12) * 2 + ((opt & (1<<k)) >> k)]);
}
break;
}
// advance to next window
start += 3;
}
}
//---------------------------------------------------------
// changeAllTpcs
//---------------------------------------------------------
void changeAllTpcs(Note* n, int tpc1)
{
Interval v;
if (n && n->part() && n->part()->instrument()) {
v = n->part()->instrument()->transpose();
v.flip();
}
int tpc2 = Ms::transposeTpc(tpc1, v, true);
n->undoChangeProperty(P_ID::TPC1, tpc1);
n->undoChangeProperty(P_ID::TPC2, tpc2);
}
//---------------------------------------------------------
// spell
//---------------------------------------------------------
void Score::spellNotelist(QList<Note*>& notes)
{
int n = notes.size();
int start = 0;
while (start < n) {
int end = start + WINDOW;
if (end > n)
end = n;
int opt = computeWindow(notes, start, end);
const int* tab;
if (opt < 0) {
tab = tab2;
opt *= -1;
}
else
tab = tab1;
if (start == 0) {
changeAllTpcs(notes[0], tab[(notes[0]->pitch() % 12) * 2 + (opt & 1)]);
if (n > 1)
changeAllTpcs(notes[1], tab[(notes[1]->pitch() % 12) * 2 + ((opt & 2)>>1)]);
if (n > 2)
changeAllTpcs(notes[2], tab[(notes[2]->pitch() % 12) * 2 + ((opt & 4)>>2)]);
}
if ((end - start) >= 6) {
changeAllTpcs(notes[start+3], tab[(notes[start+3]->pitch() % 12) * 2 + ((opt & 8) >> 3)]);
changeAllTpcs(notes[start+4], tab[(notes[start+4]->pitch() % 12) * 2 + ((opt & 16) >> 4)]);
changeAllTpcs(notes[start+5], tab[(notes[start+5]->pitch() % 12) * 2 + ((opt & 32) >> 5)]);
}
if (end == n) {
int n = end - start;
int k;
switch(n - 6) {
case 3:
k = end - start - 3;
changeAllTpcs(notes[end-3], tab[(notes[end-3]->pitch() % 12) * 2 + ((opt & (1<<k)) >> k)]);
case 2:
k = end - start - 2;
changeAllTpcs(notes[end-2], tab[(notes[end-2]->pitch() % 12) * 2 + ((opt & (1<<k)) >> k)]);
case 1:
k = end - start - 1;
changeAllTpcs(notes[end-1], tab[(notes[end-1]->pitch() % 12) * 2 + ((opt & (1<<k)) >> k)]);
}
break;
}
// advance to next window
start += 3;
}
}
//---------------------------------------------------------
// pitch2tpc2
//---------------------------------------------------------
// pitch2tpc2(pitch, false) replaced by pitch2tpc(pitch, Key::C, Prefer::FLATS)
// pitch2tpc2(pitch, true) replaced by pitch2tpc(pitch, Key::C, Prefer::SHARPS)
//---------------------------------------------------------
// pitch2tpc
// preferred pitch spelling depending on key
// key -7 to +7
//
// The value of prefer sets the preferred mix of flats and sharps
// for pitches that are non-diatonic in the key specified, by
// positioning the window along the tpc sequence.
//
// Scale tones are the range shown in [ ].
// A value of 8 (Prefer::FLATS) specifies 5b 2b 6b 3b 7b [4 1 5 2 6 3 7]
// A value of 11 (Prefer::NEAREST) specifies 3b 7b [4 1 5 2 6 3 7] 4# 1# 5#
// A value of 13 (Prefer::SHARPS) specifies [4 1 5 2 6 3 7] 4# 1# 5# 2# 6#
//
// Examples for Prefer::NEAREST (n indicates explicit natural):
// C major will use Eb Bb [F C G D A E B] F# C# G#.
// E major will use Gn Dn [A E B F# C# G# D#] A# E# B#.
// F# major will use An En [B F# C# G# D# A# E#] B# Fx Cx.
// Eb major will use Gb Db [Ab Eb Bb F C G D] An En Bn.
// Gb major will use Bbb Fb [Cb Gb Db Ab Eb Bb F] Cn Gn Dn.
//---------------------------------------------------------
int pitch2tpc(int pitch, Key key, Prefer prefer)
{
return (pitch * 7 + 26 - (int(prefer) + int(key))) % 12 + (int(prefer) + int(key));
}
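// Illustrative example: pitch 61 (C#4/Db4) in C major (key = 0):
//    Prefer::NEAREST (11): (61*7 + 26 - 11) % 12 + 11 = 10 + 11 = 21 = Tpc::TPC_C_S (C#)
//    Prefer::FLATS   ( 8): (61*7 + 26 -  8) % 12 +  8 =  1 +  8 =  9 = Tpc::TPC_D_B (Db)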
//---------------------------------------------------------
// pitch2absStepByKey
// absolute step (C0 = 0, D0 = 1, ... C1 = 7, D2 = 8, ...) for a pitch/tpc according to key
// if pAlter not null, returns in it the alteration with respect to the corresponding key degree (-3 to 3)
// (for instance, an F in GMaj yields alteration -1 i.e. 1 semitone below corresp. deg. of GMaj which is F#)
// key: between Key::MIN and Key::MAX
//---------------------------------------------------------
int pitch2absStepByKey(int pitch, int tpc, Key key, int *pAlter)
{
// sanitize input data
if (pitch < 0)
pitch += PITCH_DELTA_OCTAVE;
if (pitch > 127)
pitch -= PITCH_DELTA_OCTAVE;
if (tpc < Tpc::TPC_MIN)
tpc += TPC_DELTA_ENHARMONIC;
if (tpc > Tpc::TPC_MAX)
tpc -= TPC_DELTA_ENHARMONIC;
if (key < Key::MIN)
key += Key::DELTA_ENHARMONIC;
if (key > Key::MAX)
key -= Key::DELTA_ENHARMONIC;
int octave = pitch / PITCH_DELTA_OCTAVE;
if (tpc == Tpc::TPC_C_BB || tpc == Tpc::TPC_C_B)
++octave;
else if (tpc == Tpc::TPC_B_S || tpc == Tpc::TPC_B_SS)
--octave;
int step = tpc2step(tpc);
if (pAlter)
*pAlter = tpc2alterByKey(tpc, key);
return octave * STEP_DELTA_OCTAVE + step;
}
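// Illustrative example: pitch2absStepByKey(66 /* F#4 */, Tpc::TPC_F_S, Key::G, &alter)
// gives octave = 66 / 12 = 5 and step = 3 (F), hence 5 * 7 + 3 = 38, with alter = 0
// because F# is the diatonic degree of G major at that step.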
//---------------------------------------------------------
// absStep2pitchByKey
// the default pitch for the given absolute step in the given key
//---------------------------------------------------------
int absStep2pitchByKey(int step, Key key)
{
// sanitize input data
if (step < 0)
step += STEP_DELTA_OCTAVE;
if (step > 74)
step -= STEP_DELTA_OCTAVE;
if (key < Key::MIN)
key += Key::DELTA_ENHARMONIC;
if (key > Key::MAX)
key -= Key::DELTA_ENHARMONIC;
int octave = step / STEP_DELTA_OCTAVE;
int deltaPitch = step2deltaPitchByKey(step % STEP_DELTA_OCTAVE, key);
return octave * PITCH_DELTA_OCTAVE + deltaPitch;
}
}
| gpl-2.0 |
ryandougherty/mwa-capstone | MWA_Tools/build/matplotlib/doc/mpl_examples/pylab_examples/demo_bboximage.py | 1805 | import matplotlib.pyplot as plt
import numpy as np
from matplotlib.image import BboxImage
from matplotlib.transforms import Bbox, TransformedBbox
if __name__ == "__main__":
fig = plt.figure(1)
ax = plt.subplot(121)
txt = ax.text(0.5, 0.5, "test", size=30, ha="center", color="w")
kwargs = dict()
bbox_image = BboxImage(txt.get_window_extent,
norm = None,
origin=None,
clip_on=False,
**kwargs
)
a = np.arange(256).reshape(1,256)/256.
bbox_image.set_data(a)
ax.add_artist(bbox_image)
ax = plt.subplot(122)
a = np.linspace(0, 1, 256).reshape(1,-1)
a = np.vstack((a,a))
maps = sorted(m for m in plt.cm.datad if not m.endswith("_r"))
#nmaps = len(maps) + 1
#fig.subplots_adjust(top=0.99, bottom=0.01, left=0.2, right=0.99)
ncol = 2
nrow = len(maps)//ncol + 1
xpad_fraction = 0.3
dx = 1./(ncol + xpad_fraction*(ncol-1))
ypad_fraction = 0.3
dy = 1./(nrow + ypad_fraction*(nrow-1))
for i,m in enumerate(maps):
ix, iy = divmod(i, nrow)
#plt.figimage(a, 10, i*10, cmap=plt.get_cmap(m), origin='lower')
bbox0 = Bbox.from_bounds(ix*dx*(1+xpad_fraction),
1.-iy*dy*(1+ypad_fraction)-dy,
dx, dy)
bbox = TransformedBbox(bbox0, ax.transAxes)
bbox_image = BboxImage(bbox,
cmap = plt.get_cmap(m),
norm = None,
origin=None,
**kwargs
)
bbox_image.set_data(a)
ax.add_artist(bbox_image)
plt.draw()
plt.show()
| gpl-2.0 |
princehektor/torcs | src/libs/confscreens/simuconfig.cpp | 5508 | /***************************************************************************
file : simuconfig.cpp
created : Wed Nov 3 21:48:26 CET 2004
copyright : (C) 2004 by Eric Espi�
email : [email protected]
version : $Id: simuconfig.cpp,v 1.4.2.3 2011/12/29 16:14:20 berniw Exp $
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
/** @file
@version $Id: simuconfig.cpp,v 1.4.2.3 2011/12/29 16:14:20 berniw Exp $
*/
#include <stdio.h>
#include <stdlib.h>
#include <tgfclient.h>
#include <raceinit.h>
#include "simuconfig.h"
#include <portability.h>
static float LabelColor[] = {1.0, 0.0, 1.0, 1.0};
/* list of available simulation engine */
static const char *simuVersionList[] = {"simuv2", "simuv3"};
static const int nbVersions = sizeof(simuVersionList) / sizeof(simuVersionList[0]);
static int curVersion = 0;
/* gui label id */
static int SimuVersionId;
/* gui screen handles */
static void *scrHandle = NULL;
static void *prevHandle = NULL;
static void ReadSimuCfg(void)
{
const char *versionName;
int i;
const int BUFSIZE = 1024;
char buf[BUFSIZE];
snprintf(buf, BUFSIZE, "%s%s", GetLocalDir(), RACE_ENG_CFG);
void *paramHandle = GfParmReadFile(buf, GFPARM_RMODE_REREAD | GFPARM_RMODE_CREAT);
versionName = GfParmGetStr(paramHandle, "Modules", "simu", simuVersionList[0]);
for (i = 0; i < nbVersions; i++) {
if (strcmp(versionName, simuVersionList[i]) == 0) {
curVersion = i;
break;
}
}
GfParmReleaseHandle(paramHandle);
GfuiLabelSetText(scrHandle, SimuVersionId, simuVersionList[curVersion]);
}
/* Save the choosen values in the corresponding parameter file */
static void SaveSimuVersion(void * /* dummy */)
{
const int BUFSIZE = 1024;
char buf[BUFSIZE];
snprintf(buf, BUFSIZE, "%s%s", GetLocalDir(), RACE_ENG_CFG);
void *paramHandle = GfParmReadFile(buf, GFPARM_RMODE_REREAD | GFPARM_RMODE_CREAT);
GfParmSetStr(paramHandle, "Modules", "simu", simuVersionList[curVersion]);
GfParmWriteFile(NULL, paramHandle, "raceengine");
GfParmReleaseHandle(paramHandle);
/* return to previous screen */
GfuiScreenActivate(prevHandle);
return;
}
/* change the simulation version */
static void
ChangeSimuVersion(void *vp)
{
if (vp == 0) {
curVersion--;
if (curVersion < 0) {
curVersion = nbVersions - 1;
}
} else {
curVersion++;
if (curVersion == nbVersions) {
curVersion = 0;
}
}
GfuiLabelSetText(scrHandle, SimuVersionId, simuVersionList[curVersion]);
}
static void onActivate(void * /* dummy */)
{
ReadSimuCfg();
}
/* Menu creation */
void *
SimuMenuInit(void *prevMenu)
{
int x, y, x2, x3, x4, dy;
/* screen already created */
if (scrHandle) {
return scrHandle;
}
prevHandle = prevMenu;
scrHandle = GfuiScreenCreateEx((float*)NULL, NULL, onActivate, NULL, (tfuiCallback)NULL, 1);
GfuiTitleCreate(scrHandle, "Simulation Configuration", 0);
GfuiScreenAddBgImg(scrHandle, "data/img/splash-simucfg.png");
x = 20;
x2 = 240;
x3 = x2 + 100;
x4 = x2 + 200;
y = 370;
dy = 30;
y -= dy;
GfuiLabelCreate(scrHandle, "Simulation version:", GFUI_FONT_MEDIUM, x, y, GFUI_ALIGN_HL_VB, 0);
GfuiGrButtonCreate(scrHandle, "data/img/arrow-left.png", "data/img/arrow-left.png",
"data/img/arrow-left.png", "data/img/arrow-left-pushed.png",
x2, y, GFUI_ALIGN_HL_VB, 1,
(void*)-1, ChangeSimuVersion,
NULL, (tfuiCallback)NULL, (tfuiCallback)NULL);
GfuiGrButtonCreate(scrHandle, "data/img/arrow-right.png", "data/img/arrow-right.png",
"data/img/arrow-right.png", "data/img/arrow-right-pushed.png",
x4, y, GFUI_ALIGN_HR_VB, 1,
(void*)1, ChangeSimuVersion,
NULL, (tfuiCallback)NULL, (tfuiCallback)NULL);
SimuVersionId = GfuiLabelCreate(scrHandle, "", GFUI_FONT_MEDIUM_C, x3, y, GFUI_ALIGN_HC_VB, 32);
GfuiLabelSetColor(scrHandle, SimuVersionId, LabelColor);
GfuiButtonCreate(scrHandle, "Accept", GFUI_FONT_LARGE, 210, 40, 150, GFUI_ALIGN_HC_VB, GFUI_MOUSE_UP,
NULL, SaveSimuVersion, NULL, (tfuiCallback)NULL, (tfuiCallback)NULL);
GfuiButtonCreate(scrHandle, "Cancel", GFUI_FONT_LARGE, 430, 40, 150, GFUI_ALIGN_HC_VB, GFUI_MOUSE_UP,
prevMenu, GfuiScreenActivate, NULL, (tfuiCallback)NULL, (tfuiCallback)NULL);
GfuiAddKey(scrHandle, 13, "Save", NULL, SaveSimuVersion, NULL);
GfuiAddKey(scrHandle, 27, "Cancel Selection", prevMenu, GfuiScreenActivate, NULL);
GfuiAddSKey(scrHandle, GLUT_KEY_F12, "Screen-Shot", NULL, GfuiScreenShot, NULL);
GfuiAddSKey(scrHandle, GLUT_KEY_LEFT, "Previous Version in list", (void*)0, ChangeSimuVersion, NULL);
GfuiAddSKey(scrHandle, GLUT_KEY_RIGHT, "Next Version in list", (void*)1, ChangeSimuVersion, NULL);
ReadSimuCfg();
return scrHandle;
}
| gpl-2.0 |
ibm2431/darkstar | scripts/globals/items/flask_of_poison_potion.lua | 511 | -----------------------------------------
-- ID: 4157
-- Item: Poison Potion
-- Item Effect: Poison 1HP / Removes 60 HP over 180 seconds
-----------------------------------------
require("scripts/globals/status")
require("scripts/globals/msg")
function onItemCheck(target)
return 0
end
function onItemUse(target)
if (not target:hasStatusEffect(dsp.effect.POISON)) then
target:addStatusEffect(dsp.effect.POISON,1,3,180)
else
target:messageBasic(dsp.msg.basic.NO_EFFECT)
end
end
| gpl-3.0 |
nbcloud/fluorescence | src/fluorescence/net/packets/65_weather.hpp | 1195 | /*
* fluorescence is a free, customizable Ultima Online client.
* Copyright (C) 2011-2012, http://fluorescence-client.org
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef FLUO_NET_PACKETS_WEATHER_HPP
#define FLUO_NET_PACKETS_WEATHER_HPP
#include <net/packet.hpp>
namespace fluo {
namespace net {
namespace packets {
class Weather : public Packet {
public:
Weather();
virtual bool read(const int8_t* buf, unsigned int len, unsigned int& index);
virtual void onReceive();
private:
uint8_t type_;
uint8_t intensity_;
uint8_t temperature_;
};
}
}
}
#endif
| gpl-3.0 |
kristina-san/CONRAD | src/edu/stanford/rsl/conrad/physics/materials/utils/AttenuationRetrievalMode.java | 385 | package edu.stanford.rsl.conrad.physics.materials.utils;
/**
 * Gives the developer the option of how to retrieve mass attenuation data.
* @author Rotimi X Ojo
*
*/
public enum AttenuationRetrievalMode {
LOCAL_RETRIEVAL,
ONLINE_RETRIEVAL;
}
/*
* Copyright (C) 2010-2014 Rotimi X Ojo
* CONRAD is developed as an Open Source project under the GNU General Public License (GPL).
*/ | gpl-3.0 |
zynjec/darkstar | scripts/globals/items/corsair_die.lua | 289 | -----------------------------------------
-- ID: 5493
-- Corsair Die
-- Teaches the job ability Corsair's Roll
-----------------------------------------
function onItemCheck(target)
return target:canLearnAbility(98)
end
function onItemUse(target)
target:addLearnedAbility(98)
end | gpl-3.0 |
thepurpleblob/gumoodle | blocks/html/lib.php | 3272 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Form for editing HTML block instances.
*
* @copyright 2010 Petr Skoda (http://skodak.org)
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
* @package block_html
* @category files
* @param stdClass $course course object
* @param stdClass $birecord_or_cm block instance record
* @param stdClass $context context object
* @param string $filearea file area
* @param array $args extra arguments
* @param bool $forcedownload whether or not force download
* @param array $options additional options affecting the file serving
* @return bool
*/
function block_html_pluginfile($course, $birecord_or_cm, $context, $filearea, $args, $forcedownload, array $options=array()) {
global $SCRIPT;
if ($context->contextlevel != CONTEXT_BLOCK) {
send_file_not_found();
}
require_course_login($course);
if ($filearea !== 'content') {
send_file_not_found();
}
$fs = get_file_storage();
$filename = array_pop($args);
$filepath = $args ? '/'.implode('/', $args).'/' : '/';
if (!$file = $fs->get_file($context->id, 'block_html', 'content', 0, $filepath, $filename) or $file->is_directory()) {
send_file_not_found();
}
if ($parentcontext = get_context_instance_by_id($birecord_or_cm->parentcontextid)) {
if ($parentcontext->contextlevel == CONTEXT_USER) {
            // Force download on all personal pages, including /my/,
            // because we do not have a reliable way to find out from where this is used.
$forcedownload = true;
}
} else {
        // Weird, there should be a parent context; better force download then.
$forcedownload = true;
}
session_get_instance()->write_close();
send_stored_file($file, 60*60, 0, $forcedownload, $options);
}
/**
* Perform global search replace such as when migrating site to new URL.
* @param $search
* @param $replace
* @return void
*/
function block_html_global_db_replace($search, $replace) {
global $DB;
$instances = $DB->get_recordset('block_instances', array('blockname' => 'html'));
foreach ($instances as $instance) {
// TODO: intentionally hardcoded until MDL-26800 is fixed
$config = unserialize(base64_decode($instance->configdata));
if (isset($config->text) and is_string($config->text)) {
$config->text = str_replace($search, $replace, $config->text);
$DB->set_field('block_instances', 'configdata', base64_encode(serialize($config)), array('id' => $instance->id));
}
}
$instances->close();
}
| gpl-3.0 |
calgaryscientific/consul | agent/consul/flood.go | 1387 | package consul
import (
"time"
"github.com/hashicorp/consul/agent/router"
"github.com/hashicorp/serf/serf"
)
// FloodNotify lets all the waiting Flood goroutines know that some change may
// have affected them.
func (s *Server) FloodNotify() {
s.floodLock.RLock()
defer s.floodLock.RUnlock()
for _, ch := range s.floodCh {
select {
case ch <- struct{}{}:
default:
}
}
}
// Flood is a long-running goroutine that floods servers from the LAN to the
// given global Serf instance, such as the WAN. This will exit once either of
// the Serf instances are shut down.
func (s *Server) Flood(addrFn router.FloodAddrFn, portFn router.FloodPortFn, global *serf.Serf) {
s.floodLock.Lock()
floodCh := make(chan struct{})
s.floodCh = append(s.floodCh, floodCh)
s.floodLock.Unlock()
ticker := time.NewTicker(s.config.SerfFloodInterval)
defer ticker.Stop()
defer func() {
s.floodLock.Lock()
defer s.floodLock.Unlock()
for i, ch := range s.floodCh {
if ch == floodCh {
s.floodCh = append(s.floodCh[:i], s.floodCh[i+1:]...)
return
}
}
panic("flood channels out of sync")
}()
for {
select {
case <-s.serfLAN.ShutdownCh():
return
case <-global.ShutdownCh():
return
case <-ticker.C:
goto FLOOD
case <-floodCh:
goto FLOOD
}
FLOOD:
router.FloodJoins(s.logger, addrFn, portFn, s.config.Datacenter, s.serfLAN, global)
}
}
| mpl-2.0 |
bjalon/nuxeo-features | nuxeo-webengine-features/nuxeo-webengine-base/src/main/resources/skin/resources/script/tiny_mce/plugins/preview/langs/fr.js | 142 | // FR lang variables
// Modified by Motte, last updated 2006-03-23
tinyMCE.addToLang('',{
preview_desc : 'Prévisualisation'
});
| lgpl-2.1 |
ibkim11/dealii | source/fe/fe_data.cc | 3687 | // ---------------------------------------------------------------------
//
// Copyright (C) 2001 - 2014 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
#include <deal.II/base/geometry_info.h>
#include <deal.II/fe/fe.h>
DEAL_II_NAMESPACE_OPEN
template<int dim>
FiniteElementData<dim>::FiniteElementData ()
:
dofs_per_vertex(0),
dofs_per_line(0),
dofs_per_quad(0),
dofs_per_hex(0),
first_line_index(0),
first_quad_index(0),
first_hex_index(0),
first_face_line_index(0),
first_face_quad_index(0),
dofs_per_face(0),
dofs_per_cell (0),
components(0),
degree(0),
conforming_space(unknown),
cached_primitivity(false)
{}
template <int dim>
FiniteElementData<dim>::
FiniteElementData (const std::vector<unsigned int> &dofs_per_object,
const unsigned int n_components,
const unsigned int degree,
const Conformity conformity,
const unsigned int)
:
dofs_per_vertex(dofs_per_object[0]),
dofs_per_line(dofs_per_object[1]),
dofs_per_quad(dim>1? dofs_per_object[2]:0),
dofs_per_hex(dim>2? dofs_per_object[3]:0),
first_line_index(GeometryInfo<dim>::vertices_per_cell
* dofs_per_vertex),
first_quad_index(first_line_index+
GeometryInfo<dim>::lines_per_cell
* dofs_per_line),
first_hex_index(first_quad_index+
GeometryInfo<dim>::quads_per_cell
* dofs_per_quad),
first_face_line_index(GeometryInfo<dim-1>::vertices_per_cell
* dofs_per_vertex),
first_face_quad_index((dim==3 ?
GeometryInfo<dim-1>::vertices_per_cell
* dofs_per_vertex :
GeometryInfo<dim>::vertices_per_cell
* dofs_per_vertex) +
GeometryInfo<dim-1>::lines_per_cell
* dofs_per_line),
dofs_per_face(GeometryInfo<dim>::vertices_per_face * dofs_per_vertex +
GeometryInfo<dim>::lines_per_face * dofs_per_line +
GeometryInfo<dim>::quads_per_face *dofs_per_quad),
dofs_per_cell (GeometryInfo<dim>::vertices_per_cell * dofs_per_vertex +
GeometryInfo<dim>::lines_per_cell * dofs_per_line +
GeometryInfo<dim>::quads_per_cell * dofs_per_quad +
GeometryInfo<dim>::hexes_per_cell *dofs_per_hex),
components(n_components),
degree(degree),
conforming_space(conformity),
block_indices_data(1, dofs_per_cell)
{
Assert(dofs_per_object.size()==dim+1, ExcDimensionMismatch(dofs_per_object.size()-1,dim));
}
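// Worked example (illustrative, assuming a continuous Q2 element in 2d, i.e.
// dofs_per_object = {1,1,1}): with GeometryInfo<2>::vertices_per_cell = 4 and
// lines_per_cell = 4 this gives first_line_index = 4*1 = 4, first_quad_index =
// 4 + 4*1 = 8, dofs_per_face = 2*1 + 1*1 = 3, and dofs_per_cell = 4*1 + 4*1 + 1*1 = 9.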
template<int dim>
bool FiniteElementData<dim>::operator== (const FiniteElementData<dim> &f) const
{
return ((dofs_per_vertex == f.dofs_per_vertex) &&
(dofs_per_line == f.dofs_per_line) &&
(dofs_per_quad == f.dofs_per_quad) &&
(dofs_per_hex == f.dofs_per_hex) &&
(components == f.components) &&
(degree == f.degree) &&
(conforming_space == f.conforming_space));
}
template class FiniteElementData<1>;
template class FiniteElementData<2>;
template class FiniteElementData<3>;
DEAL_II_NAMESPACE_CLOSE
| lgpl-2.1 |
daniel-he/community-edition | projects/repository/source/test-resources/org/alfresco/repo/replication/script/test_replicationService.js | 4322 | // Test that we can work with the definition properly
function testReplicationDefinition()
{
// Check the empty one
test.assertEquals(EmptyName, Empty.replicationName);
test.assertEquals("Empty", Empty.description);
test.assertEquals(null, Empty.targetName);
test.assertEquals(0, Empty.payload.length);
// Check the persisted one
test.assertEquals(PersistedName, Persisted.replicationName);
test.assertEquals("Persisted", Persisted.description);
test.assertEquals(PersistedTarget, Persisted.targetName);
test.assertEquals(2, Persisted.payload.length);
test.assertEquals("workspace://SpacesStore/Testing", Persisted.payload[0].nodeRef.toString())
test.assertEquals("workspace://SpacesStore/Testing2", Persisted.payload[1].nodeRef.toString())
}
// Test listing
function testListing()
{
// All
var definitions = replicationService.loadReplicationDefinitions();
test.assertEquals(2, definitions.length);
var foundP1 = false;
var foundP2 = false;
for(var i in definitions)
{
var definition = definitions[i];
if(definition.replicationName == PersistedName)
{
foundP1 = true;
test.assertEquals(PersistedName, definition.replicationName);
test.assertEquals("Persisted", definition.description);
test.assertEquals(PersistedTarget, definition.targetName);
test.assertEquals(2, definition.payload.length);
test.assertEquals("workspace://SpacesStore/Testing", definition.payload[0].nodeRef.toString())
test.assertEquals("workspace://SpacesStore/Testing2", definition.payload[1].nodeRef.toString())
}
if(definition.replicationName == Persisted2Name)
{
foundP2 = true;
test.assertEquals(Persisted2Name, definition.replicationName);
test.assertEquals("Persisted2", definition.description);
test.assertEquals(Persisted2Target, definition.targetName);
test.assertEquals(0, definition.payload.length);
}
}
// By target - for Persisted
definitions = replicationService.loadReplicationDefinitions(PersistedTarget);
test.assertEquals(1, definitions.length);
test.assertEquals(PersistedName, definitions[0].replicationName);
// By target - for Persisted2
definitions = replicationService.loadReplicationDefinitions(Persisted2Target);
test.assertEquals(1, definitions.length);
test.assertEquals(Persisted2Name, definitions[0].replicationName);
// By target - invalid target
definitions = replicationService.loadReplicationDefinitions("MadeUpDoesntExit");
test.assertEquals(0, definitions.length);
}
// Test creating and saving
function testCreateSave()
{
// Create
var definition = replicationService.createReplicationDefinition("JS","From JS");
test.assertEquals("JS", definition.replicationName);
test.assertEquals("From JS", definition.description);
test.assertEquals(null, definition.targetName);
test.assertEquals(0, definition.payload.length);
// Set some bits
definition.targetName = "TargetTarget";
nodes = [
Persisted.payload[0], Persisted.payload[1]
]
definition.payload = nodes
// Won't be there if loaded
test.assertEquals(null, replicationService.loadReplicationDefinition("JS"));
// Save it
replicationService.saveReplicationDefinition(definition);
// Load and re-check
definition = replicationService.loadReplicationDefinition("JS");
test.assertNotNull(definition);
test.assertEquals("JS", definition.replicationName);
test.assertEquals("From JS", definition.description);
test.assertEquals("TargetTarget", definition.targetName);
test.assertEquals(2, definition.payload.length);
test.assertEquals("workspace://SpacesStore/Testing", definition.payload[0].nodeRef.toString())
test.assertEquals("workspace://SpacesStore/Testing2", definition.payload[1].nodeRef.toString())
}
// Test running a replication (without a full definition, so it should quickly fail)
function testRunReplication()
{
var definition = replicationService.loadReplicationDefinition(Persisted2Name);
test.assertNotNull(definition);
// Should give an error about no payload
try {
replicationService.replicate(definition);
test.fail("Shouldn't be able to run a definition lacking a payload");
} catch(err) {
var msg = err.message;
test.assertTrue(msg.indexOf("payload") > -1, "Payload error not found in " + msg);
}
}
// Execute Tests
testReplicationDefinition();
testListing();
testCreateSave();
testRunReplication(); | lgpl-3.0 |
brentonhouse/brentonhouse.alloy | test/apps/testing/ALOY-665/_generated/mobileweb/alloy/controllers/index.js | 2464 | function __processArg(obj, key) {
var arg = null;
if (obj) {
arg = obj[key] || null;
delete obj[key];
}
return arg;
}
function Controller() {
function doSwipe(e) {
Ti.API.info("swipe: " + e.direction);
}
require("/alloy/controllers/BaseController").apply(this, Array.prototype.slice.call(arguments));
this.__controllerPath = "index";
this.args = arguments[0] || {};
if (arguments[0]) {
__processArg(arguments[0], "__parentSymbol");
__processArg(arguments[0], "$model");
__processArg(arguments[0], "__itemTemplate");
}
var $ = this;
var exports = {};
var __defers = {};
$.__views.index = Ti.UI.createWindow({
backgroundColor: "#efefef",
fullscreen: false,
exitOnClose: true,
id: "index"
});
$.__views.index && $.addTopLevelView($.__views.index);
try {
$.addListener($.__views.index, "touchstart", touch.start);
} catch (e) {
__defers["$.__views.index!touchstart!touch.start"] = true;
}
try {
$.addListener($.__views.index, "touchend", touch["end"].func);
} catch (e) {
__defers["$.__views.index!touchend!touch['end'].func"] = true;
}
doSwipe ? $.addListener($.__views.index, "swipe", doSwipe) : __defers["$.__views.index!swipe!doSwipe"] = true;
$.__views.label = Ti.UI.createLabel({
touchEnabled: false,
color: "#000",
height: Ti.UI.SIZE,
width: Ti.UI.SIZE,
font: {
fontSize: "24dp",
fontWeight: "bold"
},
text: "touch and swipe",
id: "label"
});
$.__views.index.add($.__views.label);
exports.destroy = function() {};
_.extend($, $.__views);
var touch = {
start: function(e) {
Ti.API.info("touchstart");
},
end: {
func: function(e) {
Ti.API.info("touchend");
}
}
};
$.index.open();
__defers["$.__views.index!touchstart!touch.start"] && $.addListener($.__views.index, "touchstart", touch.start);
__defers["$.__views.index!touchend!touch['end'].func"] && $.addListener($.__views.index, "touchend", touch["end"].func);
__defers["$.__views.index!swipe!doSwipe"] && $.addListener($.__views.index, "swipe", doSwipe);
_.extend($, exports);
}
var Alloy = require("/alloy"), Backbone = Alloy.Backbone, _ = Alloy._;
module.exports = Controller; | apache-2.0 |
GunoH/intellij-community | java/java-impl/src/com/intellij/refactoring/rename/PsiPackageRenameValidator.java | 2098 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.rename;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiPackage;
import com.intellij.psi.impl.file.PsiDirectoryFactory;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* @author anna
*/
public class PsiPackageRenameValidator implements RenameInputValidatorEx {
private final ElementPattern<? extends PsiElement> myPattern = PlatformPatterns.psiElement(PsiPackage.class);
@NotNull
@Override
public ElementPattern<? extends PsiElement> getPattern() {
return myPattern;
}
@Nullable
@Override
public String getErrorMessage(@NotNull String newName, @NotNull Project project) {
if (FileTypeManager.getInstance().isFileIgnored(newName)) {
return JavaBundle.message("rename.package.ignored.name.warning");
}
if (newName.length() > 0) {
if (!PsiDirectoryFactory.getInstance(project).isValidPackageName(newName)) {
return JavaBundle.message("rename.package.invalid.name.error");
}
}
return null;
}
@Override
public boolean isInputValid(@NotNull String newName, @NotNull PsiElement element, @NotNull ProcessingContext context) {
return !newName.isEmpty();
}
} | apache-2.0 |
sergecodd/FireFox-OS | B2G/libcore/luni/src/test/java/org/apache/harmony/crypto/tests/javax/crypto/func/CipherDESedeTest.java | 2164 | /*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.crypto.tests.javax.crypto.func;
import junit.framework.TestCase;
public class CipherDESedeTest extends TestCase {
// 80 cases checked
public void test_DESedeNoISO() {
CipherSymmetricKeyThread DESedeNoISO = new CipherSymmetricKeyThread(
"DESede", new int[] {112, 168},// Keysize must be 112 or 168.
new String[] {
"ECB", "CBC", "CFB", "CFB8", "CFB16", "CFB24", "CFB32",
"CFB40", "CFB48", "CFB56", "CFB64", "OFB", "OFB8",
"OFB16", "OFB24", "OFB32", "OFB40", "OFB48", "OFB56",
"OFB64"}, new String[] {"NoPadding", "PKCS5Padding"});
DESedeNoISO.launcher();
assertEquals(DESedeNoISO.getFailureMessages(), 0, DESedeNoISO
.getTotalFailuresNumber());
}
// 40 cases checked
public void test_DESedeISO() {
CipherSymmetricKeyThread DESedeISO = new CipherSymmetricKeyThread(
"DESede", new int[] {112, 168},// Keysize must be 112 or 168.
new String[] {
"ECB", "CBC", "CFB", "CFB8", "CFB16", "CFB24", "CFB32",
"CFB40", "CFB48", "CFB56", "CFB64", "OFB", "OFB8",
"OFB16", "OFB24", "OFB32", "OFB40", "OFB48", "OFB56",
"OFB64"}, new String[] {"ISO10126PADDING"});
DESedeISO.launcher();
assertEquals(DESedeISO.getFailureMessages(), 0, DESedeISO
.getTotalFailuresNumber());
}
}
| apache-2.0 |
ern/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/disruption/SingleNodeDisruption.java | 2340 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.test.disruption;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.test.InternalTestCluster;
import java.util.Random;
import static org.junit.Assert.assertFalse;
public abstract class SingleNodeDisruption implements ServiceDisruptionScheme {
protected final Logger logger = LogManager.getLogger(getClass());
protected volatile String disruptedNode;
protected volatile InternalTestCluster cluster;
protected final Random random;
public SingleNodeDisruption(Random random) {
this.random = new Random(random.nextLong());
}
@Override
public void applyToCluster(InternalTestCluster cluster) {
this.cluster = cluster;
if (disruptedNode == null) {
String[] nodes = cluster.getNodeNames();
disruptedNode = nodes[random.nextInt(nodes.length)];
}
}
@Override
public void removeFromCluster(InternalTestCluster cluster) {
if (disruptedNode != null) {
removeFromNode(disruptedNode, cluster);
}
}
@Override
public synchronized void applyToNode(String node, InternalTestCluster cluster) {
}
@Override
public synchronized void removeFromNode(String node, InternalTestCluster cluster) {
if (disruptedNode == null) {
return;
}
if (node.equals(disruptedNode) == false) {
return;
}
stopDisrupting();
disruptedNode = null;
}
@Override
public synchronized void testClusterClosed() {
disruptedNode = null;
}
protected void ensureNodeCount(InternalTestCluster cluster) {
assertFalse("cluster failed to form after disruption was healed", cluster.client().admin().cluster().prepareHealth()
.setWaitForNodes(String.valueOf(cluster.size()))
.setWaitForNoRelocatingShards(true)
.get().isTimedOut());
}
}
| apache-2.0 |
xuanyuanking/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala | 27776 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.TimeUnit._
import scala.collection.mutable.HashMap
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.fs.Path
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, UnknownPartitioning}
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat => ParquetSource}
import org.apache.spark.sql.execution.datasources.v2.PushedDownOperators
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.execution.vectorized.ConstantColumnVector
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.{BaseRelation, Filter}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.vectorized.ColumnarBatch
import org.apache.spark.util.Utils
import org.apache.spark.util.collection.BitSet
trait DataSourceScanExec extends LeafExecNode {
def relation: BaseRelation
def tableIdentifier: Option[TableIdentifier]
protected val nodeNamePrefix: String = ""
override val nodeName: String = {
s"Scan $relation ${tableIdentifier.map(_.unquotedString).getOrElse("")}"
}
// Metadata that describes more details of this scan.
protected def metadata: Map[String, String]
protected val maxMetadataValueLength = conf.maxMetadataStringLength
override def simpleString(maxFields: Int): String = {
val metadataEntries = metadata.toSeq.sorted.map {
case (key, value) =>
key + ": " + StringUtils.abbreviate(redact(value), maxMetadataValueLength)
}
val metadataStr = truncatedString(metadataEntries, " ", ", ", "", maxFields)
redact(
s"$nodeNamePrefix$nodeName${truncatedString(output, "[", ",", "]", maxFields)}$metadataStr")
}
override def verboseStringWithOperatorId(): String = {
val metadataStr = metadata.toSeq.sorted.filterNot {
case (_, value) if (value.isEmpty || value.equals("[]")) => true
case (key, _) if (key.equals("DataFilters") || key.equals("Format")) => true
case (_, _) => false
}.map {
case (key, value) => s"$key: ${redact(value)}"
}
s"""
|$formattedNodeName
|${ExplainUtils.generateFieldString("Output", output)}
|${metadataStr.mkString("\n")}
|""".stripMargin
}
/**
* Shorthand for calling redactString() without specifying redacting rules
*/
protected def redact(text: String): String = {
Utils.redact(conf.stringRedactionPattern, text)
}
/**
* The data being read in. This is to provide input to the tests in a way compatible with
* [[InputRDDCodegen]] which all implementations used to extend.
*/
def inputRDDs(): Seq[RDD[InternalRow]]
}
/** Physical plan node for scanning data from a relation. */
case class RowDataSourceScanExec(
output: Seq[Attribute],
requiredSchema: StructType,
filters: Set[Filter],
handledFilters: Set[Filter],
pushedDownOperators: PushedDownOperators,
rdd: RDD[InternalRow],
@transient relation: BaseRelation,
tableIdentifier: Option[TableIdentifier])
extends DataSourceScanExec with InputRDDCodegen {
override lazy val metrics =
Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
protected override def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
rdd.mapPartitionsWithIndexInternal { (index, iter) =>
val proj = UnsafeProjection.create(schema)
proj.initialize(index)
iter.map( r => {
numOutputRows += 1
proj(r)
})
}
}
  // Input can be InternalRow; it has to be turned into UnsafeRows.
override protected val createUnsafeProjection: Boolean = true
override def inputRDD: RDD[InternalRow] = rdd
override val metadata: Map[String, String] = {
def seqToString(seq: Seq[Any]): String = seq.mkString("[", ", ", "]")
val markedFilters = if (filters.nonEmpty) {
for (filter <- filters) yield {
if (handledFilters.contains(filter)) s"*$filter" else s"$filter"
}
} else {
handledFilters
}
val topNOrLimitInfo =
if (pushedDownOperators.limit.isDefined && pushedDownOperators.sortValues.nonEmpty) {
val pushedTopN =
s"ORDER BY ${seqToString(pushedDownOperators.sortValues.map(_.describe()))}" +
s" LIMIT ${pushedDownOperators.limit.get}"
Some("pushedTopN" -> pushedTopN)
} else {
pushedDownOperators.limit.map(value => "PushedLimit" -> s"LIMIT $value")
}
Map(
"ReadSchema" -> requiredSchema.catalogString,
"PushedFilters" -> seqToString(markedFilters.toSeq)) ++
pushedDownOperators.aggregation.fold(Map[String, String]()) { v =>
Map("PushedAggregates" -> seqToString(v.aggregateExpressions.map(_.describe())),
"PushedGroupByColumns" -> seqToString(v.groupByColumns.map(_.describe())))} ++
topNOrLimitInfo ++
pushedDownOperators.sample.map(v => "PushedSample" ->
s"SAMPLE (${(v.upperBound - v.lowerBound) * 100}) ${v.withReplacement} SEED(${v.seed})"
)
}
// Don't care about `rdd` and `tableIdentifier` when canonicalizing.
override def doCanonicalize(): SparkPlan =
copy(
output.map(QueryPlan.normalizeExpressions(_, output)),
rdd = null,
tableIdentifier = None)
}
/**
* Physical plan node for scanning data from HadoopFsRelations.
*
* @param relation The file-based relation to scan.
* @param output Output attributes of the scan, including data attributes and partition attributes.
* @param requiredSchema Required schema of the underlying relation, excluding partition columns.
* @param partitionFilters Predicates to use for partition pruning.
* @param optionalBucketSet Bucket ids for bucket pruning.
* @param optionalNumCoalescedBuckets Number of coalesced buckets.
* @param dataFilters Filters on non-partition columns.
* @param tableIdentifier Identifier for the table in the metastore.
* @param disableBucketedScan Disable bucketed scan based on physical query plan, see rule
* [[DisableUnnecessaryBucketedScan]] for details.
*/
case class FileSourceScanExec(
@transient relation: HadoopFsRelation,
output: Seq[Attribute],
requiredSchema: StructType,
partitionFilters: Seq[Expression],
optionalBucketSet: Option[BitSet],
optionalNumCoalescedBuckets: Option[Int],
dataFilters: Seq[Expression],
tableIdentifier: Option[TableIdentifier],
disableBucketedScan: Boolean = false)
extends DataSourceScanExec {
lazy val metadataColumns: Seq[AttributeReference] =
output.collect { case MetadataAttribute(attr) => attr }
  // Note that some vals referring to the file-based relation are intentionally lazy
  // so that this plan can be canonicalized on the executor side too. See SPARK-23731.
override lazy val supportsColumnar: Boolean = {
relation.fileFormat.supportBatch(relation.sparkSession, schema)
}
private lazy val needsUnsafeRowConversion: Boolean = {
if (relation.fileFormat.isInstanceOf[ParquetSource]) {
conf.parquetVectorizedReaderEnabled
} else {
false
}
}
override def vectorTypes: Option[Seq[String]] =
relation.fileFormat.vectorTypes(
requiredSchema = requiredSchema,
partitionSchema = relation.partitionSchema,
relation.sparkSession.sessionState.conf).map { vectorTypes =>
// for column-based file format, append metadata column's vector type classes if any
vectorTypes ++ Seq.fill(metadataColumns.size)(classOf[ConstantColumnVector].getName)
}
private lazy val driverMetrics: HashMap[String, Long] = HashMap.empty
/**
* Send the driver-side metrics. Before calling this function, selectedPartitions has
* been initialized. See SPARK-26327 for more details.
*/
private def sendDriverMetrics(): Unit = {
driverMetrics.foreach(e => metrics(e._1).add(e._2))
val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
SQLMetrics.postDriverMetricUpdates(sparkContext, executionId,
metrics.filter(e => driverMetrics.contains(e._1)).values.toSeq)
}
private def isDynamicPruningFilter(e: Expression): Boolean =
e.find(_.isInstanceOf[PlanExpression[_]]).isDefined
@transient lazy val selectedPartitions: Array[PartitionDirectory] = {
val optimizerMetadataTimeNs = relation.location.metadataOpsTimeNs.getOrElse(0L)
val startTime = System.nanoTime()
val ret =
relation.location.listFiles(
partitionFilters.filterNot(isDynamicPruningFilter), dataFilters)
setFilesNumAndSizeMetric(ret, true)
val timeTakenMs = NANOSECONDS.toMillis(
(System.nanoTime() - startTime) + optimizerMetadataTimeNs)
driverMetrics("metadataTime") = timeTakenMs
ret
}.toArray
// We can only determine the actual partitions at runtime when a dynamic partition filter is
// present. This is because such a filter relies on information that is only available at run
  // time (for instance the keys used on the other side of a join).
@transient private lazy val dynamicallySelectedPartitions: Array[PartitionDirectory] = {
val dynamicPartitionFilters = partitionFilters.filter(isDynamicPruningFilter)
if (dynamicPartitionFilters.nonEmpty) {
val startTime = System.nanoTime()
// call the file index for the files matching all filters except dynamic partition filters
val predicate = dynamicPartitionFilters.reduce(And)
val partitionColumns = relation.partitionSchema
val boundPredicate = Predicate.create(predicate.transform {
case a: AttributeReference =>
val index = partitionColumns.indexWhere(a.name == _.name)
BoundReference(index, partitionColumns(index).dataType, nullable = true)
}, Nil)
val ret = selectedPartitions.filter(p => boundPredicate.eval(p.values))
setFilesNumAndSizeMetric(ret, false)
val timeTakenMs = (System.nanoTime() - startTime) / 1000 / 1000
driverMetrics("pruningTime") = timeTakenMs
ret
} else {
selectedPartitions
}
}
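  // Illustrative example (assumed column names): for a table partitioned by `date` that is
  // joined on `date`, planning may leave a DynamicPruningExpression such as
  // `date IN (subquery)` in partitionFilters. It is skipped in `selectedPartitions` above
  // and only evaluated here, once the subquery result is available, by binding `date` to its
  // position in the partition schema and filtering the already-listed partitions.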
/**
* [[partitionFilters]] can contain subqueries whose results are available only at runtime so
* accessing [[selectedPartitions]] should be guarded by this method during planning
*/
private def hasPartitionsAvailableAtRunTime: Boolean = {
partitionFilters.exists(ExecSubqueryExpression.hasSubquery)
}
private def toAttribute(colName: String): Option[Attribute] =
output.find(_.name == colName)
// exposed for testing
lazy val bucketedScan: Boolean = {
if (relation.sparkSession.sessionState.conf.bucketingEnabled && relation.bucketSpec.isDefined
&& !disableBucketedScan) {
val spec = relation.bucketSpec.get
val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
bucketColumns.size == spec.bucketColumnNames.size
} else {
false
}
}
override lazy val (outputPartitioning, outputOrdering): (Partitioning, Seq[SortOrder]) = {
if (bucketedScan) {
// For bucketed columns:
// -----------------------
// `HashPartitioning` would be used only when:
// 1. ALL the bucketing columns are being read from the table
//
// For sorted columns:
// ---------------------
      // Sort ordering should be used when ALL these criteria match:
// 1. `HashPartitioning` is being used
// 2. A prefix (or all) of the sort columns are being read from the table.
//
// Sort ordering would be over the prefix subset of `sort columns` being read
// from the table.
// e.g.
// Assume (col0, col2, col3) are the columns read from the table
// If sort columns are (col0, col1), then sort ordering would be considered as (col0)
// If sort columns are (col1, col0), then sort ordering would be empty as per rule #2
// above
val spec = relation.bucketSpec.get
val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
val numPartitions = optionalNumCoalescedBuckets.getOrElse(spec.numBuckets)
val partitioning = HashPartitioning(bucketColumns, numPartitions)
val sortColumns =
spec.sortColumnNames.map(x => toAttribute(x)).takeWhile(x => x.isDefined).map(_.get)
val shouldCalculateSortOrder =
conf.getConf(SQLConf.LEGACY_BUCKETED_TABLE_SCAN_OUTPUT_ORDERING) &&
sortColumns.nonEmpty &&
!hasPartitionsAvailableAtRunTime
val sortOrder = if (shouldCalculateSortOrder) {
        // In case of bucketing, it's possible to have multiple files belonging to the
        // same bucket in a given relation. Each of these files is locally sorted,
        // but those files combined together are not globally sorted. Given that,
        // the RDD partition will not be sorted even if the relation has sort columns set.
        // The current solution is to check that every bucket has at most a single file in it.
val files = selectedPartitions.flatMap(partition => partition.files)
val bucketToFilesGrouping =
files.map(_.getPath.getName).groupBy(file => BucketingUtils.getBucketId(file))
val singleFilePartitions = bucketToFilesGrouping.forall(p => p._2.length <= 1)
// TODO SPARK-24528 Sort order is currently ignored if buckets are coalesced.
if (singleFilePartitions && optionalNumCoalescedBuckets.isEmpty) {
          // TODO Currently Spark does not support writing columns sorted in descending order,
          // so Ascending order is used. This can be fixed in the future.
sortColumns.map(attribute => SortOrder(attribute, Ascending))
} else {
Nil
}
} else {
Nil
}
(partitioning, sortOrder)
} else {
(UnknownPartitioning(0), Nil)
}
}
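  // Worked example (illustrative, assumed column names): for a table bucketed by (col0)
  // into 8 buckets and sorted by (col0, col1), a scan reading (col0, col2) reports
  // HashPartitioning(col0, 8). The sort columns are taken only as the prefix that is
  // actually read, so the reported ordering is Seq(col0) when the legacy bucketed-scan
  // ordering flag is enabled and each bucket resolves to at most one file. If the read
  // columns were (col2, col3), the bucket column is missing and the scan falls back to
  // UnknownPartitioning with no ordering.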
@transient
private lazy val pushedDownFilters = {
val supportNestedPredicatePushdown = DataSourceUtils.supportNestedPredicatePushdown(relation)
// TODO: should be able to push filters containing metadata columns down to skip files
dataFilters.filterNot(_.references.exists {
case MetadataAttribute(_) => true
case _ => false
}).flatMap(DataSourceStrategy.translateFilter(_, supportNestedPredicatePushdown))
}
override lazy val metadata: Map[String, String] = {
def seqToString(seq: Seq[Any]) = seq.mkString("[", ", ", "]")
val location = relation.location
val locationDesc =
location.getClass.getSimpleName +
Utils.buildLocationMetadata(location.rootPaths, maxMetadataValueLength)
val metadata =
Map(
"Format" -> relation.fileFormat.toString,
"ReadSchema" -> requiredSchema.catalogString,
"Batched" -> supportsColumnar.toString,
"PartitionFilters" -> seqToString(partitionFilters),
"PushedFilters" -> seqToString(pushedDownFilters),
"DataFilters" -> seqToString(dataFilters),
"Location" -> locationDesc)
relation.bucketSpec.map { spec =>
val bucketedKey = "Bucketed"
if (bucketedScan) {
val numSelectedBuckets = optionalBucketSet.map { b =>
b.cardinality()
} getOrElse {
spec.numBuckets
}
metadata ++ Map(
bucketedKey -> "true",
"SelectedBucketsCount" -> (s"$numSelectedBuckets out of ${spec.numBuckets}" +
optionalNumCoalescedBuckets.map { b => s" (Coalesced to $b)"}.getOrElse("")))
} else if (!relation.sparkSession.sessionState.conf.bucketingEnabled) {
metadata + (bucketedKey -> "false (disabled by configuration)")
} else if (disableBucketedScan) {
metadata + (bucketedKey -> "false (disabled by query planner)")
} else {
metadata + (bucketedKey -> "false (bucket column(s) not read)")
}
} getOrElse {
metadata
}
}
override def verboseStringWithOperatorId(): String = {
val metadataStr = metadata.toSeq.sorted.filterNot {
case (_, value) if (value.isEmpty || value.equals("[]")) => true
case (key, _) if (key.equals("DataFilters") || key.equals("Format")) => true
case (_, _) => false
}.map {
case (key, _) if (key.equals("Location")) =>
val location = relation.location
val numPaths = location.rootPaths.length
val abbreviatedLocation = if (numPaths <= 1) {
location.rootPaths.mkString("[", ", ", "]")
} else {
"[" + location.rootPaths.head + s", ... ${numPaths - 1} entries]"
}
s"$key: ${location.getClass.getSimpleName} ${redact(abbreviatedLocation)}"
case (key, value) => s"$key: ${redact(value)}"
}
s"""
|$formattedNodeName
|${ExplainUtils.generateFieldString("Output", output)}
|${metadataStr.mkString("\n")}
|""".stripMargin
}
lazy val inputRDD: RDD[InternalRow] = {
val readFile: (PartitionedFile) => Iterator[InternalRow] =
relation.fileFormat.buildReaderWithPartitionValues(
sparkSession = relation.sparkSession,
dataSchema = relation.dataSchema,
partitionSchema = relation.partitionSchema,
requiredSchema = requiredSchema,
filters = pushedDownFilters,
options = relation.options,
hadoopConf = relation.sparkSession.sessionState.newHadoopConfWithOptions(relation.options))
val readRDD = if (bucketedScan) {
createBucketedReadRDD(relation.bucketSpec.get, readFile, dynamicallySelectedPartitions,
relation)
} else {
createReadRDD(readFile, dynamicallySelectedPartitions, relation)
}
sendDriverMetrics()
readRDD
}
override def inputRDDs(): Seq[RDD[InternalRow]] = {
inputRDD :: Nil
}
/** SQL metrics generated only for scans using dynamic partition pruning. */
private lazy val staticMetrics = if (partitionFilters.exists(isDynamicPruningFilter)) {
Map("staticFilesNum" -> SQLMetrics.createMetric(sparkContext, "static number of files read"),
"staticFilesSize" -> SQLMetrics.createSizeMetric(sparkContext, "static size of files read"))
} else {
Map.empty[String, SQLMetric]
}
/** Helper for computing total number and size of files in selected partitions. */
private def setFilesNumAndSizeMetric(
partitions: Seq[PartitionDirectory],
static: Boolean): Unit = {
val filesNum = partitions.map(_.files.size.toLong).sum
val filesSize = partitions.map(_.files.map(_.getLen).sum).sum
if (!static || !partitionFilters.exists(isDynamicPruningFilter)) {
driverMetrics("numFiles") = filesNum
driverMetrics("filesSize") = filesSize
} else {
driverMetrics("staticFilesNum") = filesNum
driverMetrics("staticFilesSize") = filesSize
}
if (relation.partitionSchema.nonEmpty) {
driverMetrics("numPartitions") = partitions.length
}
}
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"numFiles" -> SQLMetrics.createMetric(sparkContext, "number of files read"),
"metadataTime" -> SQLMetrics.createTimingMetric(sparkContext, "metadata time"),
"filesSize" -> SQLMetrics.createSizeMetric(sparkContext, "size of files read")
) ++ {
// Tracking scan time has overhead, we can't afford to do it for each row, and can only do
// it for each batch.
if (supportsColumnar) {
Some("scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))
} else {
None
}
} ++ {
if (relation.partitionSchema.nonEmpty) {
Map(
"numPartitions" -> SQLMetrics.createMetric(sparkContext, "number of partitions read"),
"pruningTime" ->
SQLMetrics.createTimingMetric(sparkContext, "dynamic partition pruning time"))
} else {
Map.empty[String, SQLMetric]
}
} ++ staticMetrics
protected override def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
if (needsUnsafeRowConversion) {
inputRDD.mapPartitionsWithIndexInternal { (index, iter) =>
val toUnsafe = UnsafeProjection.create(schema)
toUnsafe.initialize(index)
iter.map { row =>
numOutputRows += 1
toUnsafe(row)
}
}
} else {
inputRDD.mapPartitionsInternal { iter =>
iter.map { row =>
numOutputRows += 1
row
}
}
}
}
protected override def doExecuteColumnar(): RDD[ColumnarBatch] = {
val numOutputRows = longMetric("numOutputRows")
val scanTime = longMetric("scanTime")
inputRDD.asInstanceOf[RDD[ColumnarBatch]].mapPartitionsInternal { batches =>
new Iterator[ColumnarBatch] {
override def hasNext: Boolean = {
// The `FileScanRDD` returns an iterator which scans the file during the `hasNext` call.
val startNs = System.nanoTime()
val res = batches.hasNext
scanTime += NANOSECONDS.toMillis(System.nanoTime() - startNs)
res
}
override def next(): ColumnarBatch = {
val batch = batches.next()
numOutputRows += batch.numRows()
batch
}
}
}
}
override val nodeNamePrefix: String = "File"
/**
* Create an RDD for bucketed reads.
* The non-bucketed variant of this function is [[createReadRDD]].
*
* The algorithm is pretty simple: each RDD partition being returned should include all the files
* with the same bucket id from all the given Hive partitions.
*
* @param bucketSpec the bucketing spec.
* @param readFile a function to read each (part of a) file.
* @param selectedPartitions Hive-style partition that are part of the read.
* @param fsRelation [[HadoopFsRelation]] associated with the read.
*/
private def createBucketedReadRDD(
bucketSpec: BucketSpec,
readFile: (PartitionedFile) => Iterator[InternalRow],
selectedPartitions: Array[PartitionDirectory],
fsRelation: HadoopFsRelation): RDD[InternalRow] = {
logInfo(s"Planning with ${bucketSpec.numBuckets} buckets")
val filesGroupedToBuckets =
selectedPartitions.flatMap { p =>
p.files.map { f =>
PartitionedFileUtil.getPartitionedFile(f, f.getPath, p.values)
}
}.groupBy { f =>
BucketingUtils
.getBucketId(new Path(f.filePath).getName)
.getOrElse(throw new IllegalStateException(s"Invalid bucket file ${f.filePath}"))
}
val prunedFilesGroupedToBuckets = if (optionalBucketSet.isDefined) {
val bucketSet = optionalBucketSet.get
filesGroupedToBuckets.filter {
f => bucketSet.get(f._1)
}
} else {
filesGroupedToBuckets
}
val filePartitions = optionalNumCoalescedBuckets.map { numCoalescedBuckets =>
logInfo(s"Coalescing to ${numCoalescedBuckets} buckets")
val coalescedBuckets = prunedFilesGroupedToBuckets.groupBy(_._1 % numCoalescedBuckets)
Seq.tabulate(numCoalescedBuckets) { bucketId =>
val partitionedFiles = coalescedBuckets.get(bucketId).map {
_.values.flatten.toArray
}.getOrElse(Array.empty)
FilePartition(bucketId, partitionedFiles)
}
}.getOrElse {
Seq.tabulate(bucketSpec.numBuckets) { bucketId =>
FilePartition(bucketId, prunedFilesGroupedToBuckets.getOrElse(bucketId, Array.empty))
}
}
new FileScanRDD(fsRelation.sparkSession, readFile, filePartitions,
requiredSchema, metadataColumns)
}
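  // Worked example (illustrative): with numBuckets = 4 and two Hive partitions, each
  // containing one file per bucket id 0..3, this produces 4 FilePartitions; partition 0
  // holds both bucket-0 files, partition 1 both bucket-1 files, and so on. With
  // optionalNumCoalescedBuckets = Some(2), buckets {0, 2} and {1, 3} are merged into
  // two FilePartitions instead.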
/**
* Create an RDD for non-bucketed reads.
* The bucketed variant of this function is [[createBucketedReadRDD]].
*
* @param readFile a function to read each (part of a) file.
* @param selectedPartitions Hive-style partition that are part of the read.
* @param fsRelation [[HadoopFsRelation]] associated with the read.
*/
private def createReadRDD(
readFile: (PartitionedFile) => Iterator[InternalRow],
selectedPartitions: Array[PartitionDirectory],
fsRelation: HadoopFsRelation): RDD[InternalRow] = {
val openCostInBytes = fsRelation.sparkSession.sessionState.conf.filesOpenCostInBytes
val maxSplitBytes =
FilePartition.maxSplitBytes(fsRelation.sparkSession, selectedPartitions)
logInfo(s"Planning scan with bin packing, max size: $maxSplitBytes bytes, " +
s"open cost is considered as scanning $openCostInBytes bytes.")
// Filter files with bucket pruning if possible
val bucketingEnabled = fsRelation.sparkSession.sessionState.conf.bucketingEnabled
val shouldProcess: Path => Boolean = optionalBucketSet match {
case Some(bucketSet) if bucketingEnabled =>
// Do not prune the file if bucket file name is invalid
filePath => BucketingUtils.getBucketId(filePath.getName).forall(bucketSet.get)
case _ =>
_ => true
}
val splitFiles = selectedPartitions.flatMap { partition =>
partition.files.flatMap { file =>
// getPath() is very expensive so we only want to call it once in this block:
val filePath = file.getPath
if (shouldProcess(filePath)) {
val isSplitable = relation.fileFormat.isSplitable(
relation.sparkSession, relation.options, filePath)
PartitionedFileUtil.splitFiles(
sparkSession = relation.sparkSession,
file = file,
filePath = filePath,
isSplitable = isSplitable,
maxSplitBytes = maxSplitBytes,
partitionValues = partition.values
)
} else {
Seq.empty
}
}
}.sortBy(_.length)(implicitly[Ordering[Long]].reverse)
val partitions =
FilePartition.getFilePartitions(relation.sparkSession, splitFiles, maxSplitBytes)
new FileScanRDD(fsRelation.sparkSession, readFile, partitions,
requiredSchema, metadataColumns)
}
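  // Worked arithmetic sketch (illustrative numbers, not a spec): with maxSplitBytes of
  // roughly 128 MB and openCostInBytes of roughly 4 MB, a splittable 300 MB file becomes
  // splits of about 128, 128 and 44 MB, while every tiny file still "costs" at least the
  // open cost during bin packing, so FilePartition.getFilePartitions packs many small
  // files into one partition instead of creating one partition per file.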
// Filters unused DynamicPruningExpression expressions - one which has been replaced
// with DynamicPruningExpression(Literal.TrueLiteral) during Physical Planning
private def filterUnusedDynamicPruningExpressions(
predicates: Seq[Expression]): Seq[Expression] = {
predicates.filterNot(_ == DynamicPruningExpression(Literal.TrueLiteral))
}
override def doCanonicalize(): FileSourceScanExec = {
FileSourceScanExec(
relation,
output.map(QueryPlan.normalizeExpressions(_, output)),
requiredSchema,
QueryPlan.normalizePredicates(
filterUnusedDynamicPruningExpressions(partitionFilters), output),
optionalBucketSet,
optionalNumCoalescedBuckets,
QueryPlan.normalizePredicates(dataFilters, output),
None,
disableBucketedScan)
}
}
| apache-2.0 |
mistic100/selectize.js | src/defaults.js | 1670 | Selectize.count = 0;
Selectize.defaults = {
plugins: [],
delimiter: ',',
persist: true,
diacritics: true,
create: false,
createOnBlur: false,
highlight: true,
openOnFocus: true,
maxOptions: 1000,
maxItems: null,
hideSelected: null,
addPrecedence: false,
selectOnTab: false,
preload: false,
scrollDuration: 60,
loadThrottle: 300,
dataAttr: 'data-data',
optgroupField: 'optgroup',
valueField: 'value',
labelField: 'text',
optgroupLabelField: 'label',
optgroupValueField: 'value',
optgroupOrder: null,
sortField: '$order',
searchField: ['text'],
searchConjunction: 'and',
mode: null,
wrapperClass: 'selectize-control',
inputClass: 'selectize-input',
dropdownClass: 'selectize-dropdown',
dropdownContentClass: 'selectize-dropdown-content',
dropdownParent: null,
/*
load : null, // function(query, callback) { ... }
score : null, // function(search) { ... }
onInitialize : null, // function() { ... }
onChange : null, // function(value) { ... }
onItemAdd : null, // function(value, $item) { ... }
onItemRemove : null, // function(value) { ... }
onClear : null, // function() { ... }
onOptionAdd : null, // function(value, data) { ... }
onOptionRemove : null, // function(value) { ... }
onOptionClear : null, // function() { ... }
onDropdownOpen : null, // function($dropdown) { ... }
onDropdownClose : null, // function($dropdown) { ... }
onType : null, // function(str) { ... }
onDelete : null, // function(values) { ... }
*/
render: {
/*
item: null,
optgroup: null,
optgroup_header: null,
option: null,
option_create: null
*/
}
}; | apache-2.0 |
dogless/airavata | modules/xbaya-gui/src/test/java/org/apache/airavata/xbaya/interpreter/XBayaConsolidatedTestSuite.java | 2568 | ///*
// *
// * Licensed to the Apache Software Foundation (ASF) under one
// * or more contributor license agreements. See the NOTICE file
// * distributed with this work for additional information
// * regarding copyright ownership. The ASF licenses this file
// * to you under the Apache License, Version 2.0 (the
// * "License"); you may not use this file except in compliance
// * with the License. You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing,
// * software distributed under the License is distributed on an
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// * KIND, either express or implied. See the License for the
// * specific language governing permissions and limitations
// * under the License.
// *
//*/
//package org.apache.airavata.xbaya.interpreter;
//
//import org.apache.airavata.xbaya.interpreter.utils.WorkflowTestUtils;
//import org.apache.axis2.AxisFault;
//import org.apache.axis2.engine.ListenerManager;
//import org.junit.AfterClass;
//import org.junit.BeforeClass;
//import org.junit.Rule;
//import org.junit.rules.MethodRule;
//import org.junit.rules.TestWatchman;
//import org.junit.runner.RunWith;
//import org.junit.runners.Suite;
//import org.junit.runners.model.FrameworkMethod;
//import org.slf4j.Logger;
//import org.slf4j.LoggerFactory;
//
//
//@RunWith(Suite.class)
//@Suite.SuiteClasses({XBayaClientTest.class, SimpleMathWorkflowTest.class, WorkflowTest.class,
// ComplexMathWorkflowTest.class, CrossProductWorkflowTest.class, ForEachWorkflowTest.class,
// SimpleForEachWorkflowTest.class, ComplexForEachWorkflowTest.class,
// WorkflowTrackingTest.class, RegistryServiceTest.class})
//public class XBayaConsolidatedTestSuite {
// private static ListenerManager manager = null;
//
// final static Logger logger = LoggerFactory.getLogger(XBayaConsolidatedTestSuite.class);
//
// @Rule
// public MethodRule watchman = new TestWatchman() {
// public void starting(FrameworkMethod method) {
// logger.info("{} being run...", method.getName());
// }
// };
//
// @BeforeClass
// public static void startServer() throws AxisFault {
// logger.info("Starting simple Axis2 Server...");
// manager = WorkflowTestUtils.axis2ServiceStarter();
// }
//
// @AfterClass
// public static void stopServer() throws AxisFault {
// logger.info("Stopping simple Axis2 Server...");
// manager.stop();
// }
//
//}
| apache-2.0 |
GunoH/intellij-community | java/java-impl/src/com/intellij/codeInsight/template/macro/MethodParameterTypesMacro.java | 1455 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiParameter;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
public class MethodParameterTypesMacro extends Macro {
@Override
public String getName() {
return "methodParameterTypes";
}
@Override
@NotNull
public String getDefaultValue() {
return "a";
}
@Override
public Result calculateResult(Expression @NotNull [] params, final ExpressionContext context) {
PsiElement place = context.getPsiElementAtStartOffset();
while(place != null){
if (place instanceof PsiMethod){
List<Result> result = new ArrayList<>();
Project project = place.getProject();
for (PsiParameter parameter : ((PsiMethod)place).getParameterList().getParameters()) {
result.add(new PsiTypeResult(parameter.getType(), project));
}
return new ListResult(result);
}
place = place.getParent();
}
return null;
}
@Override
public boolean isAcceptableInContext(TemplateContextType context) {
return context instanceof JavaCodeContextType;
}
}
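// Illustrative example (not part of the original file): inside an enclosing method such as
// "void send(String host, int port)", a live template variable bound to the expression
// methodParameterTypes() would resolve to the list of parameter types, here
// java.lang.String and int.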
| apache-2.0 |
ryano144/intellij-community | plugins/git4idea/src/git4idea/repo/GitRepositoryManager.java | 1887 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.repo;
import com.intellij.dvcs.branch.DvcsSyncSettings;
import com.intellij.dvcs.repo.AbstractRepositoryManager;
import com.intellij.dvcs.repo.VcsRepositoryManager;
import com.intellij.openapi.project.Project;
import git4idea.GitPlatformFacade;
import git4idea.GitUtil;
import git4idea.ui.branch.GitMultiRootBranchConfig;
import org.jetbrains.annotations.NotNull;
import java.util.List;
public class GitRepositoryManager extends AbstractRepositoryManager<GitRepository> {
@NotNull private final GitPlatformFacade myPlatformFacade;
@NotNull private final Project myProject;
public GitRepositoryManager(@NotNull Project project, @NotNull GitPlatformFacade platformFacade,
@NotNull VcsRepositoryManager vcsRepositoryManager) {
super(vcsRepositoryManager, platformFacade.getVcs(project), GitUtil.DOT_GIT);
myProject = project;
myPlatformFacade = platformFacade;
}
@Override
public boolean isSyncEnabled() {
return myPlatformFacade.getSettings(myProject).getSyncSetting() == DvcsSyncSettings.Value.SYNC &&
!new GitMultiRootBranchConfig(getRepositories()).diverged();
}
@NotNull
@Override
public List<GitRepository> getRepositories() {
return getRepositories(GitRepository.class);
}
}
| apache-2.0 |
LucHermitte/ITK | Modules/ThirdParty/pygccxml/src/pygccxml/declarations/algorithms_cache.py | 4298 | # Copyright 2014 Insight Software Consortium.
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0.
# See http://www.boost.org/LICENSE_1_0.txt
"""
Defines classes that keep the results of different calculations.
"""
class declaration_algs_cache_t(object):
def __init__(self):
object.__init__(self)
self._enabled = True
self._full_name = None
self._full_partial_name = None
self._access_type = None
self._demangled_name = None
self._declaration_path = None
self._partial_declaration_path = None
self._container_key_type = None
self._container_element_type = None
def disable(self):
self._enabled = False
def enable(self):
self._enabled = True
@property
def enabled(self):
return self._enabled
@property
def full_name(self):
return self._full_name
@full_name.setter
def full_name(self, fname):
if not self.enabled:
fname = None
self._full_name = fname
@property
def full_partial_name(self):
return self._full_partial_name
@full_partial_name.setter
def full_partial_name(self, fname):
if not self.enabled:
fname = None
self._full_partial_name = fname
@property
def access_type(self):
return self._access_type
@access_type.setter
def access_type(self, access_type):
if not self.enabled:
access_type = None
self._access_type = access_type
@property
def demangled_name(self):
return self._demangled_name
@demangled_name.setter
def demangled_name(self, demangled_name):
if not self.enabled:
demangled_name = None
self._demangled_name = demangled_name
@property
def declaration_path(self):
return self._declaration_path
@declaration_path.setter
def declaration_path(self, declaration_path):
if not self.enabled:
declaration_path = None
self._declaration_path = declaration_path
@property
def partial_declaration_path(self):
return self._partial_declaration_path
@partial_declaration_path.setter
def partial_declaration_path(self, partial_declaration_path):
if not self.enabled:
partial_declaration_path = None
self._partial_declaration_path = partial_declaration_path
@property
def container_element_type(self):
return self._container_element_type
@container_element_type.setter
def container_element_type(self, etype):
if not self.enabled:
etype = None
self._container_element_type = etype
@property
def container_key_type(self):
return self._container_key_type
@container_key_type.setter
def container_key_type(self, ktype):
if not self.enabled:
ktype = None
self._container_key_type = ktype
def reset(self):
self.full_name = None
self.full_partial_name = None
self.access_type = None
self.demangled_name = None
self.declaration_path = None
self.partial_declaration_path = None
self.container_key_type = None
self.container_element_type = None
def reset_name_based(self):
self.full_name = None
self.full_partial_name = None
self.demangled_name = None
self.declaration_path = None
self.partial_declaration_path = None
self.container_key_type = None
self.container_element_type = None
def reset_access_type(self):
self.access_type = None
class type_algs_cache_t(object):
enabled = True
@staticmethod
def disable():
type_algs_cache_t.enabled = False
@staticmethod
def enable():
type_algs_cache_t.enabled = True
def __init__(self):
object.__init__(self)
self._remove_alias = None
@property
def remove_alias(self):
return self._remove_alias
@remove_alias.setter
def remove_alias(self, remove_alias):
if not type_algs_cache_t.enabled:
remove_alias = None
self._remove_alias = remove_alias
def reset(self):
self.remove_alias = None
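# Illustrative usage sketch (not part of pygccxml itself):
#
#   cache = declaration_algs_cache_t()
#   cache.full_name = "::ns::klass"   # stored while the cache is enabled
#   cache.disable()
#   cache.full_name = "::other"       # ignored: setters store None while disabled
#   assert cache.full_name is None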
| apache-2.0 |
GunoH/intellij-community | plugins/coverage-common/src/com/intellij/coverage/view/ElementColumnInfo.java | 788 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.coverage.view;
import com.intellij.coverage.CoverageBundle;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.util.ui.ColumnInfo;
import java.util.Comparator;
public final class ElementColumnInfo extends ColumnInfo<NodeDescriptor<?>, String> {
public ElementColumnInfo() {
super(CoverageBundle.message("coverage.view.element"));
}
@Override
public Comparator<NodeDescriptor<?>> getComparator() {
return AlphaComparator.INSTANCE;
}
@Override
public String valueOf(NodeDescriptor node) {
return node.toString();
}
}
| apache-2.0 |
ricardocerq/elasticsearch | modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReference.java | 2784 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport.netty3;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.jboss.netty.buffer.ChannelBuffer;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
final class ChannelBufferBytesReference extends BytesReference {
private final ChannelBuffer buffer;
private final int length;
private final int offset;
ChannelBufferBytesReference(ChannelBuffer buffer, int length) {
this.buffer = buffer;
this.length = length;
this.offset = buffer.readerIndex();
assert length <= buffer.readableBytes() : "length[" + length +"] > " + buffer.readableBytes();
}
@Override
public byte get(int index) {
return buffer.getByte(offset + index);
}
@Override
public int length() {
return length;
}
@Override
public BytesReference slice(int from, int length) {
return new ChannelBufferBytesReference(buffer.slice(offset + from, length), length);
}
@Override
public StreamInput streamInput() {
return new ChannelBufferStreamInput(buffer.duplicate(), length);
}
@Override
public void writeTo(OutputStream os) throws IOException {
buffer.getBytes(offset, os, length);
}
ChannelBuffer toChannelBuffer() {
return buffer.duplicate();
}
@Override
public String utf8ToString() {
return buffer.toString(offset, length, StandardCharsets.UTF_8);
}
@Override
public BytesRef toBytesRef() {
if (buffer.hasArray()) {
return new BytesRef(buffer.array(), buffer.arrayOffset() + offset, length);
}
final byte[] copy = new byte[length];
buffer.getBytes(offset, copy);
return new BytesRef(copy);
}
@Override
public long ramBytesUsed() {
return buffer.capacity();
}
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-all/tags/DCM4JBOSS_2_5_3/dcm4jboss-ejb/src/java/org/dcm4chex/archive/ejb/entity/CodeBean.java | 5389 | /* $Id: CodeBean.java 1541 2005-02-21 13:20:59Z javawilli $
* Copyright (c) 2002,2003 by TIANI MEDGRAPH AG
*
* This file is part of dcm4che.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.dcm4chex.archive.ejb.entity;
import java.util.Collection;
import java.util.Iterator;
import javax.ejb.CreateException;
import javax.ejb.EntityBean;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.dict.Tags;
import org.dcm4chex.archive.ejb.interfaces.CodeLocal;
import org.dcm4chex.archive.ejb.interfaces.CodeLocalHome;
/**
* @ejb.bean name="Code" type="CMP" view-type="local"
* primkey-field="pk"
* local-jndi-name="ejb/Code"
*
* @ejb.transaction type="Required"
*
* @ejb.persistence table-name="code"
*
* @jboss.entity-command name="hsqldb-fetch-key"
*
* @ejb.finder
* signature="Collection findAll()"
* query="SELECT OBJECT(a) FROM Code AS a"
* transaction-type="Supports"
*
* @ejb.finder
* signature="java.util.Collection findByValueAndDesignator(java.lang.String value, java.lang.String designator)"
* query="SELECT OBJECT(a) FROM Code AS a WHERE a.codeValue = ?1 AND a.codingSchemeDesignator = ?2"
* transaction-type="Supports"
*
* @author <a href="mailto:[email protected]">Gunter Zeilinger </a>
*
*/
public abstract class CodeBean implements EntityBean {
private static final Logger log = Logger.getLogger(CodeBean.class);
/**
* Auto-generated Primary Key
*
* @ejb.interface-method
* @ejb.pk-field
* @ejb.persistence column-name="pk"
* @jboss.persistence auto-increment="true"
*
*/
public abstract Integer getPk();
public abstract void setPk(Integer pk);
/**
* Code Value
*
* @ejb.interface-method
* @ejb.persistence column-name="code_value"
*/
public abstract String getCodeValue();
public abstract void setCodeValue(String value);
/**
     * Coding Scheme Designator
*
* @ejb.interface-method
* @ejb.persistence column-name="code_designator"
*/
public abstract String getCodingSchemeDesignator();
public abstract void setCodingSchemeDesignator(String designator);
/**
     * Coding Scheme Version
*
* @ejb.interface-method
* @ejb.persistence column-name="code_version"
*/
public abstract String getCodingSchemeVersion();
public abstract void setCodingSchemeVersion(String version);
/**
     * Code Meaning
*
* @ejb.interface-method
* @ejb.persistence column-name="code_meaning"
*/
public abstract String getCodeMeaning();
public abstract void setCodeMeaning(String meaning);
/**
*
* @ejb.interface-method
*/
public String asString() {
return prompt();
}
private String prompt() {
return "Code[pk=" + getPk() + ", value=" + getCodeValue()
+ ", designator=" + getCodingSchemeDesignator() + ", version="
+ getCodingSchemeVersion() + ", meaning=" + getCodeMeaning()
+ "]";
}
/**
     * Create Code entity.
*
* @ejb.create-method
*/
public Integer ejbCreate(String value, String designator, String version,
String meaning) throws CreateException {
setCodeValue(value);
setCodingSchemeDesignator(designator);
setCodingSchemeVersion(version);
setCodeMeaning(meaning);
return null;
}
public void ejbPostCreate(String value, String designator, String version,
String meaning) throws CreateException {
log.info("Created " + prompt());
}
public void ejbRemove() throws RemoveException {
log.info("Deleting " + prompt());
}
public static CodeLocal valueOf(CodeLocalHome codeHome, Dataset item)
throws CreateException, FinderException {
if (item == null) return null;
final String value = item.getString(Tags.CodeValue);
final String designator = item.getString(Tags.CodingSchemeDesignator);
final String version = item.getString(Tags.CodingSchemeVersion);
final String meaning = item.getString(Tags.CodeMeaning);
Collection c = codeHome.findByValueAndDesignator(value, designator);
for (Iterator it = c.iterator(); it.hasNext();) {
final CodeLocal code = (CodeLocal) it.next();
if (version == null) { return code; }
final String version2 = code.getCodingSchemeVersion();
if (version2 == null || version2.equals(version)) { return code; }
}
return codeHome.create(value, designator, version, meaning);
}
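    /*
     * Illustrative usage only (added commentary, not part of the original source;
     * names are hypothetical):
     *
     *   CodeLocalHome codeHome = ...; // looked up under the local JNDI name "ejb/Code"
     *   Dataset item = ...;           // a code item carrying CodeValue,
     *                                 // CodingSchemeDesignator, CodingSchemeVersion, CodeMeaning
     *   CodeLocal code = CodeBean.valueOf(codeHome, item); // reuses a matching row or creates one
     */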
}
| apache-2.0 |
porkybrain/Kvasir | Lib/Chip/Unknown/Fujitsu/MB9AF31xL/CRG.hpp | 15632 | #pragma once
#include <Register/Utility.hpp>
namespace Kvasir {
//Clock Unit Registers
namespace CrgScmCtl{ ///<System Clock Mode Control Register
using Addr = Register::Address<0x40010000,0xffffff05,0x00000000,unsigned char>;
///Master clock switch control bits
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,5),Register::ReadWriteAccess,unsigned> rcs{};
///PLL oscillation enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> plle{};
///Sub clock oscillation enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,3),Register::ReadWriteAccess,unsigned> sosce{};
///Main clock oscillation enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::ReadWriteAccess,unsigned> mosce{};
}
namespace CrgScmStr{ ///<System Clock Mode Status Register
using Addr = Register::Address<0x40010004,0xffffff05,0x00000000,unsigned char>;
///Master clock selection bits
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,5),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> rcm{};
///PLL oscillation stable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> plrdy{};
///Sub clock oscillation stable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,3),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> sordy{};
///Main clock oscillation stable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> mordy{};
}
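// Illustrative only (added commentary, not part of the original header): with the Kvasir
// register abstraction these FieldLocations are normally driven through the apply()/write()/read()
// helpers from Register/Utility.hpp; the exact helper names below are an assumption and are not
// verified against this Kvasir revision.
//
//   apply(write(CrgScmCtl::mosce, 1));              // hypothetical: enable main clock oscillation
//   while (apply(read(CrgScmStr::mordy)) == 0) {}   // hypothetical: wait for it to stabilize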
namespace CrgBscPsr{ ///<Base Clock Prescaler Register
using Addr = Register::Address<0x40010010,0xfffffff8,0x00000000,unsigned char>;
///Base clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,0),Register::ReadWriteAccess,unsigned> bsr{};
}
namespace CrgApbc0Psr{ ///<APB0 Prescaler Register
using Addr = Register::Address<0x40010014,0xfffffffc,0x00000000,unsigned char>;
///APB0 bus clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,0),Register::ReadWriteAccess,unsigned> apbc0{};
}
namespace CrgApbc1Psr{ ///<APB1 Prescaler Register
using Addr = Register::Address<0x40010018,0xffffff6c,0x00000000,unsigned char>;
///APB1 clock enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::ReadWriteAccess,unsigned> apbc1en{};
///APB1 bus reset control bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> apbc1rst{};
///APB1 bus clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,0),Register::ReadWriteAccess,unsigned> apbc1{};
}
namespace CrgApbc2Psr{ ///<APB2 Prescaler Register
using Addr = Register::Address<0x4001001c,0xffffff6c,0x00000000,unsigned char>;
///APB2 clock enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::ReadWriteAccess,unsigned> apbc2en{};
///APB2 bus reset control bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> apbc2rst{};
///APB2 bus clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,0),Register::ReadWriteAccess,unsigned> apbc2{};
}
namespace CrgSwcPsr{ ///<Software Watchdog Clock Prescaler Register
using Addr = Register::Address<0x40010020,0xffffff7c,0x00000000,unsigned char>;
///TEST bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::ReadWriteAccess,unsigned> testb{};
///Software watchdog clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,0),Register::ReadWriteAccess,unsigned> swds{};
}
namespace CrgTtcPsr{ ///<Trace Clock Prescaler Register
using Addr = Register::Address<0x40010028,0xfffffffe,0x00000000,unsigned char>;
///Trace clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,unsigned> ttc{};
}
namespace CrgCswTmr{ ///<Clock Stabilization Wait Time Register
using Addr = Register::Address<0x40010030,0xffffff80,0x00000000,unsigned char>;
///Sub clock stabilization wait time setup bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(6,4),Register::ReadWriteAccess,unsigned> sowt{};
///Main clock stabilization wait time setup bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> mowt{};
}
namespace CrgPswTmr{ ///<PLL Clock Stabilization Wait Time Setup Register
using Addr = Register::Address<0x40010034,0xffffffe8,0x00000000,unsigned char>;
///PLL input clock select bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> pinc{};
///PLL clock stabilization wait time setup bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,0),Register::ReadWriteAccess,unsigned> powt{};
}
namespace CrgPllCtl1{ ///<PLL Control Register 1
using Addr = Register::Address<0x40010038,0xffffff00,0x00000000,unsigned char>;
///PLL input clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,4),Register::ReadWriteAccess,unsigned> pllk{};
///PLL VCO clock frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(3,0),Register::ReadWriteAccess,unsigned> pllm{};
}
namespace CrgPllCtl2{ ///<PLL Control Register 2
using Addr = Register::Address<0x4001003c,0xffffffc0,0x00000000,unsigned char>;
///PLL feedback frequency division ratio setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,0),Register::ReadWriteAccess,unsigned> plln{};
}
namespace CrgDbwdtCtl{ ///<Debug Break Watchdog Timer Control Register
using Addr = Register::Address<0x40010054,0xffffff5f,0x00000000,unsigned char>;
///HW-WDG debug mode break bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::ReadWriteAccess,unsigned> dphwbe{};
///SW-WDG debug mode break bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::ReadWriteAccess,unsigned> dpswbe{};
}
namespace CrgIntEnr{ ///< Interrupt Enable Register
using Addr = Register::Address<0x40010060,0xffffffd8,0x00000000,unsigned char>;
///Anomalous frequency detection interrupt enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::ReadWriteAccess,unsigned> fcse{};
///PLL oscillation stabilization completion interrupt enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,2),Register::ReadWriteAccess,unsigned> pcse{};
///Sub oscillation stabilization completion interrupt enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::ReadWriteAccess,unsigned> scse{};
///Main oscillation stabilization completion interrupt enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,unsigned> mcse{};
}
namespace CrgIntStr{ ///<Interrupt Status Register
using Addr = Register::Address<0x40010064,0xffffffd8,0x00000000,unsigned char>;
///Anomalous frequency detection interrupt status bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> fcsi{};
///PLL oscillation stabilization completion interrupt status bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,2),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> pcsi{};
///Sub oscillation stabilization completion interrupt status bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> scsi{};
///Main oscillation stabilization completion interrupt status bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> mcsi{};
}
namespace CrgIntClr{ ///<Interrupt Clear Register
using Addr = Register::Address<0x40010068,0xffffffd8,0x00000000,unsigned char>;
///Anomalous frequency detection interrupt cause clear bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> fcsc{};
///PLL oscillation stabilization completion interrupt cause clear bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(2,2),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> pcsc{};
///Sub oscillation stabilization completion interrupt cause clear bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> scsc{};
///Main oscillation stabilization completion interrupt cause clear bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::Access<Register::AccessType::writeOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> mcsc{};
}
namespace CrgRstStr{ ///<Reset Cause Register
using Addr = Register::Address<0x4001000c,0xfffffe0c,0x00000000,unsigned>;
///Software reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(8,8),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> srst{};
///Flag for anomalous frequency detection reset
constexpr Register::FieldLocation<Addr,Register::maskFromRange(7,7),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> fcsr{};
///Clock failure detection reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(6,6),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> csvr{};
///Hardware watchdog reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(5,5),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> hwdt{};
///Software watchdog reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> swdt{};
///INITX pin input reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> initx{};
///Power-on reset/low-voltage detection reset flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> ponr{};
}
namespace CrgStbCtl{ ///<Standby Mode Control Register
using Addr = Register::Address<0x40010008,0x0000ffec,0x00000000,unsigned>;
///Standby mode control write control bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(31,16),Register::ReadWriteAccess,unsigned> key{};
///Standby pin level setting bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(4,4),Register::ReadWriteAccess,unsigned> spl{};
///Standby mode selection bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,0),Register::ReadWriteAccess,unsigned> stm{};
}
namespace CrgCsvCtl{ ///<CSV control register
using Addr = Register::Address<0x40010040,0xffff8cfc,0x00000000,unsigned>;
///FCS count cycle setting bits
constexpr Register::FieldLocation<Addr,Register::maskFromRange(14,12),Register::ReadWriteAccess,unsigned> fcd{};
///FCS reset output enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(9,9),Register::ReadWriteAccess,unsigned> fcsre{};
///FCS function enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(8,8),Register::ReadWriteAccess,unsigned> fcsde{};
///Sub CSV function enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::ReadWriteAccess,unsigned> scsve{};
///Main CSV function enable bit
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::ReadWriteAccess,unsigned> mcsve{};
}
namespace CrgCsvStr{ ///<CSV status register
using Addr = Register::Address<0x40010044,0xfffffffc,0x00000000,unsigned char>;
///Sub clock failure detection flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(1,1),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> scmf{};
///Main clock failure detection flag
constexpr Register::FieldLocation<Addr,Register::maskFromRange(0,0),Register::Access<Register::AccessType::readOnly,Register::ReadActionType::normal,Register::ModifiedWriteValueType::normal>,unsigned> mcmf{};
}
namespace CrgFcswhCtl{ ///<Frequency detection window setting register
using Addr = Register::Address<0x40010048,0xffffffff,0x00000000,unsigned>;
}
namespace CrgFcswlCtl{ ///<Frequency detection window setting register
using Addr = Register::Address<0x4001004c,0xffffffff,0x00000000,unsigned>;
}
namespace CrgFcswdCtl{ ///<Frequency detection counter register
using Addr = Register::Address<0x40010050,0xffffffff,0x00000000,unsigned>;
}
}
| apache-2.0 |
shrimpma/phabricator | src/applications/oauthserver/controller/PhabricatorOAuthServerTokenController.php | 5276 | <?php
final class PhabricatorOAuthServerTokenController
extends PhabricatorAuthController {
public function shouldRequireLogin() {
return false;
}
public function shouldAllowRestrictedParameter($parameter_name) {
if ($parameter_name == 'code') {
return true;
}
return parent::shouldAllowRestrictedParameter($parameter_name);
}
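  /**
   * Illustrative request/response shape (added commentary, not part of the original
   * source). The token endpoint expects an authorization-code exchange:
   *
   *   grant_type=authorization_code
   *   code=<authorization code>
   *   client_id=<client PHID>
   *   client_secret=<client secret>
   *   redirect_uri=<must exactly match the code's redirect URI, when one was recorded>
   *
   * and on success returns {"access_token": ..., "token_type": "Bearer",
   * "expires_in": <ACCESS_TOKEN_TIMEOUT>}.
   */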
public function processRequest() {
$request = $this->getRequest();
$grant_type = $request->getStr('grant_type');
$code = $request->getStr('code');
$redirect_uri = $request->getStr('redirect_uri');
$client_phid = $request->getStr('client_id');
$client_secret = $request->getStr('client_secret');
$response = new PhabricatorOAuthResponse();
$server = new PhabricatorOAuthServer();
if ($grant_type != 'authorization_code') {
$response->setError('unsupported_grant_type');
$response->setErrorDescription(
pht(
'Only %s %s is supported.',
'grant_type',
'authorization_code'));
return $response;
}
if (!$code) {
$response->setError('invalid_request');
$response->setErrorDescription(pht('Required parameter code missing.'));
return $response;
}
if (!$client_phid) {
$response->setError('invalid_request');
$response->setErrorDescription(
pht(
'Required parameter %s missing.',
'client_id'));
return $response;
}
if (!$client_secret) {
$response->setError('invalid_request');
$response->setErrorDescription(
pht(
'Required parameter %s missing.',
'client_secret'));
return $response;
}
// one giant try / catch around all the exciting database stuff so we
// can return a 'server_error' response if something goes wrong!
try {
$auth_code = id(new PhabricatorOAuthServerAuthorizationCode())
->loadOneWhere('code = %s',
$code);
if (!$auth_code) {
$response->setError('invalid_grant');
$response->setErrorDescription(
pht(
            'Authorization code %s not found.',
$code));
return $response;
}
// if we have an auth code redirect URI, there must be a redirect_uri
// in the request and it must match the auth code redirect uri *exactly*
$auth_code_redirect_uri = $auth_code->getRedirectURI();
if ($auth_code_redirect_uri) {
$auth_code_redirect_uri = new PhutilURI($auth_code_redirect_uri);
$redirect_uri = new PhutilURI($redirect_uri);
if (!$redirect_uri->getDomain() ||
$redirect_uri != $auth_code_redirect_uri) {
$response->setError('invalid_grant');
$response->setErrorDescription(
pht(
'Redirect URI in request must exactly match redirect URI '.
'from authorization code.'));
return $response;
}
} else if ($redirect_uri) {
$response->setError('invalid_grant');
$response->setErrorDescription(
pht(
'Redirect URI in request and no redirect URI in authorization '.
'code. The two must exactly match.'));
return $response;
}
$client = id(new PhabricatorOAuthServerClient())
->loadOneWhere('phid = %s', $client_phid);
if (!$client) {
$response->setError('invalid_client');
$response->setErrorDescription(
pht(
            'Client with %s %s not found.',
'client_id',
$client_phid));
return $response;
}
$server->setClient($client);
$user_phid = $auth_code->getUserPHID();
$user = id(new PhabricatorUser())
->loadOneWhere('phid = %s', $user_phid);
if (!$user) {
$response->setError('invalid_grant');
$response->setErrorDescription(
pht(
            'User with PHID %s not found.',
$user_phid));
return $response;
}
$server->setUser($user);
$test_code = new PhabricatorOAuthServerAuthorizationCode();
$test_code->setClientSecret($client_secret);
$test_code->setClientPHID($client_phid);
$is_good_code = $server->validateAuthorizationCode(
$auth_code,
$test_code);
if (!$is_good_code) {
$response->setError('invalid_grant');
$response->setErrorDescription(
pht(
            'Invalid authorization code %s.',
$code));
return $response;
}
$unguarded = AphrontWriteGuard::beginScopedUnguardedWrites();
$access_token = $server->generateAccessToken();
$auth_code->delete();
unset($unguarded);
$result = array(
'access_token' => $access_token->getToken(),
'token_type' => 'Bearer',
'expires_in' => PhabricatorOAuthServer::ACCESS_TOKEN_TIMEOUT,
);
return $response->setContent($result);
} catch (Exception $e) {
$response->setError('server_error');
$response->setErrorDescription(
pht(
'The authorization server encountered an unexpected condition '.
'which prevented it from fulfilling the request.'));
return $response;
}
}
}
| apache-2.0 |
GunoH/intellij-community | platform/util/testSrc/com/intellij/util/io/SafeFileOutputStreamTest.java | 7758 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.io;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.IoTestUtil;
import com.intellij.testFramework.rules.TempDirectory;
import com.intellij.util.TimeoutUtil;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.attribute.DosFileAttributeView;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.EnumSet;
import java.util.Random;
import java.util.Set;
import static com.intellij.openapi.util.io.IoTestUtil.assumeUnix;
import static java.nio.file.attribute.PosixFilePermission.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
public class SafeFileOutputStreamTest {
private static final String TEST_BACKUP_EXT = ".bak";
private static final byte[] TEST_DATA = {'h', 'e', 'l', 'l', 'o'};
@Rule public TempDirectory tempDir = new TempDirectory();
@After public void tearDown() throws Exception {
if (SystemInfo.isUnix) {
new ProcessBuilder("chmod", "-R", "u+rw", tempDir.getRoot().getPath()).start().waitFor();
}
}
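  // Summary of the contract exercised below (added commentary, not part of the original
  // test): a SafeFileOutputStream writes through to the target file while keeping a
  // sibling "<name>.bak" backup in flight; the backup is gone after a successful close,
  // the old content survives a failed write, and abort() discards the pending data.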
@Test public void newFile() throws IOException {
checkWriteSucceed(new File(tempDir.getRoot(), "new-file.txt"));
}
@Test public void existingFile() throws IOException {
checkWriteSucceed(tempDir.newFile("test.txt"));
}
@Test public void overwritingBackup() throws IOException {
File target = tempDir.newFile("test.txt");
tempDir.newFile(target.getName() + TEST_BACKUP_EXT);
checkWriteSucceed(target);
}
@Test public void keepingAttributes() throws IOException {
assumeUnix();
File target = tempDir.newFile("test.txt");
Set<PosixFilePermission> permissions = EnumSet.of(OWNER_READ, OWNER_WRITE, OTHERS_EXECUTE);
Files.setPosixFilePermissions(target.toPath(), permissions);
checkWriteSucceed(target);
assertThat(Files.getPosixFilePermissions(target.toPath())).isEqualTo(permissions);
}
@Test public void preservingSymlinks() throws IOException {
IoTestUtil.assumeSymLinkCreationIsSupported();
File target = tempDir.newFile("test.txt");
File link = new File(tempDir.getRoot(), "link");
IoTestUtil.createSymbolicLink(link.toPath(), target.toPath());
checkWriteSucceed(link);
assertThat(link.toPath()).isSymbolicLink();
}
@Test public void newFileInReadOnlyDirectory() throws IOException {
assumeUnix();
File dir = tempDir.newDirectory("dir");
Files.setPosixFilePermissions(dir.toPath(), EnumSet.of(OWNER_READ, OWNER_EXECUTE));
checkWriteFailed(new File(dir, "test.txt"));
}
@Test public void existingFileInReadOnlyDirectory() throws IOException {
assumeUnix();
File target = tempDir.newFile("dir/test.txt");
Files.write(target.toPath(), new byte[]{'.'});
Files.setPosixFilePermissions(target.getParentFile().toPath(), EnumSet.of(OWNER_READ, OWNER_EXECUTE));
checkWriteFailed(target);
}
@Test public void largeFile() throws IOException, NoSuchAlgorithmException {
File target = tempDir.newFile("test.dat");
MessageDigest digest = MessageDigest.getInstance("SHA-256");
Random random = new Random();
byte[] buffer = new byte[8192];
try (OutputStream out = openStream(target)) {
for (int i = 0; i < 128; i++) {
random.nextBytes(buffer);
out.write(buffer);
digest.update(buffer);
}
}
byte[] expected = digest.digest();
digest.reset();
assertThat(target).isFile().hasDigest(digest, expected);
}
@Test public void backupRemovalNotCritical() throws IOException {
assumeUnix();
File target = tempDir.newFile("dir/test.txt"), backup = new File(target.getParent(), target.getName() + TEST_BACKUP_EXT);
try (OutputStream out = openStream(target)) {
out.write(TEST_DATA);
while (!backup.exists()) TimeoutUtil.sleep(10);
Files.setPosixFilePermissions(target.getParentFile().toPath(), EnumSet.of(OWNER_READ, OWNER_EXECUTE));
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
}
@Test public void abort() throws IOException {
File target = tempDir.newFile("dir/test.txt"), backup = new File(target.getParent(), target.getName() + TEST_BACKUP_EXT);
try (OutputStream out = openStream(target)) {
out.write(TEST_DATA);
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
try (SafeFileOutputStream out = openStream(target)) {
out.write(new byte[]{'b', 'y', 'e'});
out.abort();
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
assertThat(backup).doesNotExist();
}
@Test public void readOnlyFileBackup() throws IOException {
File target = tempDir.newFile("dir/test.txt");
if (SystemInfo.isWindows) {
Files.getFileAttributeView(target.toPath(), DosFileAttributeView.class).setReadOnly(true);
}
else {
Files.setPosixFilePermissions(target.toPath(), EnumSet.of(OWNER_READ));
}
checkWriteFailed(target);
if (SystemInfo.isWindows) {
Files.getFileAttributeView(target.toPath(), DosFileAttributeView.class).setReadOnly(false);
}
else {
Files.setPosixFilePermissions(target.toPath(), EnumSet.of(OWNER_READ, OWNER_WRITE));
}
checkWriteSucceed(target);
}
@Test public void preemptiveStreamNew() throws IOException {
File target = new File(tempDir.getRoot(), "new-file.txt");
try (OutputStream out = new PreemptiveSafeFileOutputStream(target.toPath())) {
out.write(TEST_DATA[0]);
out.write(TEST_DATA, 1, TEST_DATA.length - 1);
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
assertThat(target.getParentFile().list()).containsExactly(target.getName());
}
@Test public void preemptiveStreamExisting() throws IOException {
File target = tempDir.newFile("test.txt");
try (OutputStream out = new PreemptiveSafeFileOutputStream(target.toPath())) {
out.write(TEST_DATA[0]);
out.write(TEST_DATA, 1, TEST_DATA.length - 1);
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
assertThat(target.getParentFile().list()).containsExactly(target.getName());
}
private static void checkWriteSucceed(File target) throws IOException {
try (OutputStream out = openStream(target)) {
out.write(TEST_DATA[0]);
out.write(TEST_DATA, 1, TEST_DATA.length - 1);
}
assertThat(target).isFile().hasBinaryContent(TEST_DATA);
assertThat(new File(target.getParent(), target.getName() + TEST_BACKUP_EXT)).doesNotExist();
}
private static void checkWriteFailed(File target) throws IOException {
boolean exists = Files.exists(target.toPath());
FileTime ts = exists ? Files.getLastModifiedTime(target.toPath()) : null;
byte[] content = exists ? Files.readAllBytes(target.toPath()) : null;
try {
try (OutputStream out = openStream(target)) { out.write(TEST_DATA); }
fail("writing to " + target + " should have failed");
}
catch (IOException e) {
if (exists) {
assertThat(target).hasBinaryContent(content);
assertThat(Files.getLastModifiedTime(target.toPath())).isEqualTo(ts);
}
else {
assertThat(target).doesNotExist();
}
assertThat(e.getMessage()).contains(target.getPath());
}
}
private static SafeFileOutputStream openStream(File target) {
return new SafeFileOutputStream(target, TEST_BACKUP_EXT);
}
} | apache-2.0 |
enj/origin | vendor/github.com/docker/docker/daemon/create.go | 9773 | package daemon // import "github.com/docker/docker/daemon"
import (
"fmt"
"net"
"runtime"
"strings"
"time"
"github.com/docker/docker/api/types"
containertypes "github.com/docker/docker/api/types/container"
networktypes "github.com/docker/docker/api/types/network"
"github.com/docker/docker/container"
"github.com/docker/docker/errdefs"
"github.com/docker/docker/image"
"github.com/docker/docker/pkg/idtools"
"github.com/docker/docker/pkg/system"
"github.com/docker/docker/runconfig"
"github.com/opencontainers/selinux/go-selinux/label"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
// CreateManagedContainer creates a container that is managed by a Service
func (daemon *Daemon) CreateManagedContainer(params types.ContainerCreateConfig) (containertypes.ContainerCreateCreatedBody, error) {
return daemon.containerCreate(params, true)
}
// ContainerCreate creates a regular container
func (daemon *Daemon) ContainerCreate(params types.ContainerCreateConfig) (containertypes.ContainerCreateCreatedBody, error) {
return daemon.containerCreate(params, false)
}
func (daemon *Daemon) containerCreate(params types.ContainerCreateConfig, managed bool) (containertypes.ContainerCreateCreatedBody, error) {
start := time.Now()
if params.Config == nil {
return containertypes.ContainerCreateCreatedBody{}, errdefs.InvalidParameter(errors.New("Config cannot be empty in order to create a container"))
}
os := runtime.GOOS
if params.Config.Image != "" {
img, err := daemon.imageService.GetImage(params.Config.Image)
if err == nil {
os = img.OS
}
} else {
		// This means scratch. On Windows, we can safely assume that this is a linux
		// container. On other platforms, it's the host OS (which it already is).
if runtime.GOOS == "windows" && system.LCOWSupported() {
os = "linux"
}
}
warnings, err := daemon.verifyContainerSettings(os, params.HostConfig, params.Config, false)
if err != nil {
return containertypes.ContainerCreateCreatedBody{Warnings: warnings}, errdefs.InvalidParameter(err)
}
err = verifyNetworkingConfig(params.NetworkingConfig)
if err != nil {
return containertypes.ContainerCreateCreatedBody{Warnings: warnings}, errdefs.InvalidParameter(err)
}
if params.HostConfig == nil {
params.HostConfig = &containertypes.HostConfig{}
}
err = daemon.adaptContainerSettings(params.HostConfig, params.AdjustCPUShares)
if err != nil {
return containertypes.ContainerCreateCreatedBody{Warnings: warnings}, errdefs.InvalidParameter(err)
}
container, err := daemon.create(params, managed)
if err != nil {
return containertypes.ContainerCreateCreatedBody{Warnings: warnings}, err
}
containerActions.WithValues("create").UpdateSince(start)
return containertypes.ContainerCreateCreatedBody{ID: container.ID, Warnings: warnings}, nil
}
// Create creates a new container from the given configuration with a given name.
func (daemon *Daemon) create(params types.ContainerCreateConfig, managed bool) (retC *container.Container, retErr error) {
var (
container *container.Container
img *image.Image
imgID image.ID
err error
)
os := runtime.GOOS
if params.Config.Image != "" {
img, err = daemon.imageService.GetImage(params.Config.Image)
if err != nil {
return nil, err
}
if img.OS != "" {
os = img.OS
} else {
// default to the host OS except on Windows with LCOW
if runtime.GOOS == "windows" && system.LCOWSupported() {
os = "linux"
}
}
imgID = img.ID()
if runtime.GOOS == "windows" && img.OS == "linux" && !system.LCOWSupported() {
return nil, errors.New("operating system on which parent image was created is not Windows")
}
} else {
if runtime.GOOS == "windows" {
os = "linux" // 'scratch' case.
}
}
if err := daemon.mergeAndVerifyConfig(params.Config, img); err != nil {
return nil, errdefs.InvalidParameter(err)
}
	if err := daemon.mergeAndVerifyLogConfig(&params.HostConfig.LogConfig); err != nil {
return nil, errdefs.InvalidParameter(err)
}
if container, err = daemon.newContainer(params.Name, os, params.Config, params.HostConfig, imgID, managed); err != nil {
return nil, err
}
defer func() {
if retErr != nil {
if err := daemon.cleanupContainer(container, true, true); err != nil {
logrus.Errorf("failed to cleanup container on create error: %v", err)
}
}
}()
if err := daemon.setSecurityOptions(container, params.HostConfig); err != nil {
return nil, err
}
container.HostConfig.StorageOpt = params.HostConfig.StorageOpt
// Fixes: https://github.com/moby/moby/issues/34074 and
// https://github.com/docker/for-win/issues/999.
// Merge the daemon's storage options if they aren't already present. We only
// do this on Windows as there's no effective sandbox size limit other than
// physical on Linux.
if runtime.GOOS == "windows" {
if container.HostConfig.StorageOpt == nil {
container.HostConfig.StorageOpt = make(map[string]string)
}
for _, v := range daemon.configStore.GraphOptions {
opt := strings.SplitN(v, "=", 2)
if _, ok := container.HostConfig.StorageOpt[opt[0]]; !ok {
container.HostConfig.StorageOpt[opt[0]] = opt[1]
}
}
}
// Set RWLayer for container after mount labels have been set
rwLayer, err := daemon.imageService.CreateLayer(container, setupInitLayer(daemon.idMappings))
if err != nil {
return nil, errdefs.System(err)
}
container.RWLayer = rwLayer
rootIDs := daemon.idMappings.RootPair()
if err := idtools.MkdirAndChown(container.Root, 0700, rootIDs); err != nil {
return nil, err
}
if err := idtools.MkdirAndChown(container.CheckpointDir(), 0700, rootIDs); err != nil {
return nil, err
}
if err := daemon.setHostConfig(container, params.HostConfig); err != nil {
return nil, err
}
if err := daemon.createContainerOSSpecificSettings(container, params.Config, params.HostConfig); err != nil {
return nil, err
}
var endpointsConfigs map[string]*networktypes.EndpointSettings
if params.NetworkingConfig != nil {
endpointsConfigs = params.NetworkingConfig.EndpointsConfig
}
// Make sure NetworkMode has an acceptable value. We do this to ensure
// backwards API compatibility.
runconfig.SetDefaultNetModeIfBlank(container.HostConfig)
daemon.updateContainerNetworkSettings(container, endpointsConfigs)
if err := daemon.Register(container); err != nil {
return nil, err
}
stateCtr.set(container.ID, "stopped")
daemon.LogContainerEvent(container, "create")
return container, nil
}
func toHostConfigSelinuxLabels(labels []string) []string {
for i, l := range labels {
labels[i] = "label=" + l
}
return labels
}
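// For illustration (added commentary, not in the original file): toHostConfigSelinuxLabels
// rewrites raw SELinux options, e.g. ["user:system_u", "type:container_t"], into the
// HostConfig SecurityOpt form ["label=user:system_u", "label=type:container_t"].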
func (daemon *Daemon) generateSecurityOpt(hostConfig *containertypes.HostConfig) ([]string, error) {
for _, opt := range hostConfig.SecurityOpt {
con := strings.Split(opt, "=")
if con[0] == "label" {
// Caller overrode SecurityOpts
return nil, nil
}
}
ipcMode := hostConfig.IpcMode
pidMode := hostConfig.PidMode
privileged := hostConfig.Privileged
if ipcMode.IsHost() || pidMode.IsHost() || privileged {
return toHostConfigSelinuxLabels(label.DisableSecOpt()), nil
}
var ipcLabel []string
var pidLabel []string
ipcContainer := ipcMode.Container()
pidContainer := pidMode.Container()
if ipcContainer != "" {
c, err := daemon.GetContainer(ipcContainer)
if err != nil {
return nil, err
}
ipcLabel = label.DupSecOpt(c.ProcessLabel)
if pidContainer == "" {
return toHostConfigSelinuxLabels(ipcLabel), err
}
}
if pidContainer != "" {
c, err := daemon.GetContainer(pidContainer)
if err != nil {
return nil, err
}
pidLabel = label.DupSecOpt(c.ProcessLabel)
if ipcContainer == "" {
return toHostConfigSelinuxLabels(pidLabel), err
}
}
if pidLabel != nil && ipcLabel != nil {
for i := 0; i < len(pidLabel); i++ {
if pidLabel[i] != ipcLabel[i] {
return nil, fmt.Errorf("--ipc and --pid containers SELinux labels aren't the same")
}
}
return toHostConfigSelinuxLabels(pidLabel), nil
}
return nil, nil
}
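// Example (added commentary, not in the original file): for a container started with
// --ipc=container:a --pid=container:b, generateSecurityOpt only succeeds when both
// referenced containers carry the same SELinux process label; that label is then
// duplicated into "label=..." entries, while host IPC/PID or --privileged disables
// labeling entirely.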
func (daemon *Daemon) mergeAndVerifyConfig(config *containertypes.Config, img *image.Image) error {
if img != nil && img.Config != nil {
if err := merge(config, img.Config); err != nil {
return err
}
}
// Reset the Entrypoint if it is [""]
if len(config.Entrypoint) == 1 && config.Entrypoint[0] == "" {
config.Entrypoint = nil
}
if len(config.Entrypoint) == 0 && len(config.Cmd) == 0 {
return fmt.Errorf("No command specified")
}
return nil
}
// Checks if the client set configurations for more than one network while creating a container
// Also checks if the IPAMConfig is valid
func verifyNetworkingConfig(nwConfig *networktypes.NetworkingConfig) error {
if nwConfig == nil || len(nwConfig.EndpointsConfig) == 0 {
return nil
}
if len(nwConfig.EndpointsConfig) == 1 {
for k, v := range nwConfig.EndpointsConfig {
if v == nil {
return errdefs.InvalidParameter(errors.Errorf("no EndpointSettings for %s", k))
}
if v.IPAMConfig != nil {
if v.IPAMConfig.IPv4Address != "" && net.ParseIP(v.IPAMConfig.IPv4Address).To4() == nil {
return errors.Errorf("invalid IPv4 address: %s", v.IPAMConfig.IPv4Address)
}
if v.IPAMConfig.IPv6Address != "" {
n := net.ParseIP(v.IPAMConfig.IPv6Address)
// if the address is an invalid network address (ParseIP == nil) or if it is
// an IPv4 address (To4() != nil), then it is an invalid IPv6 address
if n == nil || n.To4() != nil {
return errors.Errorf("invalid IPv6 address: %s", v.IPAMConfig.IPv6Address)
}
}
}
}
return nil
}
l := make([]string, 0, len(nwConfig.EndpointsConfig))
for k := range nwConfig.EndpointsConfig {
l = append(l, k)
}
return errors.Errorf("Container cannot be connected to network endpoints: %s", strings.Join(l, ", "))
}
| apache-2.0 |
ern/elasticsearch | server/src/main/java/org/elasticsearch/http/HttpInfo.java | 3025 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.http;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.node.ReportingService;
import java.io.IOException;
public class HttpInfo implements ReportingService.Info {
private final BoundTransportAddress address;
private final long maxContentLength;
public HttpInfo(StreamInput in) throws IOException {
this(new BoundTransportAddress(in), in.readLong());
}
public HttpInfo(BoundTransportAddress address, long maxContentLength) {
this.address = address;
this.maxContentLength = maxContentLength;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
address.writeTo(out);
out.writeLong(maxContentLength);
}
static final class Fields {
static final String HTTP = "http";
static final String BOUND_ADDRESS = "bound_address";
static final String PUBLISH_ADDRESS = "publish_address";
static final String MAX_CONTENT_LENGTH = "max_content_length";
static final String MAX_CONTENT_LENGTH_IN_BYTES = "max_content_length_in_bytes";
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.HTTP);
builder.array(Fields.BOUND_ADDRESS, (Object[]) address.boundAddresses());
TransportAddress publishAddress = address.publishAddress();
String publishAddressString = publishAddress.toString();
String hostString = publishAddress.address().getHostString();
if (InetAddresses.isInetAddress(hostString) == false) {
publishAddressString = hostString + '/' + publishAddress.toString();
}
builder.field(Fields.PUBLISH_ADDRESS, publishAddressString);
builder.humanReadableField(Fields.MAX_CONTENT_LENGTH_IN_BYTES, Fields.MAX_CONTENT_LENGTH, maxContentLength());
builder.endObject();
return builder;
}
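    // Illustrative output shape only (added commentary; addresses and sizes are made up):
    //
    //   "http" : {
    //     "bound_address" : [ "127.0.0.1:9200" ],
    //     "publish_address" : "127.0.0.1:9200",
    //     "max_content_length_in_bytes" : 104857600
    //   }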
public BoundTransportAddress address() {
return address;
}
public BoundTransportAddress getAddress() {
return address();
}
public ByteSizeValue maxContentLength() {
return new ByteSizeValue(maxContentLength);
}
public ByteSizeValue getMaxContentLength() {
return maxContentLength();
}
}
| apache-2.0 |
mghosh4/druid | processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/collection/MemoryOpenHashTable.java | 12927 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.groupby.epinephelinae.collection;
import it.unimi.dsi.fastutil.ints.IntIterator;
import org.apache.datasketches.memory.Memory;
import org.apache.datasketches.memory.WritableMemory;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.groupby.epinephelinae.Groupers;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.NoSuchElementException;
/**
* An open-addressed hash table with linear probing backed by {@link WritableMemory}. Does not offer a similar
* interface to {@link java.util.Map} because this is meant to be useful to lower-level, high-performance callers.
* There is no copying or serde of keys and values: callers access the backing memory of the table directly.
*
* This table will not grow itself. Callers must handle growing if required; the {@link #copyTo} method is provided
* to assist.
*/
public class MemoryOpenHashTable
{
private static final byte USED_BYTE = 1;
private static final int USED_BYTE_SIZE = Byte.BYTES;
private final WritableMemory tableMemory;
private final int keySize;
private final int valueSize;
private final int bucketSize;
// Maximum number of elements in the table (based on numBuckets and maxLoadFactor).
private final int maxSize;
// Number of available/used buckets in the table. Always a power of two.
private final int numBuckets;
// Mask that clips a number to [0, numBuckets). Used when searching through buckets.
private final int bucketMask;
// Number of elements in the table right now.
private int size;
/**
* Create a new table.
*
* @param tableMemory backing memory for the table; must be exactly large enough to hold "numBuckets"
* @param numBuckets number of buckets for the table
* @param maxSize maximum number of elements for the table; must be less than numBuckets
* @param keySize key size in bytes
* @param valueSize value size in bytes
*/
public MemoryOpenHashTable(
final WritableMemory tableMemory,
final int numBuckets,
final int maxSize,
final int keySize,
final int valueSize
)
{
this.tableMemory = tableMemory;
this.numBuckets = numBuckets;
this.bucketMask = numBuckets - 1;
this.maxSize = maxSize;
this.keySize = keySize;
this.valueSize = valueSize;
this.bucketSize = bucketSize(keySize, valueSize);
// Our main intended users (VectorGrouper implementations) need the tableMemory to be backed by a big-endian
// ByteBuffer that is coterminous with the tableMemory, since it's going to feed that buffer into VectorAggregators
// instead of interacting with our WritableMemory directly. Nothing about this class actually requires that the
// Memory be backed by a ByteBuffer, but we'll check it here anyway for the benefit of our biggest customer.
verifyMemoryIsByteBuffer(tableMemory);
if (!tableMemory.getTypeByteOrder().equals(ByteOrder.nativeOrder())) {
throw new ISE("tableMemory must be native byte order");
}
if (tableMemory.getCapacity() != memoryNeeded(numBuckets, bucketSize)) {
throw new ISE(
"tableMemory must be size[%,d] but was[%,d]",
memoryNeeded(numBuckets, bucketSize),
tableMemory.getCapacity()
);
}
if (maxSize >= numBuckets) {
throw new ISE("maxSize must be less than numBuckets");
}
if (Integer.bitCount(numBuckets) != 1) {
throw new ISE("numBuckets must be a power of two but was[%,d]", numBuckets);
}
clear();
}
/**
* Returns the amount of memory needed for a table.
*
* This is just a multiplication, which is easy enough to do on your own, but sometimes it's nice for clarity's sake
* to call a function with a name that indicates why the multiplication is happening.
*
* @param numBuckets number of buckets
* @param bucketSize size per bucket (in bytes)
*
* @return size of table (in bytes)
*/
public static int memoryNeeded(final int numBuckets, final int bucketSize)
{
return numBuckets * bucketSize;
}
/**
* Returns the size of each bucket in a table.
*
* @param keySize size of keys (in bytes)
* @param valueSize size of values (in bytes)
*
* @return size of buckets (in bytes)
*/
public static int bucketSize(final int keySize, final int valueSize)
{
return USED_BYTE_SIZE + keySize + valueSize;
}
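  // Worked example (added commentary, not part of the original source): with 4-byte keys
  // and 8-byte values, bucketSize(4, 8) = 1 + 4 + 8 = 13 bytes, so a 1024-bucket table
  // requires memoryNeeded(1024, 13) = 13,312 bytes of backing memory.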
/**
* Clear the table, resetting size to zero.
*/
public void clear()
{
size = 0;
// Clear used flags.
for (int bucket = 0; bucket < numBuckets; bucket++) {
tableMemory.putByte((long) bucket * bucketSize, (byte) 0);
}
}
/**
* Copy this table into another one. The other table must be large enough to hold all the copied buckets. The other
* table will be cleared before the copy takes place.
*
* @param other the other table
* @param copyHandler a callback that is notified for each copied bucket
*/
public void copyTo(final MemoryOpenHashTable other, @Nullable final BucketCopyHandler copyHandler)
{
if (other.size() > 0) {
other.clear();
}
for (int bucket = 0; bucket < numBuckets; bucket++) {
final int bucketOffset = bucket * bucketSize;
if (isOffsetUsed(bucketOffset)) {
final int keyPosition = bucketOffset + USED_BYTE_SIZE;
final int keyHash = Groupers.smear(HashTableUtils.hashMemory(tableMemory, keyPosition, keySize));
final int newBucket = other.findBucket(keyHash, tableMemory, keyPosition);
if (newBucket >= 0) {
// Not expected to happen, since we cleared the other table first.
throw new ISE("Found already-used bucket while copying");
}
if (!other.canInsertNewBucket()) {
throw new ISE("Unable to copy bucket to new table, size[%,d]", other.size());
}
final int newBucketOffset = -(newBucket + 1) * bucketSize;
assert !other.isOffsetUsed(newBucketOffset);
tableMemory.copyTo(bucketOffset, other.tableMemory, newBucketOffset, bucketSize);
other.size++;
if (copyHandler != null) {
copyHandler.bucketCopied(bucket, -(newBucket + 1), this, other);
}
}
}
// Sanity check.
if (other.size() != size) {
throw new ISE("New table size[%,d] != old table size[%,d] after copying", other.size(), size);
}
}
/**
* Finds the bucket for a particular key.
*
* @param keyHash result of calling {@link HashTableUtils#hashMemory} on this key
* @param keySpace memory containing the key
* @param keySpacePosition position of the key within keySpace
*
* @return bucket number if currently occupied, or {@code -bucket - 1} if not occupied (yet)
*/
public int findBucket(final int keyHash, final Memory keySpace, final int keySpacePosition)
{
int bucket = keyHash & bucketMask;
while (true) {
final int bucketOffset = bucket * bucketSize;
if (tableMemory.getByte(bucketOffset) == 0) {
// Found unused bucket before finding our key.
return -bucket - 1;
}
final boolean keyFound = HashTableUtils.memoryEquals(
tableMemory,
bucketOffset + USED_BYTE_SIZE,
keySpace,
keySpacePosition,
keySize
);
if (keyFound) {
return bucket;
}
bucket = (bucket + 1) & bucketMask;
}
}
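  // Illustrative caller pattern (added commentary; it mirrors how copyTo and initBucket in
  // this class use the encoded return value):
  //
  //   int bucket = table.findBucket(keyHash, keySpace, keyPosition);
  //   if (bucket < 0 && table.canInsertNewBucket()) {
  //     table.initBucket(-(bucket + 1), keySpace, keyPosition);  // claim the free slot
  //   }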
/**
* Returns whether this table can accept a new bucket.
*/
public boolean canInsertNewBucket()
{
return size < maxSize;
}
/**
* Initialize a bucket with a particular key.
*
* Do not call this method unless the bucket is currently unused and {@link #canInsertNewBucket()} returns true.
*
* @param bucket bucket number
* @param keySpace memory containing the key
* @param keySpacePosition position of the key within keySpace
*/
public void initBucket(final int bucket, final Memory keySpace, final int keySpacePosition)
{
final int bucketOffset = bucket * bucketSize;
// Method preconditions.
assert canInsertNewBucket() && !isOffsetUsed(bucketOffset);
// Mark the bucket used and write in the key.
tableMemory.putByte(bucketOffset, USED_BYTE);
keySpace.copyTo(keySpacePosition, tableMemory, bucketOffset + USED_BYTE_SIZE, keySize);
size++;
}
/**
* Returns the number of elements currently in the table.
*/
public int size()
{
return size;
}
/**
* Returns the number of buckets in this table. Note that not all of these can actually be used. The amount that
* can be used depends on the "maxSize" parameter provided during construction.
*/
public int numBuckets()
{
return numBuckets;
}
/**
* Returns the size of keys, in bytes.
*/
public int keySize()
{
return keySize;
}
/**
* Returns the size of values, in bytes.
*/
public int valueSize()
{
return valueSize;
}
/**
* Returns the offset within each bucket where the key starts.
*/
public int bucketKeyOffset()
{
return USED_BYTE_SIZE;
}
/**
* Returns the offset within each bucket where the value starts.
*/
public int bucketValueOffset()
{
return USED_BYTE_SIZE + keySize;
}
/**
* Returns the size in bytes of each bucket.
*/
public int bucketSize()
{
return bucketSize;
}
/**
* Returns the position within {@link #memory()} where a particular bucket starts.
*/
public int bucketMemoryPosition(final int bucket)
{
return bucket * bucketSize;
}
/**
* Returns the memory backing this table.
*/
public WritableMemory memory()
{
return tableMemory;
}
/**
* Iterates over all used buckets, returning bucket numbers for each one.
*
* The intent is that callers will pass the bucket numbers to {@link #bucketMemoryPosition} and then use
* {@link #bucketKeyOffset()} and {@link #bucketValueOffset()} to extract keys and values from the buckets as needed.
*/
public IntIterator bucketIterator()
{
return new IntIterator()
{
private int curr = 0;
private int currBucket = -1;
@Override
public boolean hasNext()
{
return curr < size;
}
@Override
public int nextInt()
{
if (curr >= size) {
throw new NoSuchElementException();
}
currBucket++;
while (!isOffsetUsed(currBucket * bucketSize)) {
currBucket++;
}
curr++;
return currBucket;
}
};
}
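  // Illustrative read loop (added commentary), following the javadoc contract above:
  //
  //   final IntIterator it = table.bucketIterator();
  //   while (it.hasNext()) {
  //     final int position = table.bucketMemoryPosition(it.nextInt());
  //     // key bytes start at position + table.bucketKeyOffset(),
  //     // value bytes at position + table.bucketValueOffset()
  //   }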
/**
* Returns whether the bucket at position "bucketOffset" is used or not. Note that this is a bucket position (in
* bytes), not a bucket number.
*/
private boolean isOffsetUsed(final int bucketOffset)
{
return tableMemory.getByte(bucketOffset) == USED_BYTE;
}
/**
* Validates that some Memory is coterminous with a backing big-endian ByteBuffer. Returns quietly if so, throws an
* exception otherwise.
*/
private static void verifyMemoryIsByteBuffer(final Memory memory)
{
final ByteBuffer buffer = memory.getByteBuffer();
if (buffer == null) {
throw new ISE("tableMemory must be ByteBuffer-backed");
}
if (!buffer.order().equals(ByteOrder.BIG_ENDIAN)) {
throw new ISE("tableMemory's ByteBuffer must be in big-endian order");
}
if (buffer.capacity() != memory.getCapacity() || buffer.remaining() != buffer.capacity()) {
throw new ISE("tableMemory's ByteBuffer must be coterminous");
}
}
/**
* Callback used by {@link #copyTo}.
*/
public interface BucketCopyHandler
{
/**
* Indicates that "oldBucket" in "oldTable" was copied to "newBucket" in "newTable".
*
* @param oldBucket old bucket number
* @param newBucket new bucket number
* @param oldTable old table
* @param newTable new table
*/
void bucketCopied(
int oldBucket,
int newBucket,
MemoryOpenHashTable oldTable,
MemoryOpenHashTable newTable
);
}
}
| apache-2.0 |
holmes/intellij-community | platform/lang-impl/src/com/intellij/ide/util/TreeFileChooserDialog.java | 16269 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.projectView.BaseProjectTreeBuilder;
import com.intellij.ide.projectView.ProjectViewNode;
import com.intellij.ide.projectView.TreeStructureProvider;
import com.intellij.ide.projectView.impl.AbstractProjectTreeStructure;
import com.intellij.ide.projectView.impl.ProjectAbstractTreeStructureBase;
import com.intellij.ide.projectView.impl.ProjectTreeBuilder;
import com.intellij.ide.projectView.impl.nodes.PsiFileNode;
import com.intellij.ide.util.gotoByName.ChooseByNameModel;
import com.intellij.ide.util.gotoByName.ChooseByNamePanel;
import com.intellij.ide.util.gotoByName.ChooseByNamePopupComponent;
import com.intellij.ide.util.gotoByName.GotoFileCellRenderer;
import com.intellij.ide.util.treeView.AlphaComparator;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.search.FileTypeIndex;
import com.intellij.psi.search.FilenameIndex;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.DoubleClickListener;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.*;
import java.util.List;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class TreeFileChooserDialog extends DialogWrapper implements TreeFileChooser {
private Tree myTree;
private PsiFile mySelectedFile = null;
private final Project myProject;
private BaseProjectTreeBuilder myBuilder;
private TabbedPaneWrapper myTabbedPane;
private ChooseByNamePanel myGotoByNamePanel;
@Nullable private final PsiFile myInitialFile;
@Nullable private final PsiFileFilter myFilter;
@Nullable private final FileType myFileType;
private final boolean myDisableStructureProviders;
private final boolean myShowLibraryContents;
private boolean mySelectSearchByNameTab = false;
public TreeFileChooserDialog(final Project project,
String title,
@Nullable final PsiFile initialFile,
@Nullable FileType fileType,
@Nullable PsiFileFilter filter,
final boolean disableStructureProviders,
final boolean showLibraryContents) {
super(project, true);
myInitialFile = initialFile;
myFilter = filter;
myFileType = fileType;
myDisableStructureProviders = disableStructureProviders;
myShowLibraryContents = showLibraryContents;
setTitle(title);
myProject = project;
init();
if (initialFile != null) {
// dialog does not exist yet
SwingUtilities.invokeLater(new Runnable(){
@Override
public void run() {
selectFile(initialFile);
}
});
}
SwingUtilities.invokeLater(new Runnable(){
@Override
public void run() {
handleSelectionChanged();
}
});
}
@Override
protected JComponent createCenterPanel() {
final DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode());
myTree = new Tree(model);
final ProjectAbstractTreeStructureBase treeStructure = new AbstractProjectTreeStructure(myProject) {
@Override
public boolean isFlattenPackages() {
return false;
}
@Override
public boolean isShowMembers() {
return false;
}
@Override
public boolean isHideEmptyMiddlePackages() {
return true;
}
@Override
public Object[] getChildElements(final Object element) {
return filterFiles(super.getChildElements(element));
}
@Override
public boolean isAbbreviatePackageNames() {
return false;
}
@Override
public boolean isShowLibraryContents() {
return myShowLibraryContents;
}
@Override
public boolean isShowModules() {
return false;
}
@Override
public List<TreeStructureProvider> getProviders() {
return myDisableStructureProviders ? null : super.getProviders();
}
};
myBuilder = new ProjectTreeBuilder(myProject, myTree, model, AlphaComparator.INSTANCE, treeStructure);
myTree.setRootVisible(false);
myTree.expandRow(0);
myTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
myTree.setCellRenderer(new NodeRenderer());
UIUtil.setLineStyleAngled(myTree);
final JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myTree);
scrollPane.setPreferredSize(JBUI.size(500, 300));
myTree.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(final KeyEvent e) {
if (KeyEvent.VK_ENTER == e.getKeyCode()) {
doOKAction();
}
}
});
new DoubleClickListener() {
@Override
protected boolean onDoubleClick(MouseEvent e) {
final TreePath path = myTree.getPathForLocation(e.getX(), e.getY());
if (path != null && myTree.isPathSelected(path)) {
doOKAction();
return true;
}
return false;
}
}.installOn(myTree);
myTree.addTreeSelectionListener(
new TreeSelectionListener() {
@Override
public void valueChanged(final TreeSelectionEvent e) {
handleSelectionChanged();
}
}
);
new TreeSpeedSearch(myTree);
myTabbedPane = new TabbedPaneWrapper(getDisposable());
final JPanel dummyPanel = new JPanel(new BorderLayout());
String name = null;
if (myInitialFile != null) {
name = myInitialFile.getName();
}
PsiElement context = myInitialFile == null ? null : myInitialFile;
myGotoByNamePanel = new ChooseByNamePanel(myProject, new MyGotoFileModel(), name, true, context) {
@Override
protected void close(final boolean isOk) {
super.close(isOk);
if (isOk) {
doOKAction();
}
else {
doCancelAction();
}
}
@Override
protected void initUI(final ChooseByNamePopupComponent.Callback callback,
final ModalityState modalityState,
boolean allowMultipleSelection) {
super.initUI(callback, modalityState, allowMultipleSelection);
dummyPanel.add(myGotoByNamePanel.getPanel(), BorderLayout.CENTER);
//IdeFocusTraversalPolicy.getPreferredFocusedComponent(myGotoByNamePanel.getPanel()).requestFocus();
if (mySelectSearchByNameTab) {
myTabbedPane.setSelectedIndex(1);
}
}
@Override
protected void showTextFieldPanel() {
}
@Override
protected void chosenElementMightChange() {
handleSelectionChanged();
}
};
myTabbedPane.addTab(IdeBundle.message("tab.chooser.project"), scrollPane);
myTabbedPane.addTab(IdeBundle.message("tab.chooser.search.by.name"), dummyPanel);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
myGotoByNamePanel.invoke(new MyCallback(), ModalityState.stateForComponent(getRootPane()), false);
}
});
myTabbedPane.addChangeListener(
new ChangeListener() {
@Override
public void stateChanged(final ChangeEvent e) {
handleSelectionChanged();
}
}
);
return myTabbedPane.getComponent();
}
public void selectSearchByNameTab() {
mySelectSearchByNameTab = true;
}
private void handleSelectionChanged(){
final PsiFile selection = calcSelectedClass();
setOKActionEnabled(selection != null);
}
@Override
protected void doOKAction() {
mySelectedFile = calcSelectedClass();
if (mySelectedFile == null) return;
super.doOKAction();
}
@Override
public void doCancelAction() {
mySelectedFile = null;
super.doCancelAction();
}
@Override
public PsiFile getSelectedFile(){
return mySelectedFile;
}
@Override
public void selectFile(@NotNull final PsiFile file) {
// Select element in the tree
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (myBuilder != null) {
myBuilder.select(file, file.getVirtualFile(), true);
}
}
}, ModalityState.stateForComponent(getWindow()));
}
@Override
public void showDialog() {
show();
}
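// Illustrative usage sketch (hedged; the factory call below is an assumption, only
// showDialog() and getSelectedFile() are defined in this class):
//   TreeFileChooser chooser = TreeFileChooserFactory.getInstance(project)
//     .createFileChooser("Choose File", initialFile, fileType, filter);
//   chooser.showDialog();
//   PsiFile selected = chooser.getSelectedFile();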
private PsiFile calcSelectedClass() {
if (myTabbedPane.getSelectedIndex() == 1) {
return (PsiFile)myGotoByNamePanel.getChosenElement();
}
else {
final TreePath path = myTree.getSelectionPath();
if (path == null) return null;
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
final Object userObject = node.getUserObject();
if (!(userObject instanceof ProjectViewNode)) return null;
ProjectViewNode pvNode = (ProjectViewNode) userObject;
VirtualFile vFile = pvNode.getVirtualFile();
if (vFile != null && !vFile.isDirectory()) {
return PsiManager.getInstance(myProject).findFile(vFile);
}
return null;
}
}
@Override
public void dispose() {
if (myBuilder != null) {
Disposer.dispose(myBuilder);
myBuilder = null;
}
super.dispose();
}
@Override
protected String getDimensionServiceKey() {
return "#com.intellij.ide.util.TreeFileChooserDialog";
}
@Override
public JComponent getPreferredFocusedComponent() {
return myTree;
}
private final class MyGotoFileModel implements ChooseByNameModel {
private final int myMaxSize = WindowManagerEx.getInstanceEx().getFrame(myProject).getSize().width;
@Override
@NotNull
public Object[] getElementsByName(final String name, final boolean checkBoxState, final String pattern) {
GlobalSearchScope scope = myShowLibraryContents ? GlobalSearchScope.allScope(myProject) : GlobalSearchScope.projectScope(myProject);
final PsiFile[] psiFiles = FilenameIndex.getFilesByName(myProject, name, scope);
return filterFiles(psiFiles);
}
@Override
public String getPromptText() {
return IdeBundle.message("prompt.filechooser.enter.file.name");
}
@Override
public String getCheckBoxName() {
return null;
}
@Override
public char getCheckBoxMnemonic() {
return 0;
}
@Override
public String getNotInMessage() {
return "";
}
@Override
public String getNotFoundMessage() {
return "";
}
@Override
public boolean loadInitialCheckBoxState() {
return true;
}
@Override
public void saveInitialCheckBoxState(final boolean state) {
}
@Override
public PsiElementListCellRenderer getListCellRenderer() {
return new GotoFileCellRenderer(myMaxSize);
}
@Override
@NotNull
public String[] getNames(final boolean checkBoxState) {
final String[] fileNames;
if (myFileType != null && myProject != null) {
GlobalSearchScope scope = myShowLibraryContents ? GlobalSearchScope.allScope(myProject) : GlobalSearchScope.projectScope(myProject);
Collection<VirtualFile> virtualFiles = FileTypeIndex.getFiles(myFileType, scope);
fileNames = ContainerUtil.map2Array(virtualFiles, String.class, new Function<VirtualFile, String>() {
@Override
public String fun(VirtualFile file) {
return file.getName();
}
});
}
else {
fileNames = FilenameIndex.getAllFilenames(myProject);
}
final Set<String> array = new THashSet<String>();
for (String fileName : fileNames) {
if (!array.contains(fileName)) {
array.add(fileName);
}
}
final String[] result = ArrayUtil.toStringArray(array);
Arrays.sort(result);
return result;
}
@Override
public boolean willOpenEditor() {
return true;
}
@Override
public String getElementName(final Object element) {
if (!(element instanceof PsiFile)) return null;
return ((PsiFile)element).getName();
}
@Override
@Nullable
public String getFullName(final Object element) {
if (element instanceof PsiFile) {
final VirtualFile virtualFile = ((PsiFile)element).getVirtualFile();
return virtualFile != null ? virtualFile.getPath() : null;
}
return getElementName(element);
}
@Override
public String getHelpId() {
return null;
}
@Override
@NotNull
public String[] getSeparators() {
return new String[] {"/", "\\"};
}
@Override
public boolean useMiddleMatching() {
return false;
}
}
private final class MyCallback extends ChooseByNamePopupComponent.Callback {
@Override
public void elementChosen(final Object element) {
mySelectedFile = (PsiFile)element;
close(OK_EXIT_CODE);
}
}
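// Filters project-view entries by the optional PsiFileFilter and FileType, keeping
// ProjectViewNode entries that may still contain matching children.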
private Object[] filterFiles(final Object[] list) {
Condition<PsiFile> condition = new Condition<PsiFile>() {
@Override
public boolean value(final PsiFile psiFile) {
if (myFilter != null && !myFilter.accept(psiFile)) {
return false;
}
boolean accepted = myFileType == null || psiFile.getFileType() == myFileType;
VirtualFile virtualFile = psiFile.getVirtualFile();
if (virtualFile != null && !accepted) {
accepted = virtualFile.getFileType() == myFileType;
}
return accepted;
}
};
final List<Object> result = new ArrayList<Object>(list.length);
for (Object o : list) {
final PsiFile psiFile;
if (o instanceof PsiFile) {
psiFile = (PsiFile)o;
}
else if (o instanceof PsiFileNode) {
psiFile = ((PsiFileNode)o).getValue();
}
else {
psiFile = null;
}
if (psiFile != null && !condition.value(psiFile)) {
continue;
}
else {
if (o instanceof ProjectViewNode) {
final ProjectViewNode projectViewNode = (ProjectViewNode)o;
if (!projectViewNode.canHaveChildrenMatching(condition)) {
continue;
}
}
}
result.add(o);
}
return ArrayUtil.toObjectArray(result);
}
}
| apache-2.0 |
jinfenw/runv | vendor/github.com/kardianos/osext/osext_go18.go | 106 | //+build go1.8
package osext
import "os"
func executable() (string, error) {
return os.Executable()
}
| apache-2.0 |
rmarting/camel | camel-core/src/main/java/org/apache/camel/model/dataformat/CastorDataFormat.java | 6297 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.dataformat;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.camel.CamelContext;
import org.apache.camel.model.DataFormatDefinition;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.spi.Metadata;
/**
* Castor data format is used to unmarshal an XML payload to a POJO or to marshal a POJO back to an XML payload.
*
* @version
*/
@Metadata(firstVersion = "2.1.0", label = "dataformat,transformation,xml", title = "Castor")
@XmlRootElement(name = "castor")
@XmlAccessorType(XmlAccessType.FIELD)
@Deprecated
public class CastorDataFormat extends DataFormatDefinition {
@XmlAttribute
private String mappingFile;
@XmlAttribute
@Metadata(defaultValue = "true")
private Boolean whitelistEnabled = true;
@XmlAttribute
private String allowedUnmarshallObjects;
@XmlAttribute
private String deniedUnmarshallObjects;
@XmlAttribute @Metadata(defaultValue = "true")
private Boolean validation;
@XmlAttribute @Metadata(defaultValue = "UTF-8")
private String encoding;
@XmlAttribute
private String[] packages;
@XmlAttribute
private String[] classes;
public CastorDataFormat() {
super("castor");
}
public Boolean getValidation() {
return validation;
}
/**
* Whether validation is turned on or off.
* <p/>
* Is by default true.
*/
public void setValidation(Boolean validation) {
this.validation = validation;
}
public String getMappingFile() {
return mappingFile;
}
/**
* Path to a Castor mapping file to load from the classpath.
*/
public void setMappingFile(String mappingFile) {
this.mappingFile = mappingFile;
}
public String[] getPackages() {
return packages;
}
/**
* Add additional packages to Castor XmlContext
*/
public void setPackages(String[] packages) {
this.packages = packages;
}
public String[] getClasses() {
return classes;
}
/**
* Add additional class names to Castor XmlContext
*/
public void setClasses(String[] classes) {
this.classes = classes;
}
public String getEncoding() {
return encoding;
}
/**
* Encoding to use when marshalling an Object to XML.
* <p/>
* Is by default UTF-8
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
public Boolean getWhitelistEnabled() {
return whitelistEnabled;
}
/**
* Define if Whitelist feature is enabled or not
*/
public void setWhitelistEnabled(Boolean whitelistEnabled) {
this.whitelistEnabled = whitelistEnabled;
}
public String getAllowedUnmarshallObjects() {
return allowedUnmarshallObjects;
}
/**
* Define the allowed objects to be unmarshalled.
*
* You can specify the FQN class name of allowed objects, and you can use a comma to separate multiple entries.
* It is also possible to use wildcards and regular expression which is based on the pattern
* defined by {@link org.apache.camel.util.EndpointHelper#matchPattern(String, String)}.
* Denied objects takes precedence over allowed objects.
*/
public void setAllowedUnmarshallObjects(String allowedUnmarshallObjects) {
this.allowedUnmarshallObjects = allowedUnmarshallObjects;
}
public String getDeniedUnmarshallObjects() {
return deniedUnmarshallObjects;
}
/**
* Define the denied objects to be unmarshalled.
*
* You can specify the FQN class name of denied objects, and you can use a comma to separate multiple entries.
* It is also possible to use wildcards and regular expression which is based on the pattern
* defined by {@link org.apache.camel.util.EndpointHelper#matchPattern(String, String)}.
* Denied objects takes precedence over allowed objects.
*/
public void setDeniedUnmarshallObjects(String deniedUnmarshallObjects) {
this.deniedUnmarshallObjects = deniedUnmarshallObjects;
}
@Override
protected void configureDataFormat(DataFormat dataFormat, CamelContext camelContext) {
if (mappingFile != null) {
setProperty(camelContext, dataFormat, "mappingFile", mappingFile);
}
// should be true by default
boolean isValidation = getValidation() == null || getValidation();
setProperty(camelContext, dataFormat, "validation", isValidation);
if (encoding != null) {
setProperty(camelContext, dataFormat, "encoding", encoding);
}
if (packages != null) {
setProperty(camelContext, dataFormat, "packages", packages);
}
if (classes != null) {
setProperty(camelContext, dataFormat, "classes", classes);
}
if (whitelistEnabled != null) {
setProperty(camelContext, dataFormat, "whitelistEnabled", whitelistEnabled);
}
if (allowedUnmarshallObjects != null) {
setProperty(camelContext, dataFormat, "allowedUnmarshallObjects", allowedUnmarshallObjects);
}
if (deniedUnmarshallObjects != null) {
setProperty(camelContext, dataFormat, "deniedUnmarshallObjects", deniedUnmarshallObjects);
}
}
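// Illustrative usage sketch (hedged; the endpoint URIs and mapping file name are
// assumptions for illustration only):
//   CastorDataFormat castor = new CastorDataFormat();
//   castor.setMappingFile("mapping.xml");
//   from("direct:toXml").marshal(castor);
//   from("direct:toPojo").unmarshal(castor);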
} | apache-2.0 |
ltilve/ChromiumGStreamerBackend | net/quic/quic_flow_controller_test.cc | 15923 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/quic/quic_flow_controller.h"
#include "base/format_macros.h"
#include "base/strings/stringprintf.h"
#include "net/quic/quic_flags.h"
#include "net/quic/quic_utils.h"
#include "net/quic/test_tools/quic_connection_peer.h"
#include "net/quic/test_tools/quic_flow_controller_peer.h"
#include "net/quic/test_tools/quic_sent_packet_manager_peer.h"
#include "net/quic/test_tools/quic_test_utils.h"
#include "net/test/gtest_util.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace net {
namespace test {
// Receive window auto-tuning uses RTT in its logic.
const int64 kRtt = 100;
class QuicFlowControllerTest : public ::testing::Test {
public:
QuicFlowControllerTest()
: stream_id_(1234),
send_window_(kInitialSessionFlowControlWindowForTest),
receive_window_(kInitialSessionFlowControlWindowForTest),
connection_(Perspective::IS_CLIENT) {}
void Initialize() {
flow_controller_.reset(
new QuicFlowController(&connection_, stream_id_, Perspective::IS_CLIENT,
send_window_, receive_window_, false));
}
protected:
QuicStreamId stream_id_;
QuicByteCount send_window_;
QuicByteCount receive_window_;
scoped_ptr<QuicFlowController> flow_controller_;
MockConnection connection_;
};
TEST_F(QuicFlowControllerTest, SendingBytes) {
Initialize();
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(send_window_, flow_controller_->SendWindowSize());
// Send some bytes, but not enough to block.
flow_controller_->AddBytesSent(send_window_ / 2);
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_EQ(send_window_ / 2, flow_controller_->SendWindowSize());
// Send enough bytes to block.
flow_controller_->AddBytesSent(send_window_ / 2);
EXPECT_TRUE(flow_controller_->IsBlocked());
EXPECT_EQ(0u, flow_controller_->SendWindowSize());
// BLOCKED frame should get sent.
EXPECT_CALL(connection_, SendBlocked(stream_id_)).Times(1);
flow_controller_->MaybeSendBlocked();
// Update the send window, and verify this has unblocked.
EXPECT_TRUE(flow_controller_->UpdateSendWindowOffset(2 * send_window_));
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_EQ(send_window_, flow_controller_->SendWindowSize());
// Updating with a smaller offset doesn't change anything.
EXPECT_FALSE(flow_controller_->UpdateSendWindowOffset(send_window_ / 10));
EXPECT_EQ(send_window_, flow_controller_->SendWindowSize());
// Try to send more bytes, violating flow control.
EXPECT_CALL(connection_,
SendConnectionClose(QUIC_FLOW_CONTROL_SENT_TOO_MUCH_DATA));
EXPECT_DFATAL(
flow_controller_->AddBytesSent(send_window_ * 10),
base::StringPrintf("Trying to send an extra %" PRIu64 " bytes",
send_window_ * 10));
EXPECT_TRUE(flow_controller_->IsBlocked());
EXPECT_EQ(0u, flow_controller_->SendWindowSize());
}
TEST_F(QuicFlowControllerTest, ReceivingBytes) {
Initialize();
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Receive some bytes, updating highest received offset, but not enough to
// fill flow control receive window.
EXPECT_TRUE(
flow_controller_->UpdateHighestReceivedOffset(1 + receive_window_ / 2));
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ((receive_window_ / 2) - 1,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Consume enough bytes to send a WINDOW_UPDATE frame.
EXPECT_CALL(connection_, SendWindowUpdate(stream_id_, ::testing::_)).Times(1);
flow_controller_->AddBytesConsumed(1 + receive_window_ / 2);
// Result is that once again we have a fully open receive window.
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
}
TEST_F(QuicFlowControllerTest, OnlySendBlockedFrameOncePerOffset) {
Initialize();
// Test that we don't send duplicate BLOCKED frames. We should only send one
// BLOCKED frame at a given send window offset.
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(send_window_, flow_controller_->SendWindowSize());
// Send enough bytes to block.
flow_controller_->AddBytesSent(send_window_);
EXPECT_TRUE(flow_controller_->IsBlocked());
EXPECT_EQ(0u, flow_controller_->SendWindowSize());
// Expect that 2 BLOCKED frames should get sent in total.
EXPECT_CALL(connection_, SendBlocked(stream_id_)).Times(2);
// BLOCKED frame should get sent.
flow_controller_->MaybeSendBlocked();
// BLOCKED frame should not get sent again until our send offset changes.
flow_controller_->MaybeSendBlocked();
flow_controller_->MaybeSendBlocked();
flow_controller_->MaybeSendBlocked();
flow_controller_->MaybeSendBlocked();
flow_controller_->MaybeSendBlocked();
// Update the send window, then send enough bytes to block again.
EXPECT_TRUE(flow_controller_->UpdateSendWindowOffset(2 * send_window_));
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_EQ(send_window_, flow_controller_->SendWindowSize());
flow_controller_->AddBytesSent(send_window_);
EXPECT_TRUE(flow_controller_->IsBlocked());
EXPECT_EQ(0u, flow_controller_->SendWindowSize());
// BLOCKED frame should get sent as send offset has changed.
flow_controller_->MaybeSendBlocked();
}
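// The auto-tuning tests below share one idea: when a full window-update threshold is
// consumed within two RTTs and the auto-tune flag is on, the threshold should grow;
// otherwise it must stay unchanged.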
TEST_F(QuicFlowControllerTest, ReceivingBytesFastIncreasesFlowWindow) {
ValueRestore<bool> old_flag(&FLAGS_quic_auto_tune_receive_window, true);
// This test will generate two WINDOW_UPDATE frames.
EXPECT_CALL(connection_, SendWindowUpdate(stream_id_, ::testing::_)).Times(2);
Initialize();
flow_controller_->set_auto_tune_receive_window(true);
// Make sure the clock is initialized.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(1));
QuicSentPacketManager* manager =
QuicConnectionPeer::GetSentPacketManager(&connection_);
RttStats* rtt_stats = QuicSentPacketManagerPeer::GetRttStats(manager);
rtt_stats->UpdateRtt(QuicTime::Delta::FromMilliseconds(kRtt),
QuicTime::Delta::Zero(), QuicTime::Zero());
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
QuicByteCount threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
QuicStreamOffset receive_offset = threshold + 1;
// Receive some bytes, updating highest received offset, but not enough to
// fill flow control receive window.
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest - receive_offset,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Consume enough bytes to send a WINDOW_UPDATE frame.
flow_controller_->AddBytesConsumed(threshold + 1);
// Result is that once again we have a fully open receive window.
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Move time forward, but by less than two RTTs. Then receive and consume
// some more, forcing a second WINDOW_UPDATE with an increased max window
// size.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(2 * kRtt - 1));
receive_offset += threshold + 1;
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
flow_controller_->AddBytesConsumed(threshold + 1);
EXPECT_FALSE(flow_controller_->FlowControlViolation());
QuicByteCount new_threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
EXPECT_GT(new_threshold, threshold);
}
TEST_F(QuicFlowControllerTest, ReceivingBytesFastStatusQuo) {
ValueRestore<bool> old_flag(&FLAGS_quic_auto_tune_receive_window, false);
// This test will generate two WINDOW_UPDATE frames.
EXPECT_CALL(connection_, SendWindowUpdate(stream_id_, ::testing::_)).Times(2);
Initialize();
flow_controller_->set_auto_tune_receive_window(true);
// Make sure the clock is initialized.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(1));
QuicSentPacketManager* manager =
QuicConnectionPeer::GetSentPacketManager(&connection_);
RttStats* rtt_stats = QuicSentPacketManagerPeer::GetRttStats(manager);
rtt_stats->UpdateRtt(QuicTime::Delta::FromMilliseconds(kRtt),
QuicTime::Delta::Zero(), QuicTime::Zero());
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
QuicByteCount threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
QuicStreamOffset receive_offset = threshold + 1;
// Receive some bytes, updating highest received offset, but not enough to
// fill flow control receive window.
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest - receive_offset,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Consume enough bytes to send a WINDOW_UPDATE frame.
flow_controller_->AddBytesConsumed(threshold + 1);
// Result is that once again we have a fully open receive window.
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Move time forward, but by less than two RTTs. Then receive and consume
// some more, forcing a second WINDOW_UPDATE with an unchanged max window size,
// since the auto-tune flag is off.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(2 * kRtt - 1));
receive_offset += threshold + 1;
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
flow_controller_->AddBytesConsumed(threshold + 1);
EXPECT_FALSE(flow_controller_->FlowControlViolation());
QuicByteCount new_threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
EXPECT_EQ(new_threshold, threshold);
}
TEST_F(QuicFlowControllerTest, ReceivingBytesNormalStableFlowWindow) {
ValueRestore<bool> old_flag(&FLAGS_quic_auto_tune_receive_window, true);
// This test will generate two WINDOW_UPDATE frames.
EXPECT_CALL(connection_, SendWindowUpdate(stream_id_, ::testing::_)).Times(2);
Initialize();
flow_controller_->set_auto_tune_receive_window(true);
// Make sure the clock is initialized.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(1));
QuicSentPacketManager* manager =
QuicConnectionPeer::GetSentPacketManager(&connection_);
RttStats* rtt_stats = QuicSentPacketManagerPeer::GetRttStats(manager);
rtt_stats->UpdateRtt(QuicTime::Delta::FromMilliseconds(kRtt),
QuicTime::Delta::Zero(), QuicTime::Zero());
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
QuicByteCount threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
QuicStreamOffset receive_offset = threshold + 1;
// Receive some bytes, updating highest received offset, but not enough to
// fill flow control receive window.
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest - receive_offset,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
flow_controller_->AddBytesConsumed(threshold + 1);
// Result is that once again we have a fully open receive window.
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Move time forward, but by more than two RTTs. Then receive and consume
// some more, forcing a second WINDOW_UPDATE with unchanged max window size.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(2 * kRtt + 1));
receive_offset += threshold + 1;
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
flow_controller_->AddBytesConsumed(threshold + 1);
EXPECT_FALSE(flow_controller_->FlowControlViolation());
QuicByteCount new_threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
EXPECT_EQ(new_threshold, threshold);
}
TEST_F(QuicFlowControllerTest, ReceivingBytesNormalStatusQuo) {
ValueRestore<bool> old_flag(&FLAGS_quic_auto_tune_receive_window, false);
// This test will generate two WINDOW_UPDATE frames.
EXPECT_CALL(connection_, SendWindowUpdate(stream_id_, ::testing::_)).Times(2);
Initialize();
flow_controller_->set_auto_tune_receive_window(true);
// Make sure the clock is initialized.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(1));
QuicSentPacketManager* manager =
QuicConnectionPeer::GetSentPacketManager(&connection_);
RttStats* rtt_stats = QuicSentPacketManagerPeer::GetRttStats(manager);
rtt_stats->UpdateRtt(QuicTime::Delta::FromMilliseconds(kRtt),
QuicTime::Delta::Zero(), QuicTime::Zero());
EXPECT_FALSE(flow_controller_->IsBlocked());
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
QuicByteCount threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
QuicStreamOffset receive_offset = threshold + 1;
// Receive some bytes, updating highest received offset, but not enough to
// fill flow control receive window.
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest - receive_offset,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
flow_controller_->AddBytesConsumed(threshold + 1);
// Result is that once again we have a fully open receive window.
EXPECT_FALSE(flow_controller_->FlowControlViolation());
EXPECT_EQ(kInitialSessionFlowControlWindowForTest,
QuicFlowControllerPeer::ReceiveWindowSize(flow_controller_.get()));
// Move time forward, but by more than two RTTs. Then receive and consume
// some more, forcing a second WINDOW_UPDATE with unchanged max window size.
connection_.AdvanceTime(QuicTime::Delta::FromMilliseconds(2 * kRtt + 1));
receive_offset += threshold + 1;
EXPECT_TRUE(flow_controller_->UpdateHighestReceivedOffset(receive_offset));
flow_controller_->AddBytesConsumed(threshold + 1);
EXPECT_FALSE(flow_controller_->FlowControlViolation());
QuicByteCount new_threshold =
QuicFlowControllerPeer::WindowUpdateThreshold(flow_controller_.get());
EXPECT_EQ(new_threshold, threshold);
}
} // namespace test
} // namespace net
| bsd-3-clause |
scheib/chromium | third_party/blink/web_tests/dom/legacy_dom_conformance/xhtml/level3/core/nodesettextcontent03.js | 2881 | /*
Copyright © 2001-2004 World Wide Web Consortium,
(Massachusetts Institute of Technology, European Research Consortium
for Informatics and Mathematics, Keio University). All
Rights Reserved. This work is distributed under the W3C® Software License [1] in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
[1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231
*/
/**
* Gets URI that identifies the test.
* @return uri identifier of test
*/
function getTargetURI() {
return "http://www.w3.org/2001/DOM-Test-Suite/level3/core/nodesettextcontent03";
}
var docsLoaded = -1000000;
var builder = null;
//
// This function is called by the testing framework before
// running the test suite.
//
// If there are no configuration exceptions, asynchronous
// document loading is started. Otherwise, the status
// is set to complete and the exception is immediately
// raised when entering the body of the test.
//
function setUpPage() {
setUpPageStatus = 'running';
try {
//
// creates test document builder, may throw exception
//
builder = createConfiguredBuilder();
docsLoaded = 0;
var docRef = null;
if (typeof(this.doc) != 'undefined') {
docRef = this.doc;
}
docsLoaded += preload(docRef, "doc", "hc_staff");
if (docsLoaded == 1) {
setUpPageStatus = 'complete';
}
} catch(ex) {
catchInitializationError(builder, ex);
setUpPageStatus = 'complete';
}
}
//
// This method is called on the completion of
// each asynchronous load started in setUpPage.
//
// When every asynchronously loaded document has completed,
// the page status is changed which allows the
// body of the test to be executed.
function loadComplete() {
if (++docsLoaded == 1) {
setUpPageStatus = 'complete';
}
}
/**
*
Using setTextContent on this DocumentType node, attempt to set the textContent of this
DocumentType node to textContent. Retrieve the textContent and verify if it is null.
* @author IBM
* @author Neil Delima
* @see http://www.w3.org/TR/2004/REC-DOM-Level-3-Core-20040407/core#Node3-textContent
*/
function nodesettextcontent03() {
var success;
if(checkInitialization(builder, "nodesettextcontent03") != null) return;
var doc;
var docType;
var textContent;
var docRef = null;
if (typeof(this.doc) != 'undefined') {
docRef = this.doc;
}
doc = load(docRef, "doc", "hc_staff");
docType = doc.doctype;
docType.textContent = "textContent";
textContent = docType.textContent;
assertNull("nodesettextcontent03",textContent);
}
function runTest() {
nodesettextcontent03();
}
| bsd-3-clause |
sahiljain/catapult | third_party/py_vulcanize/third_party/rjsmin/bench/write.py | 10696 | #!/usr/bin/env python
# -*- coding: ascii -*-
r"""
=========================
Write benchmark results
=========================
Write benchmark results.
:Copyright:
Copyright 2014
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Usage::
python -mbench.write [-p plain] [-t table] <pickled
-p plain Plain file to write to (like docs/BENCHMARKS).
-t table Table file to write to (like docs/_userdoc/benchmark.txt).
"""
if __doc__:
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
__license__ = "Apache License, Version 2.0"
__version__ = "1.0.0"
import os as _os
import re as _re
import sys as _sys
try:
unicode
except NameError:
def uni(v):
if hasattr(v, 'decode'):
return v.decode('latin-1')
return str(v)
else:
def uni(v):
if isinstance(v, unicode):
return v.encode('utf-8')
return str(v)
def write_table(filename, results):
"""
Output tabled benchmark results
:Parameters:
`filename` : ``str``
Filename to write to
`results` : ``list``
Results
"""
try:
next
except NameError:
next = lambda i: (getattr(i, 'next', None) or i.__next__)()
try:
cmp
except NameError:
cmp = lambda a, b: (a > b) - (a < b)
names = [
('simple_port', 'Simple Port'),
('jsmin_2_0_9', 'jsmin 2.0.9'),
('slimit_0_8_1', 'slimit 0.8.1'),
('slimit_0_8_1_mangle', 'slimit 0.8.1 (mangle)'),
('rjsmin', '|rjsmin|'),
('_rjsmin', r'_\ |rjsmin|'),
]
benched_per_table = 2
results = sorted(results, reverse=True)
# First we transform our data into a table (list of lists)
pythons, widths = [], [0] * (benched_per_table + 1)
last_version = None
for version, _, result in results:
version = uni(version)
if not(last_version is None or version.startswith('2.')):
continue
last_version = version
namesub = _re.compile(r'(?:-\d+(?:\.\d+)*)?\.js$').sub
result = iter(result)
tables = []
# given our data it's easier to create the table transposed...
for benched in result:
rows = [['Name'] + [desc for _, desc in names]]
for _ in range(benched_per_table):
if _:
try:
benched = next(result)
except StopIteration:
rows.append([''] + ['' for _ in names])
continue
times = dict((
uni(port), (time, benched['sizes'][idx])
) for idx, (port, time) in enumerate(benched['times']))
columns = ['%s (%.1f)' % (
namesub('', _os.path.basename(uni(benched['filename']))),
benched['size'] / 1024.0,
)]
for idx, (port, _) in enumerate(names):
if port not in times:
columns.append('n/a')
continue
time, size = times[port]
if time is None:
columns.append('(failed)')
continue
columns.append('%s%.2f ms (%.1f %s)' % (
idx == 0 and ' ' or '',
time,
size / 1024.0,
idx == 0 and '\\*' or ['=', '>', '<'][
cmp(size, benched['sizes'][0])
],
))
rows.append(columns)
# calculate column widths (global for all tables)
for idx, row in enumerate(rows):
widths[idx] = max(widths[idx], max(map(len, row)))
# ... and transpose it back.
tables.append(zip(*rows))
pythons.append((version, tables))
if last_version.startswith('2.'):
break
# Second we create a rest table from it
lines = []
separator = lambda c='-': '+'.join([''] + [
c * (width + 2) for width in widths
] + [''])
for idx, (version, tables) in enumerate(pythons):
if idx:
lines.append('')
lines.append('')
line = 'Python %s' % (version,)
lines.append(line)
lines.append('~' * len(line))
for table in tables:
lines.append('')
lines.append('.. rst-class:: benchmark')
lines.append('')
for idx, row in enumerate(table):
if idx == 0:
# header
lines.append(separator())
lines.append('|'.join([''] + [
' %s%*s ' % (col, len(col) - width, '')
for width, col in zip(widths, row)
] + ['']))
lines.append(separator('='))
else: # data
lines.append('|'.join([''] + [
j == 0 and (
' %s%*s ' % (col, len(col) - widths[j], '')
) or (
['%*s ', ' %*s '][idx == 1] % (widths[j], col)
)
for j, col in enumerate(row)
] + ['']))
lines.append(separator())
fplines = []
fp = open(filename)
try:
fpiter = iter(fp)
for line in fpiter:
line = line.rstrip()
if line == '.. begin tables':
buf = []
for line in fpiter:
line = line.rstrip()
if line == '.. end tables':
fplines.append('.. begin tables')
fplines.append('')
fplines.extend(lines)
fplines.append('')
fplines.append('.. end tables')
buf = []
break
else:
buf.append(line)
else:
fplines.extend(buf)
_sys.stderr.write("Placeholder container not found!\n")
else:
fplines.append(line)
finally:
fp.close()
fp = open(filename, 'w')
try:
fp.write('\n'.join(fplines) + '\n')
finally:
fp.close()
def write_plain(filename, results):
"""
Output plain benchmark results
:Parameters:
`filename` : ``str``
Filename to write to
`results` : ``list``
Results
"""
lines = []
results = sorted(results, reverse=True)
for idx, (version, import_notes, result) in enumerate(results):
if idx:
lines.append('')
lines.append('')
lines.append('$ python%s -OO bench/main.py bench/*.js' % (
'.'.join(version.split('.')[:2])
))
lines.append('~' * 72)
for note in import_notes:
lines.append(uni(note))
lines.append('Python Release: %s' % (version,))
for single in result:
lines.append('')
lines.append('Benchmarking %r... (%.1f KiB)' % (
uni(single['filename']), single['size'] / 1024.0
))
for msg in single['messages']:
lines.append(msg)
times = []
space = max([len(uni(port)) for port, _ in single['times']])
for idx, (port, time) in enumerate(single['times']):
port = uni(port)
if time is None:
lines.append(" FAILED %s" % (port,))
else:
times.append(time)
lines.append(
" Timing %s%s ... (%5.1f KiB %s) %8.2f ms" % (
port,
" " * (space - len(port)),
single['sizes'][idx] / 1024.0,
idx == 0 and '*' or ['=', '>', '<'][
cmp(single['sizes'][idx], single['sizes'][0])
],
time
)
)
if len(times) > 1:
lines[-1] += " (factor: %s)" % (', '.join([
'%.2f' % (timed / time) for timed in times[:-1]
]))
lines.append('')
lines.append('')
lines.append('# vim: nowrap')
fp = open(filename, 'w')
try:
fp.write('\n'.join(lines) + '\n')
finally:
fp.close()
def main(argv=None):
""" Main """
import getopt as _getopt
import pickle as _pickle
if argv is None:
argv = _sys.argv[1:]
try:
opts, args = _getopt.getopt(argv, "hp:t:", ["help"])
except _getopt.GetoptError:
e = _sys.exc_info()[0](_sys.exc_info()[1])
print >> _sys.stderr, "%s\nTry %s -mbench.write --help" % (
e,
_os.path.basename(_sys.executable),
)
_sys.exit(2)
plain, table = None, None
for key, value in opts:
if key in ("-h", "--help"):
print >> _sys.stderr, (
"%s -mbench.write [-p plain] [-t table] <pickled" % (
_os.path.basename(_sys.executable),
)
)
_sys.exit(0)
elif key == '-p':
plain = str(value)
elif key == '-t':
table = str(value)
struct = []
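# Each pickled record read from stdin is assumed to be a (version, import_notes,
# result) tuple, presumably produced by bench/main.py.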
_sys.stdin = getattr(_sys.stdin, 'detach', lambda: _sys.stdin)()
try:
while True:
version, import_notes, result = _pickle.load(_sys.stdin)
if hasattr(version, 'decode'):
version = version.decode('latin-1')
struct.append((version, import_notes, result))
except EOFError:
pass
if plain:
write_plain(plain, struct)
if table:
write_table(table, struct)
if __name__ == '__main__':
main()
| bsd-3-clause |
mazimm/AliPhysics | PWGHF/vertexingHF/macros/AddTaskDmesonMCPerform.C | 1457 | AliAnalysisTaskDmesonMCPerform *AddTaskDmesonMCPerform(TString suffix="",
Int_t centOpt=AliRDHFCuts::kCentOff,
TString dpluscutfilename="")
{
//
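// Illustrative call from a run macro (hedged; the cut file name is an assumption):
//   AddTaskDmesonMCPerform("", AliRDHFCuts::kCentOff, "DplustoKpipiCuts.root");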
AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
if (!mgr) {
::Error("AddTaskMCPerform", "No analysis manager to connect to.");
}
AliRDHFCutsDplustoKpipi* analysiscutsdp=0x0;
if(dpluscutfilename!="") {
TFile* fileDpcuts=TFile::Open(dpluscutfilename.Data());
if(!fileDpcuts ||(fileDpcuts&& !fileDpcuts->IsOpen())){
AliFatal("Input file not found : check your cut object");
}else{
analysiscutsdp = (AliRDHFCutsDplustoKpipi*)fileDpcuts->Get("AnalysisCuts");
}
}
AliAnalysisTaskDmesonMCPerform *task = new AliAnalysisTaskDmesonMCPerform();
// task->SetAODMismatchProtection(-1);
task->SetUseCentrality(centOpt);
if(analysiscutsdp) task->SetDplusAnalysisCuts(analysiscutsdp);
mgr->AddTask(task);
// Create containers for input/output
TString outname = "coutputDperf";
outname += suffix.Data();
TString outputfile = AliAnalysisManager::GetCommonFileName();
outputfile += ":PWGHF_D2H_MCPerform";
AliAnalysisDataContainer *coutputDmc = mgr->CreateContainer(outname,
TList::Class(),
AliAnalysisManager::kOutputContainer,
outputfile.Data());
mgr->ConnectInput(task,0,mgr->GetCommonInputContainer());
mgr->ConnectOutput(task,1,coutputDmc);
return task;
}
| bsd-3-clause |
Razzwan/yii2 | tests/data/ar/OrderItem.php | 1017 | <?php
namespace yiiunit\data\ar;
/**
* Class OrderItem
*
* @property integer $order_id
* @property integer $item_id
* @property integer $quantity
* @property string $subtotal
*/
class OrderItem extends ActiveRecord
{
public static $tableName;
public static function tableName()
{
return static::$tableName ?: 'order_item';
}
public function getOrder()
{
return $this->hasOne(Order::className(), ['id' => 'order_id']);
}
public function getItem()
{
return $this->hasOne(Item::className(), ['id' => 'item_id']);
}
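// Illustrative (hedged) use of the relations above; the composite key values are
// assumptions for illustration only:
//   $order = OrderItem::findOne(['order_id' => 1, 'item_id' => 2])->order;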
// relations used by ::testFindCompositeWithJoin()
public function getOrderItemCompositeWithJoin()
{
return $this->hasOne(OrderItem::className(), ['item_id' => 'item_id', 'order_id' => 'order_id' ])
->joinWith('item');
}
public function getOrderItemCompositeNoJoin()
{
return $this->hasOne(OrderItem::className(), ['item_id' => 'item_id', 'order_id' => 'order_id' ]);
}
}
| bsd-3-clause |
vamage/consul | vendor/google.golang.org/grpc/resolver/dns/go18.go | 789 | // +build go1.8
/*
*
* Copyright 2017 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package dns
import "net"
var (
lookupHost = net.DefaultResolver.LookupHost
lookupSRV = net.DefaultResolver.LookupSRV
lookupTXT = net.DefaultResolver.LookupTXT
)
| mpl-2.0 |
titanium-as/titanium-as.github.io | js/inspinia.js | 550 | // INSPINIA Landing Page Custom scripts
$(document).ready(function () {
// Highlight the top nav as scrolling
$('body').scrollspy({
target: '.navbar-fixed-top',
offset: 80
})
// Page scrolling feature
$('a.page-scroll').bind('click', function(event) {
var link = $(this);
$('html, body').stop().animate({
scrollTop: $(link.attr('href')).offset().top - 70
}, 500);
event.preventDefault();
});
});
// Activate WOW.js plugin for animation on scroll
new WOW().init(); | mit |
sashberd/cdnjs | ajax/libs/rusha/0.8.7/rusha.js | 21144 | (function () {
function Rusha(chunkSize) {
'use strict';
var util = {
getDataType: function (data) {
if (typeof data === 'string') {
return 'string';
}
if (data instanceof Array) {
return 'array';
}
if (typeof global !== 'undefined' && global.Buffer && global.Buffer.isBuffer(data)) {
return 'buffer';
}
if (data instanceof ArrayBuffer) {
return 'arraybuffer';
}
if (data.buffer instanceof ArrayBuffer) {
return 'view';
}
if (data instanceof Blob) {
return 'blob';
}
throw new Error('Unsupported data type.');
}
};
var // Private object structure.
self$2 = { fill: 0 };
var // Calculate the length of buffer that the sha1 routine uses
// including the padding.
padlen = function (len) {
for (len += 9; len % 64 > 0; len += 1);
return len;
};
var padZeroes = function (bin, len) {
var h8 = new Uint8Array(bin.buffer);
var om = len % 4, align = len - om;
switch (om) {
case 0:
h8[align + 3] = 0;
case 1:
h8[align + 2] = 0;
case 2:
h8[align + 1] = 0;
case 3:
h8[align + 0] = 0;
}
for (var i$2 = (len >> 2) + 1; i$2 < bin.length; i$2++)
bin[i$2] = 0;
};
var padData = function (bin, chunkLen, msgLen) {
bin[chunkLen >> 2] |= 128 << 24 - (chunkLen % 4 << 3);
// To support msgLen >= 2 GiB, use a float division when computing the
// high 32-bits of the big-endian message length in bits.
bin[((chunkLen >> 2) + 2 & ~15) + 14] = msgLen / (1 << 29) | 0;
bin[((chunkLen >> 2) + 2 & ~15) + 15] = msgLen << 3;
};
var // Convert a binary string and write it to the heap.
// A binary string is expected to only contain char codes < 256.
convStr = function (H8, H32, start, len, off) {
var str = this, i$2, om = off % 4, lm = (len + om) % 4, j = len - lm;
switch (om) {
case 0:
H8[off] = str.charCodeAt(start + 3);
case 1:
H8[off + 1 - (om << 1) | 0] = str.charCodeAt(start + 2);
case 2:
H8[off + 2 - (om << 1) | 0] = str.charCodeAt(start + 1);
case 3:
H8[off + 3 - (om << 1) | 0] = str.charCodeAt(start);
}
if (len < lm + om) {
return;
}
for (i$2 = 4 - om; i$2 < j; i$2 = i$2 + 4 | 0) {
H32[off + i$2 >> 2] = str.charCodeAt(start + i$2) << 24 | str.charCodeAt(start + i$2 + 1) << 16 | str.charCodeAt(start + i$2 + 2) << 8 | str.charCodeAt(start + i$2 + 3);
}
switch (lm) {
case 3:
H8[off + j + 1 | 0] = str.charCodeAt(start + j + 2);
case 2:
H8[off + j + 2 | 0] = str.charCodeAt(start + j + 1);
case 1:
H8[off + j + 3 | 0] = str.charCodeAt(start + j);
}
};
var // Convert a buffer or array and write it to the heap.
// The buffer or array is expected to only contain elements < 256.
convBuf = function (H8, H32, start, len, off) {
var buf = this, i$2, om = off % 4, lm = (len + om) % 4, j = len - lm;
switch (om) {
case 0:
H8[off] = buf[start + 3];
case 1:
H8[off + 1 - (om << 1) | 0] = buf[start + 2];
case 2:
H8[off + 2 - (om << 1) | 0] = buf[start + 1];
case 3:
H8[off + 3 - (om << 1) | 0] = buf[start];
}
if (len < lm + om) {
return;
}
for (i$2 = 4 - om; i$2 < j; i$2 = i$2 + 4 | 0) {
H32[off + i$2 >> 2 | 0] = buf[start + i$2] << 24 | buf[start + i$2 + 1] << 16 | buf[start + i$2 + 2] << 8 | buf[start + i$2 + 3];
}
switch (lm) {
case 3:
H8[off + j + 1 | 0] = buf[start + j + 2];
case 2:
H8[off + j + 2 | 0] = buf[start + j + 1];
case 1:
H8[off + j + 3 | 0] = buf[start + j];
}
};
var convBlob = function (H8, H32, start, len, off) {
var blob = this, i$2, om = off % 4, lm = (len + om) % 4, j = len - lm;
var buf = new Uint8Array(reader.readAsArrayBuffer(blob.slice(start, start + len)));
switch (om) {
case 0:
H8[off] = buf[3];
case 1:
H8[off + 1 - (om << 1) | 0] = buf[2];
case 2:
H8[off + 2 - (om << 1) | 0] = buf[1];
case 3:
H8[off + 3 - (om << 1) | 0] = buf[0];
}
if (len < lm + om) {
return;
}
for (i$2 = 4 - om; i$2 < j; i$2 = i$2 + 4 | 0) {
H32[off + i$2 >> 2 | 0] = buf[i$2] << 24 | buf[i$2 + 1] << 16 | buf[i$2 + 2] << 8 | buf[i$2 + 3];
}
switch (lm) {
case 3:
H8[off + j + 1 | 0] = buf[j + 2];
case 2:
H8[off + j + 2 | 0] = buf[j + 1];
case 1:
H8[off + j + 3 | 0] = buf[j];
}
};
var convFn = function (data) {
switch (util.getDataType(data)) {
case 'string':
return convStr.bind(data);
case 'array':
return convBuf.bind(data);
case 'buffer':
return convBuf.bind(data);
case 'arraybuffer':
return convBuf.bind(new Uint8Array(data));
case 'view':
return convBuf.bind(new Uint8Array(data.buffer, data.byteOffset, data.byteLength));
case 'blob':
return convBlob.bind(data);
}
};
var slice = function (data, offset) {
switch (util.getDataType(data)) {
case 'string':
return data.slice(offset);
case 'array':
return data.slice(offset);
case 'buffer':
return data.slice(offset);
case 'arraybuffer':
return data.slice(offset);
case 'view':
return data.buffer.slice(offset);
}
};
var // Precompute 00 - ff strings
precomputedHex = new Array(256);
for (var i = 0; i < 256; i++) {
precomputedHex[i] = (i < 16 ? '0' : '') + i.toString(16);
}
var // Convert an ArrayBuffer into its hexadecimal string representation.
hex = function (arrayBuffer) {
var binarray = new Uint8Array(arrayBuffer);
var res = new Array(arrayBuffer.byteLength);
for (var i$2 = 0; i$2 < res.length; i$2++) {
res[i$2] = precomputedHex[binarray[i$2]];
}
return res.join('');
};
var ceilHeapSize = function (v) {
// The asm.js spec says:
// The heap object's byteLength must be either
// 2^n for n in [12, 24) or 2^24 * n for n ≥ 1.
// Also, byteLengths smaller than 2^16 are deprecated.
var p;
if (// If v is smaller than 2^16, the smallest possible solution
// is 2^16.
v <= 65536)
return 65536;
if (// If v < 2^24, we round up to 2^n,
// otherwise we round up to 2^24 * n.
v < 16777216) {
for (p = 1; p < v; p = p << 1);
} else {
for (p = 16777216; p < v; p += 16777216);
}
return p;
};
var // Initialize the internal data structures to a new capacity.
init = function (size) {
if (size % 64 > 0) {
throw new Error('Chunk size must be a multiple of 128 bit');
}
self$2.offset = 0;
self$2.maxChunkLen = size;
self$2.padMaxChunkLen = padlen(size);
// The size of the heap is the sum of:
// 1. The padded input message size
// 2. The extended space the algorithm needs (320 bytes)
// 3. The 160 bit state the algorithm uses
self$2.heap = new ArrayBuffer(ceilHeapSize(self$2.padMaxChunkLen + 320 + 20));
self$2.h32 = new Int32Array(self$2.heap);
self$2.h8 = new Int8Array(self$2.heap);
self$2.core = new Rusha._core({
Int32Array: Int32Array,
DataView: DataView
}, {}, self$2.heap);
self$2.buffer = null;
};
// Initialize the data structures according
// to a chunk size.
init(chunkSize || 64 * 1024);
var initState = function (heap, padMsgLen) {
self$2.offset = 0;
var io = new Int32Array(heap, padMsgLen + 320, 5);
io[0] = 1732584193;
io[1] = -271733879;
io[2] = -1732584194;
io[3] = 271733878;
io[4] = -1009589776;
};
var padChunk = function (chunkLen, msgLen) {
var padChunkLen = padlen(chunkLen);
var view = new Int32Array(self$2.heap, 0, padChunkLen >> 2);
padZeroes(view, chunkLen);
padData(view, chunkLen, msgLen);
return padChunkLen;
};
var // Write data to the heap.
write = function (data, chunkOffset, chunkLen, off) {
convFn(data)(self$2.h8, self$2.h32, chunkOffset, chunkLen, off || 0);
};
var // Initialize and call the RushaCore,
// assuming an input buffer of length len * 4.
coreCall = function (data, chunkOffset, chunkLen, msgLen, finalize) {
var padChunkLen = chunkLen;
write(data, chunkOffset, chunkLen);
if (finalize) {
padChunkLen = padChunk(chunkLen, msgLen);
}
self$2.core.hash(padChunkLen, self$2.padMaxChunkLen);
};
var getRawDigest = function (heap, padMaxChunkLen) {
var io = new Int32Array(heap, padMaxChunkLen + 320, 5);
var out = new Int32Array(5);
var arr = new DataView(out.buffer);
arr.setInt32(0, io[0], false);
arr.setInt32(4, io[1], false);
arr.setInt32(8, io[2], false);
arr.setInt32(12, io[3], false);
arr.setInt32(16, io[4], false);
return out;
};
var // Calculate the hash digest as an array of 5 32bit integers.
rawDigest = this.rawDigest = function (str) {
var msgLen = str.byteLength || str.length || str.size || 0;
initState(self$2.heap, self$2.padMaxChunkLen);
var chunkOffset = 0, chunkLen = self$2.maxChunkLen;
for (chunkOffset = 0; msgLen > chunkOffset + chunkLen; chunkOffset += chunkLen) {
coreCall(str, chunkOffset, chunkLen, msgLen, false);
}
coreCall(str, chunkOffset, msgLen - chunkOffset, msgLen, true);
return getRawDigest(self$2.heap, self$2.padMaxChunkLen);
};
// The digest and digestFrom* interface returns the hash digest
// as a hex string.
this.digest = this.digestFromString = this.digestFromBuffer = this.digestFromArrayBuffer = function (str) {
return hex(rawDigest(str).buffer);
};
this.resetState = function () {
initState(self$2.heap, self$2.padMaxChunkLen);
return this;
};
this.append = function (chunk) {
var chunkOffset = 0;
var chunkLen = chunk.byteLength || chunk.length || chunk.size || 0;
var turnOffset = self$2.offset % self$2.maxChunkLen;
var inputLen;
self$2.offset += chunkLen;
while (chunkOffset < chunkLen) {
inputLen = Math.min(chunkLen - chunkOffset, self$2.maxChunkLen - turnOffset);
write(chunk, chunkOffset, inputLen, turnOffset);
turnOffset += inputLen;
chunkOffset += inputLen;
if (turnOffset === self$2.maxChunkLen) {
self$2.core.hash(self$2.maxChunkLen, self$2.padMaxChunkLen);
turnOffset = 0;
}
}
return this;
};
this.getState = function () {
var turnOffset = self$2.offset % self$2.maxChunkLen;
var heap;
if (!turnOffset) {
var io = new Int32Array(self$2.heap, self$2.padMaxChunkLen + 320, 5);
heap = io.buffer.slice(io.byteOffset, io.byteOffset + io.byteLength);
} else {
heap = self$2.heap.slice(0);
}
return {
offset: self$2.offset,
heap: heap
};
};
this.setState = function (state) {
self$2.offset = state.offset;
if (state.heap.byteLength === 20) {
var io = new Int32Array(self$2.heap, self$2.padMaxChunkLen + 320, 5);
io.set(new Int32Array(state.heap));
} else {
self$2.h32.set(new Int32Array(state.heap));
}
return this;
};
var rawEnd = this.rawEnd = function () {
var msgLen = self$2.offset;
var chunkLen = msgLen % self$2.maxChunkLen;
var padChunkLen = padChunk(chunkLen, msgLen);
self$2.core.hash(padChunkLen, self$2.padMaxChunkLen);
var result = getRawDigest(self$2.heap, self$2.padMaxChunkLen);
initState(self$2.heap, self$2.padMaxChunkLen);
return result;
};
this.end = function () {
return hex(rawEnd().buffer);
};
}
;
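// Minimal usage sketch (hedged; based only on the instance API defined above):
//   var r = new Rusha();
//   r.digest('abc'); // hex SHA-1 digest of the string
//   r.resetState().append('ab').append('c');
//   r.end(); // same digest, computed incrementally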
// The low-level RushaCore module provides the heart of Rusha,
// a high-speed sha1 implementation working on an Int32Array heap.
// At first glance, the implementation seems complicated, however
// with the SHA1 spec at hand, it is obvious this is almost a textbook
// implementation that has a few functions hand-inlined and a few loops
// hand-unrolled.
Rusha._core = function RushaCore(stdlib, foreign, heap) {
'use asm';
var H = new stdlib.Int32Array(heap);
function hash(k, x) {
// k in bytes
k = k | 0;
x = x | 0;
var i = 0, j = 0, y0 = 0, z0 = 0, y1 = 0, z1 = 0, y2 = 0, z2 = 0, y3 = 0, z3 = 0, y4 = 0, z4 = 0, t0 = 0, t1 = 0;
y0 = H[x + 320 >> 2] | 0;
y1 = H[x + 324 >> 2] | 0;
y2 = H[x + 328 >> 2] | 0;
y3 = H[x + 332 >> 2] | 0;
y4 = H[x + 336 >> 2] | 0;
for (i = 0; (i | 0) < (k | 0); i = i + 64 | 0) {
z0 = y0;
z1 = y1;
z2 = y2;
z3 = y3;
z4 = y4;
for (j = 0; (j | 0) < 64; j = j + 4 | 0) {
t1 = H[i + j >> 2] | 0;
t0 = ((y0 << 5 | y0 >>> 27) + (y1 & y2 | ~y1 & y3) | 0) + ((t1 + y4 | 0) + 1518500249 | 0) | 0;
y4 = y3;
y3 = y2;
y2 = y1 << 30 | y1 >>> 2;
y1 = y0;
y0 = t0;
H[k + j >> 2] = t1;
}
for (j = k + 64 | 0; (j | 0) < (k + 80 | 0); j = j + 4 | 0) {
t1 = (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) << 1 | (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) >>> 31;
t0 = ((y0 << 5 | y0 >>> 27) + (y1 & y2 | ~y1 & y3) | 0) + ((t1 + y4 | 0) + 1518500249 | 0) | 0;
y4 = y3;
y3 = y2;
y2 = y1 << 30 | y1 >>> 2;
y1 = y0;
y0 = t0;
H[j >> 2] = t1;
}
for (j = k + 80 | 0; (j | 0) < (k + 160 | 0); j = j + 4 | 0) {
t1 = (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) << 1 | (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) >>> 31;
t0 = ((y0 << 5 | y0 >>> 27) + (y1 ^ y2 ^ y3) | 0) + ((t1 + y4 | 0) + 1859775393 | 0) | 0;
y4 = y3;
y3 = y2;
y2 = y1 << 30 | y1 >>> 2;
y1 = y0;
y0 = t0;
H[j >> 2] = t1;
}
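// Rounds 40-59: majority round function (b & c | b & d | c & d), K = 0x8F1BBCDC (written here as the signed value -1894007588).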
for (j = k + 160 | 0; (j | 0) < (k + 240 | 0); j = j + 4 | 0) {
t1 = (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) << 1 | (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) >>> 31;
t0 = ((y0 << 5 | y0 >>> 27) + (y1 & y2 | y1 & y3 | y2 & y3) | 0) + ((t1 + y4 | 0) - 1894007588 | 0) | 0;
y4 = y3;
y3 = y2;
y2 = y1 << 30 | y1 >>> 2;
y1 = y0;
y0 = t0;
H[j >> 2] = t1;
}
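// Rounds 60-79: parity round function again, K = 0xCA62C1D6 (signed -899497514).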
for (j = k + 240 | 0; (j | 0) < (k + 320 | 0); j = j + 4 | 0) {
t1 = (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) << 1 | (H[j - 12 >> 2] ^ H[j - 32 >> 2] ^ H[j - 56 >> 2] ^ H[j - 64 >> 2]) >>> 31;
t0 = ((y0 << 5 | y0 >>> 27) + (y1 ^ y2 ^ y3) | 0) + ((t1 + y4 | 0) - 899497514 | 0) | 0;
y4 = y3;
y3 = y2;
y2 = y1 << 30 | y1 >>> 2;
y1 = y0;
y0 = t0;
H[j >> 2] = t1;
}
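// Add this block's result into the running state (all additions are modulo 2^32).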
y0 = y0 + z0 | 0;
y1 = y1 + z1 | 0;
y2 = y2 + z2 | 0;
y3 = y3 + z3 | 0;
y4 = y4 + z4 | 0;
}
H[x + 320 >> 2] = y0;
H[x + 324 >> 2] = y1;
H[x + 328 >> 2] = y2;
H[x + 332 >> 2] = y3;
H[x + 336 >> 2] = y4;
}
return { hash: hash };
};
// If we're running in Node.js, export a module.
if (typeof module !== 'undefined') {
module.exports = Rusha;
} else if (typeof window !== 'undefined') {
// If we're running in a DOM context, export the Rusha object to toplevel.
window.Rusha = Rusha;
}
// If we're running in a webworker, accept messages containing a jobid and a
// buffer or blob object, and return the hash result.
if (typeof FileReaderSync !== 'undefined') {
var reader = new FileReaderSync();
var hashData = function hash(hasher, data, cb) {
try {
return cb(null, hasher.digest(data));
} catch (e) {
return cb(e);
}
};
var hashFile = function hashArrayBuffer(hasher, readTotal, blockSize, file, cb) {
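// Read the file in blockSize-sized slices, appending each slice to the hasher until the whole file has been consumed.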
var reader$2 = new self.FileReader();
reader$2.onloadend = function onloadend() {
var buffer = reader$2.result;
readTotal += reader$2.result.byteLength;
try {
hasher.append(buffer);
} catch (e) {
cb(e);
return;
}
if (readTotal < file.size) {
hashFile(hasher, readTotal, blockSize, file, cb);
} else {
cb(null, hasher.end());
}
};
reader$2.readAsArrayBuffer(file.slice(readTotal, readTotal + blockSize));
};
self.onmessage = function onMessage(event) {
var data = event.data.data, file = event.data.file, id = event.data.id;
if (typeof id === 'undefined')
return;
if (!file && !data)
return;
var blockSize = event.data.blockSize || 4 * 1024 * 1024;
var hasher = new Rusha(blockSize);
hasher.resetState();
var done = function done$2(err, hash) {
if (!err) {
self.postMessage({
id: id,
hash: hash
});
} else {
self.postMessage({
id: id,
error: err.name
});
}
};
if (data)
hashData(hasher, data, done);
if (file)
hashFile(hasher, 0, blockSize, file, done);
};
}
}()); | mit |
Donavan/dotenv | spec/dotenv/rails_spec.rb | 1750 | require "spec_helper"
ENV["RAILS_ENV"] = "test"
require "rails"
require "dotenv/rails"
describe Dotenv::Railtie do
# Fake watcher for Spring
class SpecWatcher
attr_reader :items
def initialize
@items = []
end
def add(*items)
@items |= items
end
end
before do
allow(Rails).to receive(:root)
.and_return Pathname.new(File.expand_path("../../fixtures", __FILE__))
Rails.application = double(:application)
Spring.watcher = SpecWatcher.new
end
after do
# Reset
Spring.watcher = nil
Rails.application = nil
end
context "before_configuration" do
it "calls #load" do
expect(Dotenv::Railtie.instance).to receive(:load)
ActiveSupport.run_load_hooks(:before_configuration)
end
end
context "load" do
before { Dotenv::Railtie.load }
it "watches .env with Spring" do
expect(Spring.watcher.items).to include(Rails.root.join(".env").to_s)
end
it "watches other loaded files with Spring" do
path = fixture_path("plain.env")
Dotenv.load(path)
expect(Spring.watcher.items).to include(path)
end
it "loads .env, .env.local, and .env.#{Rails.env}" do
expect(Spring.watcher.items).to eql([
Rails.root.join(".env.local").to_s,
Rails.root.join(".env.test").to_s,
Rails.root.join(".env").to_s
])
end
it "loads .env.local before .env" do
expect(ENV["DOTENV"]).to eql("local")
end
context "when Rails.root is nil" do
before do
allow(Rails).to receive(:root).and_return(nil)
end
it "falls back to RAILS_ROOT" do
ENV["RAILS_ROOT"] = "/tmp"
expect(Dotenv::Railtie.root.to_s).to eql("/tmp")
end
end
end
end
| mit |
ExoSkeleton321/expressjs-cf | node_modules/cloudinary/lib/v2/index.js | 299 | // Generated by CoffeeScript 1.10.0
(function() {
var _, v1, v2;
v1 = require('../../cloudinary.js');
_ = require('lodash');
v2 = _.clone(v1);
v2.api = require('./api');
v2.uploader = require('./uploader');
module.exports = v2;
}).call(this);
//# sourceMappingURL=index.js.map
| mit |
Promptus/errbit | config/mongo.rb | 369 | Mongoid.configure do |config|
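# When the configured URL is the bare default 'mongodb://localhost', point Mongoid at a per-environment errbit_<env> database.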
uri = if Errbit::Config.mongo_url == 'mongodb://localhost'
"mongodb://localhost/errbit_#{Rails.env}"
else
Errbit::Config.mongo_url
end
config.load_configuration({
sessions: {
default: {
uri: uri
}
},
options: {
use_activesupport_time_zone: true
}
})
end
| mit |
seogi1004/cdnjs | ajax/libs/froala-editor/2.7.2/js/languages/ru.js | 24636 | /*!
* froala_editor v2.7.2 (https://www.froala.com/wysiwyg-editor)
* License https://froala.com/wysiwyg-editor/terms/
* Copyright 2014-2017 Froala Labs
*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['jquery'], factory);
} else if (typeof module === 'object' && module.exports) {
// Node/CommonJS
module.exports = function( root, jQuery ) {
if ( jQuery === undefined ) {
// require('jQuery') returns a factory that requires window to
// build a jQuery instance, we normalize how we use modules
// that require this pattern but the window provided is a noop
// if it's defined (how jquery works)
if ( typeof window !== 'undefined' ) {
jQuery = require('jquery');
}
else {
jQuery = require('jquery')(root);
}
}
return factory(jQuery);
};
} else {
// Browser globals
factory(window.jQuery);
}
}(function ($) {
/**
* Russian
*/
$.FE.LANGUAGE['ru'] = {
translation: {
// Place holder
"Type something": "\u041d\u0430\u043f\u0438\u0448\u0438\u0442\u0435 \u0447\u0442\u043e\u002d\u043d\u0438\u0431\u0443\u0434\u044c",
// Basic formatting
"Bold": "\u0416\u0438\u0440\u043d\u044b\u0439",
"Italic": "\u041a\u0443\u0440\u0441\u0438\u0432",
"Underline": "\u041f\u043e\u0434\u0447\u0435\u0440\u043a\u043d\u0443\u0442\u044b\u0439",
"Strikethrough": "\u0417\u0430\u0447\u0435\u0440\u043a\u043d\u0443\u0442\u044b\u0439",
// Main buttons
"Insert": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c",
"Delete": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c",
"Cancel": "\u041e\u0442\u043c\u0435\u043d\u0438\u0442\u044c",
"OK": "\u041e\u043a",
"Back": "\u043d\u0430\u0437\u0430\u0434",
"Remove": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c",
"More": "\u0411\u043e\u043b\u044c\u0448\u0435",
"Update": "\u041e\u0431\u043d\u043e\u0432\u0438\u0442\u044c",
"Style": "\u0421\u0442\u0438\u043b\u044c",
// Font
"Font Family": "\u0428\u0440\u0438\u0444\u0442",
"Font Size": "\u0420\u0430\u0437\u043c\u0435\u0440 \u0448\u0440\u0438\u0444\u0442\u0430",
// Colors
"Colors": "\u0426\u0432\u0435\u0442\u0430",
"Background": "\u0424\u043e\u043d",
"Text": "\u0422\u0435\u043a\u0441\u0442",
"HEX Color": "Шестигранный цвет",
// Paragraphs
"Paragraph Format": "\u0424\u043e\u0440\u043c\u0430\u0442 \u0430\u0431\u0437\u0430\u0446\u0430",
"Normal": "\u041d\u043e\u0440\u043c\u0430\u043b\u044c\u043d\u044b\u0439",
"Code": "\u041a\u043e\u0434",
"Heading 1": "\u0417\u0430\u0433\u043e\u043b\u043e\u0432\u043e\u043a 1",
"Heading 2": "\u0417\u0430\u0433\u043e\u043b\u043e\u0432\u043e\u043a 2",
"Heading 3": "\u0417\u0430\u0433\u043e\u043b\u043e\u0432\u043e\u043a 3",
"Heading 4": "\u0417\u0430\u0433\u043e\u043b\u043e\u0432\u043e\u043a 4",
// Style
"Paragraph Style": "\u0421\u0442\u0438\u043b\u044c \u0430\u0431\u0437\u0430\u0446\u0430",
"Inline Style": "\u0412\u0441\u0442\u0440\u043e\u0435\u043d\u043d\u044b\u0439 \u0441\u0442\u0438\u043b\u044c",
// Alignment
"Align": "\u0412\u044b\u0440\u043e\u0432\u043d\u044f\u0442\u044c \u043f\u043e",
"Align Left": "\u041f\u043e \u043b\u0435\u0432\u043e\u043c\u0443 \u043a\u0440\u0430\u044e",
"Align Center": "\u041f\u043e \u0446\u0435\u043d\u0442\u0440\u0443",
"Align Right": "\u041f\u043e \u043f\u0440\u0430\u0432\u043e\u043c\u0443 \u043a\u0440\u0430\u044e",
"Align Justify": "\u041f\u043e \u0448\u0438\u0440\u0438\u043d\u0435",
"None": "\u041d\u0438\u043a\u0430\u043a",
// Lists
"Ordered List": "\u041d\u0443\u043c\u0435\u0440\u043e\u0432\u0430\u043d\u043d\u044b\u0439 \u0441\u043f\u0438\u0441\u043e\u043a",
"Unordered List": "\u041c\u0430\u0440\u043a\u0438\u0440\u043e\u0432\u0430\u043d\u043d\u044b\u0439 \u0441\u043f\u0438\u0441\u043e\u043a",
// Indent
"Decrease Indent": "\u0423\u043c\u0435\u043d\u044c\u0448\u0438\u0442\u044c \u043e\u0442\u0441\u0442\u0443\u043f",
"Increase Indent": "\u0423\u0432\u0435\u043b\u0438\u0447\u0438\u0442\u044c \u043e\u0442\u0441\u0442\u0443\u043f",
// Links
"Insert Link": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0441\u0441\u044b\u043b\u043a\u0443",
"Open in new tab": "\u041e\u0442\u043a\u0440\u044b\u0442\u044c \u0432 \u043d\u043e\u0432\u043e\u0439 \u0432\u043a\u043b\u0430\u0434\u043a\u0435",
"Open Link": "\u041f\u0435\u0440\u0435\u0439\u0442\u0438 \u043f\u043e \u0441\u0441\u044b\u043b\u043a\u0435",
"Edit Link": "\u041e\u0442\u0440\u0435\u0434\u0430\u043a\u0442\u0438\u0440\u043e\u0432\u0430\u0442\u044c \u0441\u0441\u044b\u043b\u043a\u0443",
"Unlink": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c \u0441\u0441\u044b\u043b\u043a\u0443",
"Choose Link": "\u0412\u044b\u0431\u0435\u0440\u0438\u0442\u0435 \u0441\u0441\u044b\u043b\u043a\u0443",
// Images
"Insert Image": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435",
"Upload Image": "\u0417\u0430\u0433\u0440\u0443\u0437\u0438\u0442\u044c \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435",
"By URL": "\u041f\u043e \u0441\u0441\u044b\u043b\u043a\u0435",
"Browse": "\u0417\u0430\u0433\u0440\u0443\u0436\u0435\u043d\u043d\u044b\u0435 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f",
"Drop image": "\u041f\u0435\u0440\u0435\u043c\u0435\u0441\u0442\u0438\u0442\u0435 \u0441\u044e\u0434\u0430 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435",
"or click": "\u0438\u043b\u0438 \u043d\u0430\u0436\u043c\u0438\u0442\u0435",
"Manage Images": "\u0423\u043f\u0440\u0430\u0432\u043b\u0435\u043d\u0438\u0435 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f\u043c\u0438",
"Loading": "\u0417\u0430\u0433\u0440\u0443\u0437\u043a\u0430",
"Deleting": "\u0423\u0434\u0430\u043b\u0435\u043d\u0438\u0435",
"Tags": "\u041a\u043b\u044e\u0447\u0435\u0432\u044b\u0435 \u0441\u043b\u043e\u0432\u0430",
"Are you sure? Image will be deleted.": "\u0412\u044b \u0443\u0432\u0435\u0440\u0435\u043d\u044b? \u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435 \u0431\u0443\u0434\u0435\u0442 \u0443\u0434\u0430\u043b\u0435\u043d\u043e.",
"Replace": "\u0417\u0430\u043c\u0435\u043d\u0438\u0442\u044c",
"Uploading": "\u0417\u0430\u0433\u0440\u0443\u0437\u043a\u0430",
"Loading image": "\u0417\u0430\u0433\u0440\u0443\u0437\u043a\u0430 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f",
"Display": "\u041f\u043e\u043b\u043e\u0436\u0435\u043d\u0438\u0435",
"Inline": "\u041e\u0431\u0442\u0435\u043a\u0430\u043d\u0438\u0435 \u0442\u0435\u043a\u0441\u0442\u043e\u043c",
"Break Text": "\u0412\u0441\u0442\u0440\u043e\u0435\u043d\u043d\u043e\u0435 \u0432 \u0442\u0435\u043a\u0441\u0442",
"Alternate Text": "\u0410\u043b\u044c\u0442\u0435\u0440\u043d\u0430\u0442\u0438\u0432\u043d\u044b\u0439 \u0442\u0435\u043a\u0441\u0442",
"Change Size": "\u0418\u0437\u043c\u0435\u043d\u0438\u0442\u044c \u0440\u0430\u0437\u043c\u0435\u0440",
"Width": "\u0428\u0438\u0440\u0438\u043d\u0430",
"Height": "\u0412\u044b\u0441\u043e\u0442\u0430",
"Something went wrong. Please try again.": "\u0427\u0442\u043e\u002d\u0442\u043e \u043f\u043e\u0448\u043b\u043e \u043d\u0435 \u0442\u0430\u043a\u002e \u041f\u043e\u0436\u0430\u043b\u0443\u0439\u0441\u0442\u0430\u002c \u043f\u043e\u043f\u0440\u043e\u0431\u0443\u0439\u0442\u0435 \u0435\u0449\u0435 \u0440\u0430\u0437\u002e",
"Image Caption": "Подпись изображения",
"Advanced Edit": "Расширенное редактирование",
// Video
"Insert Video": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0432\u0438\u0434\u0435\u043e",
"Embedded Code": "\u0048\u0054\u004d\u004c\u002d\u043a\u043e\u0434 \u0434\u043b\u044f \u0432\u0441\u0442\u0430\u0432\u043a\u0438",
"Paste in a video URL": "Вставить URL-адрес видео",
"Drop video": "Падение видео",
"Your browser does not support HTML5 video.": "Ваш браузер не поддерживает видео html5.",
"Upload Video": "Загрузить видео",
// Tables
"Insert Table": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0442\u0430\u0431\u043b\u0438\u0446\u0443",
"Table Header": "\u0417\u0430\u0433\u043e\u043b\u043e\u0432\u043e\u043a \u0442\u0430\u0431\u043b\u0438\u0446\u044b",
"Remove Table": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c \u0442\u0430\u0431\u043b\u0438\u0446\u0443",
"Table Style": "\u0421\u0442\u0438\u043b\u044c \u0442\u0430\u0431\u043b\u0438\u0446\u044b",
"Horizontal Align": "\u0413\u043e\u0440\u0438\u0437\u043e\u043d\u0442\u0430\u043b\u044c\u043d\u043e\u0435 \u0432\u044b\u0440\u0430\u0432\u043d\u0438\u0432\u0430\u043d\u0438\u0435",
"Row": "\u0421\u0442\u0440\u043e\u043a\u0430",
"Insert row above": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0441\u0442\u0440\u043e\u043a\u0443 \u0441\u0432\u0435\u0440\u0445\u0443",
"Insert row below": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0441\u0442\u0440\u043e\u043a\u0443 \u0441\u043d\u0438\u0437\u0443",
"Delete row": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c \u0441\u0442\u0440\u043e\u043a\u0443",
"Column": "\u0421\u0442\u043e\u043b\u0431\u0435\u0446",
"Insert column before": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0441\u0442\u043e\u043b\u0431\u0435\u0446 \u0441\u043b\u0435\u0432\u0430",
"Insert column after": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0441\u0442\u043e\u043b\u0431\u0435\u0446 \u0441\u043f\u0440\u0430\u0432\u0430",
"Delete column": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c \u0441\u0442\u043e\u043b\u0431\u0435\u0446",
"Cell": "\u042f\u0447\u0435\u0439\u043a\u0430",
"Merge cells": "\u041e\u0431\u044a\u0435\u0434\u0438\u043d\u0438\u0442\u044c \u044f\u0447\u0435\u0439\u043a\u0438",
"Horizontal split": "\u0420\u0430\u0437\u0434\u0435\u043b\u0438\u0442\u044c \u0433\u043e\u0440\u0438\u0437\u043e\u043d\u0442\u0430\u043b\u044c\u043d\u043e",
"Vertical split": "\u0420\u0430\u0437\u0434\u0435\u043b\u0438\u0442\u044c \u0432\u0435\u0440\u0442\u0438\u043a\u0430\u043b\u044c\u043d\u043e",
"Cell Background": "\u0424\u043e\u043d \u044f\u0447\u0435\u0439\u043a\u0438",
"Vertical Align": "\u0412\u0435\u0440\u0442\u0438\u043a\u0430\u043b\u044c\u043d\u043e\u0435 \u0432\u044b\u0440\u0430\u0432\u043d\u0438\u0432\u0430\u043d\u0438\u0435",
"Top": "\u041f\u043e \u0432\u0435\u0440\u0445\u043d\u0435\u043c\u0443 \u043a\u0440\u0430\u044e",
"Middle": "\u041f\u043e\u0441\u0435\u0440\u0435\u0434\u0438\u043d\u0435",
"Bottom": "\u041f\u043e \u043d\u0438\u0436\u043d\u0435\u043c\u0443 \u043a\u0440\u0430\u044e",
"Align Top": "\u0412\u044b\u0440\u043e\u0432\u043d\u044f\u0442\u044c \u043f\u043e \u0432\u0435\u0440\u0445\u043d\u0435\u043c\u0443 \u043a\u0440\u0430\u044e",
"Align Middle": "\u0412\u044b\u0440\u043e\u0432\u043d\u044f\u0442\u044c \u043f\u043e \u0441\u0435\u0440\u0435\u0434\u0438\u043d\u0435",
"Align Bottom": "\u0412\u044b\u0440\u043e\u0432\u043d\u044f\u0442\u044c \u043f\u043e \u043d\u0438\u0436\u043d\u0435\u043c\u0443 \u043a\u0440\u0430\u044e",
"Cell Style": "\u0421\u0442\u0438\u043b\u044c \u044f\u0447\u0435\u0439\u043a\u0438",
// Files
"Upload File": "\u0417\u0430\u0433\u0440\u0443\u0437\u0438\u0442\u044c \u0444\u0430\u0439\u043b",
"Drop file": "\u041f\u0435\u0440\u0435\u043c\u0435\u0441\u0442\u0438\u0442\u0435 \u0441\u044e\u0434\u0430 \u0444\u0430\u0439\u043b",
// Emoticons
"Emoticons": "\u0421\u043c\u0430\u0439\u043b\u0438\u043a\u0438",
"Grinning face": "\u0423\u0445\u043c\u044b\u043b\u043a\u0430 \u043d\u0430 \u043b\u0438\u0446\u0435",
"Grinning face with smiling eyes": "\u0423\u0441\u043c\u0435\u0445\u043d\u0443\u0432\u0448\u0435\u0435\u0441\u044f \u043b\u0438\u0446\u043e \u0441 \u0443\u043b\u044b\u0431\u0430\u044e\u0449\u0438\u043c\u0438\u0441\u044f \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Face with tears of joy": "\u041b\u0438\u0446\u043e \u0441\u043e \u0441\u043b\u0435\u0437\u0430\u043c\u0438 \u0440\u0430\u0434\u043e\u0441\u0442\u0438",
"Smiling face with open mouth": "\u0423\u043b\u044b\u0431\u0430\u044e\u0449\u0435\u0435\u0441\u044f \u043b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c",
"Smiling face with open mouth and smiling eyes": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c \u0438 \u0443\u043b\u044b\u0431\u0430\u044e\u0449\u0438\u0435\u0441\u044f \u0433\u043b\u0430\u0437\u0430",
"Smiling face with open mouth and cold sweat": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c \u0438 \u0445\u043e\u043b\u043e\u0434\u043d\u044b\u0439 \u043f\u043e\u0442",
"Smiling face with open mouth and tightly-closed eyes": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c \u0438 \u043f\u043b\u043e\u0442\u043d\u043e \u0437\u0430\u043a\u0440\u044b\u0442\u044b\u043c\u0438 \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Smiling face with halo": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0433\u0430\u043b\u043e",
"Smiling face with horns": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u0440\u043e\u0433\u0430\u043c\u0438",
"Winking face": "\u043f\u043e\u0434\u043c\u0438\u0433\u0438\u0432\u0430\u044f \u043b\u0438\u0446\u043e",
"Smiling face with smiling eyes": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u0443\u043b\u044b\u0431\u0430\u044e\u0449\u0438\u043c\u0438\u0441\u044f \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Face savoring delicious food": "\u041b\u0438\u0446\u043e \u0441\u043c\u0430\u043a\u0443\u044e\u0449\u0435\u0435 \u0432\u043a\u0443\u0441\u043d\u0443\u044e \u0435\u0434\u0443",
"Relieved face": "\u041e\u0441\u0432\u043e\u0431\u043e\u0436\u0434\u0435\u043d\u044b \u043b\u0438\u0446\u043e",
"Smiling face with heart-shaped eyes": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0432 \u0444\u043e\u0440\u043c\u0435 \u0441\u0435\u0440\u0434\u0446\u0430 \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Smiling face with sunglasses": "\u0423\u043b\u044b\u0431\u0430\u044f\u0441\u044c \u043b\u0438\u0446\u043e \u0441 \u043e\u0447\u043a\u0430\u043c\u0438",
"Smirking face": "\u0423\u0441\u043c\u0435\u0445\u043d\u0443\u0432\u0448\u0438\u0441\u044c \u043b\u0438\u0446\u043e",
"Neutral face": "\u041e\u0431\u044b\u0447\u043d\u044b\u0439 \u043b\u0438\u0446\u043e",
"Expressionless face": "\u041d\u0435\u0432\u044b\u0440\u0430\u0437\u0438\u0442\u0435\u043b\u044c\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Unamused face": "\u041d\u0435 \u0441\u043c\u0435\u0448\u043d\u043e \u043b\u0438\u0446\u043e",
"Face with cold sweat": "\u041b\u0438\u0446\u043e \u0432 \u0445\u043e\u043b\u043e\u0434\u043d\u043e\u043c \u043f\u043e\u0442\u0443",
"Pensive face": "\u0417\u0430\u0434\u0443\u043c\u0447\u0438\u0432\u044b\u0439 \u043b\u0438\u0446\u043e",
"Confused face": "\u0421\u043c\u0443\u0449\u0435\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Confounded face": "\u041f\u043e\u0441\u0442\u044b\u0434\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Kissing face": "\u041f\u043e\u0446\u0435\u043b\u0443\u0438 \u043b\u0438\u0446\u043e",
"Face throwing a kiss": "\u041b\u0438\u0446\u043e \u043e\u0442\u043f\u0440\u0430\u0432\u043b\u044f\u044e\u0449\u0435\u0435 \u043f\u043e\u0446\u0435\u043b\u0443\u0439",
"Kissing face with smiling eyes": "\u041f\u043e\u0446\u0435\u043b\u0443\u0438 \u043b\u0438\u0446\u043e \u0441 \u0443\u043b\u044b\u0431\u0430\u044e\u0449\u0438\u043c\u0438\u0441\u044f \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Kissing face with closed eyes": "\u041f\u043e\u0446\u0435\u043b\u0443\u0438 \u043b\u0438\u0446\u043e \u0441 \u0437\u0430\u043a\u0440\u044b\u0442\u044b\u043c\u0438 \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Face with stuck out tongue": "\u041b\u0438\u0446\u043e \u0441 \u0442\u043e\u0440\u0447\u0430\u0449\u0438\u043c \u044f\u0437\u044b\u043a\u043e\u043c",
"Face with stuck out tongue and winking eye": "\u041b\u0438\u0446\u043e \u0441 \u0442\u043e\u0440\u0447\u0430\u0449\u0438\u043c \u044f\u0437\u044b\u043a\u043e\u043c \u0438 \u043f\u043e\u0434\u043c\u0438\u0433\u0438\u0432\u0430\u044e\u0449\u0438\u043c \u0433\u043b\u0430\u0437\u043e\u043c",
"Face with stuck out tongue and tightly-closed eyes": "\u041b\u0438\u0446\u043e \u0441 \u0442\u043e\u0440\u0447\u0430\u0449\u0438\u043c \u044f\u0437\u044b\u043a\u043e\u043c \u0438 \u043f\u043b\u043e\u0442\u043d\u043e \u0437\u0430\u043a\u0440\u044b\u0442\u044b\u043c\u0438 \u0433\u043b\u0430\u0437\u0430\u043c\u0438",
"Disappointed face": "\u0420\u0430\u0437\u043e\u0447\u0430\u0440\u043e\u0432\u0430\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Worried face": "\u041e\u0431\u0435\u0441\u043f\u043e\u043a\u043e\u0435\u043d\u043d\u044b\u0439 \u043b\u0438\u0446\u043e",
"Angry face": "\u0417\u043b\u043e\u0439 \u043b\u0438\u0446\u043e",
"Pouting face": "\u041f\u0443\u0445\u043b\u044b\u0435 \u043b\u0438\u0446\u043e",
"Crying face": "\u041f\u043b\u0430\u0447\u0443\u0449\u0435\u0435 \u043b\u0438\u0446\u043e",
"Persevering face": "\u041d\u0430\u0441\u0442\u043e\u0439\u0447\u0438\u0432\u0430\u044f \u043b\u0438\u0446\u043e",
"Face with look of triumph": "\u041b\u0438\u0446\u043e \u0441 \u0432\u0438\u0434\u043e\u043c \u0442\u0440\u0438\u0443\u043c\u0444\u0430",
"Disappointed but relieved face": "\u0420\u0430\u0437\u043e\u0447\u0430\u0440\u043e\u0432\u0430\u043d\u043d\u043e\u0435\u002c \u043d\u043e \u0441\u043f\u043e\u043a\u043e\u0439\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Frowning face with open mouth": "\u041d\u0430\u0445\u043c\u0443\u0440\u0435\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c",
"Anguished face": "\u043c\u0443\u0447\u0438\u0442\u0435\u043b\u044c\u043d\u044b\u0439 \u043b\u0438\u0446\u043e",
"Fearful face": "\u041d\u0430\u043f\u0443\u0433\u0430\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Weary face": "\u0423\u0441\u0442\u0430\u043b\u044b\u0439 \u043b\u0438\u0446\u043e",
"Sleepy face": "\u0441\u043e\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Tired face": "\u0423\u0441\u0442\u0430\u043b\u0438 \u043b\u0438\u0446\u043e",
"Grimacing face": "\u0413\u0440\u0438\u043c\u0430\u0441\u0430 \u043d\u0430 \u043b\u0438\u0446\u0435",
"Loudly crying face": "\u0413\u0440\u043e\u043c\u043a\u043e \u043f\u043b\u0430\u0447\u0430 \u043b\u0438\u0446\u043e",
"Face with open mouth": "\u041b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c",
"Hushed face": "\u0417\u0430\u0442\u0438\u0445\u0448\u0438\u0439 \u043b\u0438\u0446\u043e",
"Face with open mouth and cold sweat": "\u041b\u0438\u0446\u043e \u0441 \u043e\u0442\u043a\u0440\u044b\u0442\u044b\u043c \u0440\u0442\u043e\u043c \u0432 \u0445\u043e\u043b\u043e\u0434\u043d\u043e\u043c \u043f\u043e\u0442\u0443",
"Face screaming in fear": "\u041b\u0438\u0446\u043e \u043a\u0440\u0438\u0447\u0430\u0449\u0435\u0435 \u043e\u0442 \u0441\u0442\u0440\u0430\u0445\u0430",
"Astonished face": "\u0423\u0434\u0438\u0432\u043b\u0435\u043d\u043d\u043e\u0435 \u043b\u0438\u0446\u043e",
"Flushed face": "\u041f\u043e\u043a\u0440\u0430\u0441\u043d\u0435\u0432\u0448\u0435\u0435 \u043b\u0438\u0446\u043e",
"Sleeping face": "\u0421\u043f\u044f\u0449\u0430\u044f \u043b\u0438\u0446\u043e",
"Dizzy face": "\u0414\u0438\u0437\u0437\u0438 \u043b\u0438\u0446\u043e",
"Face without mouth": "\u041b\u0438\u0446\u043e \u0431\u0435\u0437 \u0440\u0442\u0430",
"Face with medical mask": "\u041b\u0438\u0446\u043e \u0441 \u043c\u0435\u0434\u0438\u0446\u0438\u043d\u0441\u043a\u043e\u0439 \u043c\u0430\u0441\u043a\u043e\u0439",
// Line breaker
"Break": "\u041d\u043e\u0432\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430",
// Math
"Subscript": "\u041d\u0438\u0436\u043d\u0438\u0439 \u0438\u043d\u0434\u0435\u043a\u0441",
"Superscript": "\u0412\u0435\u0440\u0445\u043d\u0438\u0439 \u0438\u043d\u0434\u0435\u043a\u0441",
// Full screen
"Fullscreen": "\u041d\u0430 \u0432\u0435\u0441\u044c \u044d\u043a\u0440\u0430\u043d",
// Horizontal line
"Insert Horizontal Line": "\u0412\u0441\u0442\u0430\u0432\u0438\u0442\u044c \u0433\u043e\u0440\u0438\u0437\u043e\u043d\u0442\u0430\u043b\u044c\u043d\u0443\u044e \u043b\u0438\u043d\u0438\u044e",
// Clear formatting
"Clear Formatting": "\u0423\u0434\u0430\u043b\u0438\u0442\u044c \u0444\u043e\u0440\u043c\u0430\u0442\u0438\u0440\u043e\u0432\u0430\u043d\u0438\u0435",
// Undo, redo
"Undo": "\u041e\u0442\u043c\u0435\u043d\u0438\u0442\u044c",
"Redo": "\u041f\u043e\u0432\u0442\u043e\u0440\u0438\u0442\u044c",
// Select all
"Select All": "\u0412\u044b\u0431\u0440\u0430\u0442\u044c \u0432\u0441\u0451",
// Code view
"Code View": "\u041f\u0440\u043e\u0441\u043c\u043e\u0442\u0440 \u0048\u0054\u004d\u004c\u002d\u043a\u043e\u0434\u0430",
// Quote
"Quote": "\u0426\u0438\u0442\u0430\u0442\u0430",
"Increase": "\u0423\u0432\u0435\u043b\u0438\u0447\u0435\u043d\u0438\u0435",
"Decrease": "\u0421\u043d\u0438\u0436\u0435\u043d\u0438\u0435",
// Quick Insert
"Quick Insert": "\u0411\u044b\u0441\u0442\u0440\u0430\u044f \u0432\u0441\u0442\u0430\u0432\u043a\u0430",
// Special Characters
"Special Characters": "Специальные символы",
"Latin": "Латинский",
"Greek": "Греческий",
"Cyrillic": "Кириллица",
"Punctuation": "Пунктуация",
"Currency": "Валюта",
"Arrows": "Стрелки",
"Math": "Математический",
"Misc": "Разное",
// Print.
"Print": "Распечатать",
// Spell Checker.
"Spell Checker": "Программа проверки орфографии",
// Help
"Help": "Помогите",
"Shortcuts": "Сочетания",
"Inline Editor": "Встроенный редактор",
"Show the editor": "Показать редактора",
"Common actions": "Общие действия",
"Copy": "Копия",
"Cut": "Порез",
"Paste": "Вставить",
"Basic Formatting": "Базовое форматирование",
"Increase quote level": "Увеличить уровень котировки",
"Decrease quote level": "Уменьшить уровень кавычек",
"Image / Video": "Изображение / видео",
"Resize larger": "Изменить размер",
"Resize smaller": "Уменьшить размер",
"Table": "Таблица",
"Select table cell": "Выбрать ячейку таблицы",
"Extend selection one cell": "Продлить выделение одной ячейки",
"Extend selection one row": "Расширить выделение на одну строку",
"Navigation": "Навигация",
"Focus popup / toolbar": "Фокусное всплывающее окно / панель инструментов",
"Return focus to previous position": "Вернуть фокус на предыдущую позицию",
// Embed.ly
"Embed URL": "Вставить URL-адрес",
"Paste in a URL to embed": "Вставить URL-адрес для встраивания",
// Word Paste.
"The pasted content is coming from a Microsoft Word document. Do you want to keep the format or clean it up?": "Вложенный контент поступает из документа Microsoft Word. вы хотите сохранить формат или очистить его?",
"Keep": "Держать",
"Clean": "Чистый",
"Word Paste Detected": "Обнаружена паста слов"
},
direction: "ltr"
};
}));
| mit |
srinikumar11/symfony2 | vendor/symfony/src/Symfony/Component/Security/Http/RememberMe/AbstractRememberMeServices.php | 9413 | <?php
namespace Symfony\Component\Security\Http\RememberMe;
use Symfony\Component\Security\Core\Exception\AuthenticationException;
use Symfony\Component\Security\Core\User\UserInterface;
use Symfony\Component\Security\Core\Authentication\Token\RememberMeToken;
use Symfony\Component\Security\Http\Logout\LogoutHandlerInterface;
use Symfony\Component\Security\Core\Authentication\Token\TokenInterface;
use Symfony\Component\Security\Core\Exception\UnsupportedUserException;
use Symfony\Component\Security\Core\Exception\UsernameNotFoundException;
use Symfony\Component\Security\Core\Exception\CookieTheftException;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Cookie;
use Symfony\Component\HttpKernel\Log\LoggerInterface;
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Base class implementing the RememberMeServicesInterface
*
* @author Johannes M. Schmitt <[email protected]>
*/
abstract class AbstractRememberMeServices implements RememberMeServicesInterface, LogoutHandlerInterface
{
const COOKIE_DELIMITER = ':';
protected $logger;
protected $options;
private $providerKey;
private $key;
private $userProviders;
/**
* Constructor
*
* @param array $userProviders
* @param string $key
* @param string $providerKey
* @param array $options
* @param LoggerInterface $logger
*/
public function __construct(array $userProviders, $key, $providerKey, array $options = array(), LoggerInterface $logger = null)
{
if (empty($key)) {
throw new \InvalidArgumentException('$key must not be empty.');
}
if (empty($providerKey)) {
throw new \InvalidArgumentException('$providerKey must not be empty.');
}
if (0 === count($userProviders)) {
throw new \InvalidArgumentException('You must provide at least one user provider.');
}
$this->userProviders = $userProviders;
$this->key = $key;
$this->providerKey = $providerKey;
$this->options = $options;
$this->logger = $logger;
}
/**
* Returns the parameter that is used for checking whether remember-me
* services have been requested.
*
* @return string
*/
public function getRememberMeParameter()
{
return $this->options['remember_me_parameter'];
}
public function getKey()
{
return $this->key;
}
/**
* Implementation of RememberMeServicesInterface. Detects whether a remember-me
* cookie was set, decodes it, and hands it to subclasses for further processing.
*
* @param Request $request
* @return TokenInterface
*/
public final function autoLogin(Request $request)
{
if (null === $cookie = $request->cookies->get($this->options['name'])) {
return;
}
if (null !== $this->logger) {
$this->logger->debug('Remember-me cookie detected.');
}
$cookieParts = $this->decodeCookie($cookie);
try {
$user = $this->processAutoLoginCookie($cookieParts, $request);
if (!$user instanceof UserInterface) {
throw new \RuntimeException('processAutoLoginCookie() must return a UserInterface implementation.');
}
if (null !== $this->logger) {
$this->logger->info('Remember-me cookie accepted.');
}
return new RememberMeToken($user, $this->providerKey, $this->key);
} catch (CookieTheftException $theft) {
$this->cancelCookie($request);
throw $theft;
} catch (UsernameNotFoundException $notFound) {
if (null !== $this->logger) {
$this->logger->info('User for remember-me cookie not found.');
}
} catch (UnsupportedUserException $unSupported) {
if (null !== $this->logger) {
$this->logger->warn('User class for remember-me cookie not supported.');
}
} catch (AuthenticationException $invalid) {
if (null !== $this->logger) {
$this->logger->debug('Remember-Me authentication failed: '.$invalid->getMessage());
}
}
$this->cancelCookie($request);
return null;
}
/**
* Implementation for LogoutHandlerInterface. Deletes the cookie.
*
* @param Request $request
* @param Response $response
* @param TokenInterface $token
* @return void
*/
public function logout(Request $request, Response $response, TokenInterface $token)
{
$this->cancelCookie($request);
}
/**
* Implementation for RememberMeServicesInterface. Deletes the cookie when
* an attempted authentication fails.
*
* @param Request $request
* @return void
*/
public final function loginFail(Request $request)
{
$this->cancelCookie($request);
$this->onLoginFail($request);
}
/**
* Implementation for RememberMeServicesInterface. This is called when an
* authentication is successful.
*
* @param Request $request
* @param Response $response
* @param TokenInterface $token The token that resulted in a successful authentication
* @return void
*/
public final function loginSuccess(Request $request, Response $response, TokenInterface $token)
{
if (!$token->getUser() instanceof UserInterface) {
if (null !== $this->logger) {
$this->logger->debug('Remember-me ignores token since it does not contain a UserInterface implementation.');
}
return;
}
if (!$this->isRememberMeRequested($request)) {
if (null !== $this->logger) {
$this->logger->debug('Remember-me was not requested.');
}
return;
}
if (null !== $this->logger) {
$this->logger->debug('Remember-me was requested; setting cookie.');
}
$this->onLoginSuccess($request, $response, $token);
}
/**
* Subclasses should validate the cookie and do any additional processing
* that is required. This is called from autoLogin().
*
* @param array $cookieParts
* @param Request $request
* @return TokenInterface
*/
abstract protected function processAutoLoginCookie(array $cookieParts, Request $request);
protected function onLoginFail(Request $request)
{
}
/**
* This is called after a user has been logged in successfully, and has
* requested remember-me capabilities. The implementation usually sets a
* cookie and possibly stores a persistent record of it.
*
* @param Request $request
* @param Response $response
* @param TokenInterface $token
* @return void
*/
abstract protected function onLoginSuccess(Request $request, Response $response, TokenInterface $token);
protected final function getUserProvider($class)
{
foreach ($this->userProviders as $provider) {
if ($provider->supportsClass($class)) {
return $provider;
}
}
throw new UnsupportedUserException(sprintf('There is no user provider that supports class "%s".', $class));
}
/**
* Decodes the raw cookie value
*
* @param string $rawCookie
* @return array
*/
protected function decodeCookie($rawCookie)
{
return explode(self::COOKIE_DELIMITER, base64_decode($rawCookie));
}
/**
* Encodes the cookie parts
*
* @param array $cookieParts
* @return string
*/
protected function encodeCookie(array $cookieParts)
{
return base64_encode(implode(self::COOKIE_DELIMITER, $cookieParts));
}
/**
* Deletes the remember-me cookie
*
* @param Request $request
* @return void
*/
protected function cancelCookie(Request $request)
{
if (null !== $this->logger) {
$this->logger->debug(sprintf('Clearing remember-me cookie "%s"', $this->options['name']));
}
$request->attributes->set(self::COOKIE_ATTR_NAME, new Cookie($this->options['name'], null, 1, $this->options['path'], $this->options['domain']));
}
/**
* Checks whether remember-me capabilities were requested
*
* @param Request $request
* @return Boolean
*/
protected function isRememberMeRequested(Request $request)
{
if (true === $this->options['always_remember_me']) {
return true;
}
$parameter = $request->request->get($this->options['remember_me_parameter'], null, true);
if ($parameter === null && null !== $this->logger) {
$this->logger->debug(sprintf('Did not send remember-me cookie (remember-me parameter "%s" was not sent).', $this->options['remember_me_parameter']));
}
return $parameter === 'true' || $parameter === 'on' || $parameter === '1' || $parameter === 'yes';
}
}
| mit |
ensemblr/llvm-project-boilerplate | include/llvm/lib/Fuzzer/test/StrstrTest.cpp | 754 | // This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
// Test strstr and strcasestr hooks.
#include <string>
#include <string.h>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
// Windows does not have strcasestr and memmem, so we are not testing them.
#ifdef _WIN32
#define strcasestr strstr
#define memmem(a, b, c, d) true
#endif
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
if (Size < 4) return 0;
std::string s(reinterpret_cast<const char*>(Data), Size);
if (strstr(s.c_str(), "FUZZ") &&
strcasestr(s.c_str(), "aBcD") &&
memmem(s.data(), s.size(), "kuku", 4)
) {
fprintf(stderr, "BINGO\n");
exit(1);
}
return 0;
}
| mit |
doktordirk/aurelia-authentication-loopback-sample | client-cli-es2015/aurelia_project/generators/element.js | 1178 | import {inject} from 'aurelia-dependency-injection';
import {Project, ProjectItem, CLIOptions, UI} from 'aurelia-cli';
@inject(Project, CLIOptions, UI)
export default class ElementGenerator {
constructor(project, options, ui) {
this.project = project;
this.options = options;
this.ui = ui;
}
execute() {
return this.ui
.ensureAnswer(this.options.args[0], 'What would you like to call the custom element?')
.then(name => {
let fileName = this.project.makeFileName(name);
let className = this.project.makeClassName(name);
this.project.elements.add(
ProjectItem.text(`${fileName}.js`, this.generateJSSource(className)),
ProjectItem.text(`${fileName}.html`, this.generateHTMLSource(className))
);
return this.project.commitChanges()
.then(() => this.ui.log(`Created ${fileName}.`));
});
}
generateJSSource(className) {
return `import {bindable} from 'aurelia-framework';
export class ${className} {
@bindable value;
valueChanged(newValue, oldValue) {
}
}`
}
generateHTMLSource(className) {
return `<template>
<h1>\${value}</h1>
</template>`
}
}
| mit |
merongivian/opal | spec/lib/compiler_spec.rb | 4386 | require 'lib/spec_helper'
describe Opal::Compiler do
describe 'requiring' do
it 'calls #require' do
expect_compiled("require 'pippo'").to include('self.$require("pippo")')
end
end
describe 'requirable' do
it 'executes the file' do
expect_compiled("").to include('(function(Opal) {')
expect_compiled("").to end_with("})(Opal);\n")
end
it 'puts the compiled code into "Opal.modules"' do
options = { :requirable => true, :file => "pippo" }
expect_compiled("", options).to include('Opal.modules["pippo"] = function(Opal) {')
expect_compiled("", options).to end_with("};\n")
end
end
it "should compile simple ruby values" do
expect_compiled("3.142").to include("return 3.142")
expect_compiled("123e1").to include("return 1230")
expect_compiled("123E+10").to include("return 1230000000000")
expect_compiled("false").to include("return false")
expect_compiled("true").to include("return true")
expect_compiled("nil").to include("return nil")
end
it "should compile ruby strings" do
expect_compiled('"hello world"').to include('return "hello world"')
expect_compiled('"hello #{100}"').to include('"hello "', '100')
end
it "should compile method calls" do
expect_compiled("self.inspect").to include("$inspect()")
expect_compiled("self.map { |a| a + 10 }").to include("$map")
end
it "should compile constant lookups" do
expect_compiled("Object").to include("Object")
expect_compiled("Array").to include("Array")
end
describe "class names" do
it "generates a named function for class using $ prefix" do
expect_compiled("class Foo; end").to include("function $Foo")
end
end
describe "debugger special method" do
it "generates debugger keyword in javascript" do
expect_compiled("debugger").to include("debugger")
expect_compiled("debugger").to_not include("$debugger")
end
end
describe "DATA special variable" do
it "is not a special case unless __END__ part present in source" do
expect_compiled("DATA").to include("DATA")
expect_compiled("DATA\n__END__").to_not include("DATA")
end
it "DATA gets compiled as a reference to special $__END__ variable" do
expect_compiled("a = DATA\n__END__").to include("a = $__END__")
end
it "causes the compiler to create a reference to special __END__ variable" do
expect_compiled("DATA\n__END__\nFord Perfect").to include("$__END__ = ")
end
it "does not create a reference to __END__ vairbale unless __END__ content present" do
expect_compiled("DATA").to_not include("$__END__ = ")
end
end
describe "escapes in x-strings" do
it "compiles the exscapes directly as appearing in x-strings" do
expect_compiled('`"hello\nworld"`').to include('"hello\nworld"')
expect_compiled('%x{"hello\nworld"}').to include('"hello\nworld"')
end
end
describe 'pre-processing require-ish methods' do
describe '#require' do
it 'parses and resolves the #require argument' do
compiler = compiler_for(%Q{require "#{__FILE__}"})
expect(compiler.requires).to eq([__FILE__])
end
end
describe '#autoload' do
it 'ignores autoload outside of context class' do
compiler = compiler_for(%Q{autoload Whatever, "#{__FILE__}"})
expect(compiler.requires).to eq([])
end
it 'parses and resolves the second #autoload argument' do
compiler = compiler_for(%Q{class Foo; autoload Whatever, "#{__FILE__}"; end})
expect(compiler.requires).to eq([__FILE__])
end
end
describe '#require_relative' do
it 'parses and resolves the #require_relative argument' do
compiler = compiler_for(%Q{require_relative "./#{File.basename(__FILE__)}"}, file: __FILE__)
expect(compiler.requires).to eq([__FILE__])
end
end
describe '#require_tree' do
require 'pathname'
let(:file) { Pathname(__FILE__).join('../fixtures/require_tree_test.rb') }
it 'parses and resolves the #require_tree argument' do
compiler = compiler_for(file.read)
expect(compiler.required_trees).to eq(['../fixtures/required_tree_test'])
end
end
end
def expect_compiled(*args)
expect(Opal::Compiler.new(*args).compile)
end
def compiler_for(*args)
Opal::Compiler.new(*args).tap(&:compile)
end
end
| mit |
jechnd/BlogProject | vendor/symfony/src/Symfony/Component/HttpFoundation/Response.php | 23491 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\HttpFoundation;
/**
* Response represents an HTTP response.
*
* @author Fabien Potencier <[email protected]>
*
* @api
*/
class Response
{
/**
* @var \Symfony\Component\HttpFoundation\ResponseHeaderBag
*/
public $headers;
protected $content;
protected $version;
protected $statusCode;
protected $statusText;
protected $charset;
static public $statusTexts = array(
100 => 'Continue',
101 => 'Switching Protocols',
200 => 'OK',
201 => 'Created',
202 => 'Accepted',
203 => 'Non-Authoritative Information',
204 => 'No Content',
205 => 'Reset Content',
206 => 'Partial Content',
300 => 'Multiple Choices',
301 => 'Moved Permanently',
302 => 'Found',
303 => 'See Other',
304 => 'Not Modified',
305 => 'Use Proxy',
307 => 'Temporary Redirect',
400 => 'Bad Request',
401 => 'Unauthorized',
402 => 'Payment Required',
403 => 'Forbidden',
404 => 'Not Found',
405 => 'Method Not Allowed',
406 => 'Not Acceptable',
407 => 'Proxy Authentication Required',
408 => 'Request Timeout',
409 => 'Conflict',
410 => 'Gone',
411 => 'Length Required',
412 => 'Precondition Failed',
413 => 'Request Entity Too Large',
414 => 'Request-URI Too Long',
415 => 'Unsupported Media Type',
416 => 'Requested Range Not Satisfiable',
417 => 'Expectation Failed',
418 => 'I\'m a teapot',
500 => 'Internal Server Error',
501 => 'Not Implemented',
502 => 'Bad Gateway',
503 => 'Service Unavailable',
504 => 'Gateway Timeout',
505 => 'HTTP Version Not Supported',
);
/**
* Constructor.
*
* @param string $content The response content
* @param integer $status The response status code
* @param array $headers An array of response headers
*
* @api
*/
public function __construct($content = '', $status = 200, $headers = array())
{
$this->headers = new ResponseHeaderBag($headers);
$this->setContent($content);
$this->setStatusCode($status);
$this->setProtocolVersion('1.0');
if (!$this->headers->has('Date')) {
$this->setDate(new \DateTime(null, new \DateTimeZone('UTC')));
}
}
/**
* Returns the response content as it will be sent (with the headers).
*
* @return string The response content
*/
public function __toString()
{
$this->prepare();
return
sprintf('HTTP/%s %s %s', $this->version, $this->statusCode, $this->statusText)."\r\n".
$this->headers."\r\n".
$this->getContent();
}
/**
* Clones the current Response instance.
*/
public function __clone()
{
$this->headers = clone $this->headers;
}
/**
* Prepares the Response before it is sent to the client.
*
* This method tweaks the Response to ensure that it is
* compliant with RFC 2616.
*/
public function prepare()
{
if ($this->isInformational() || in_array($this->statusCode, array(204, 304))) {
$this->setContent('');
}
// Fix Content-Type
$charset = $this->charset ?: 'UTF-8';
if (!$this->headers->has('Content-Type')) {
$this->headers->set('Content-Type', 'text/html; charset='.$charset);
} elseif (0 === strpos($this->headers->get('Content-Type'), 'text/') && false === strpos($this->headers->get('Content-Type'), 'charset')) {
// add the charset
$this->headers->set('Content-Type', $this->headers->get('Content-Type').'; charset='.$charset);
}
// Fix Content-Length
if ($this->headers->has('Transfer-Encoding')) {
$this->headers->remove('Content-Length');
}
}
/**
* Sends HTTP headers.
*/
public function sendHeaders()
{
// headers have already been sent by the developer
if (headers_sent()) {
return;
}
$this->prepare();
// status
header(sprintf('HTTP/%s %s %s', $this->version, $this->statusCode, $this->statusText));
// headers
foreach ($this->headers->all() as $name => $values) {
foreach ($values as $value) {
header($name.': '.$value, false);
}
}
// cookies
foreach ($this->headers->getCookies() as $cookie) {
setcookie($cookie->getName(), $cookie->getValue(), $cookie->getExpiresTime(), $cookie->getPath(), $cookie->getDomain(), $cookie->isSecure(), $cookie->isHttpOnly());
}
}
/**
* Sends content for the current web response.
*/
public function sendContent()
{
echo $this->content;
}
/**
* Sends HTTP headers and content.
*
* @api
*/
public function send()
{
$this->sendHeaders();
$this->sendContent();
if (function_exists('fastcgi_finish_request')) {
fastcgi_finish_request();
}
}
/**
* Sets the response content.
*
* Valid types are strings, numbers, and objects that implement a __toString() method.
*
* @param mixed $content
*
* @api
*/
public function setContent($content)
{
if (null !== $content && !is_string($content) && !is_numeric($content) && !is_callable(array($content, '__toString'))) {
throw new \UnexpectedValueException('The Response content must be a string or object implementing __toString(), "'.gettype($content).'" given.');
}
$this->content = (string) $content;
}
/**
* Gets the current response content.
*
* @return string Content
*
* @api
*/
public function getContent()
{
return $this->content;
}
/**
* Sets the HTTP protocol version (1.0 or 1.1).
*
* @param string $version The HTTP protocol version
*
* @api
*/
public function setProtocolVersion($version)
{
$this->version = $version;
}
/**
* Gets the HTTP protocol version.
*
* @return string The HTTP protocol version
*
* @api
*/
public function getProtocolVersion()
{
return $this->version;
}
/**
* Sets the response status code.
*
* @param integer $code HTTP status code
* @param string $text HTTP status text
*
* @throws \InvalidArgumentException When the HTTP status code is not valid
*
* @api
*/
public function setStatusCode($code, $text = null)
{
$this->statusCode = (int) $code;
if ($this->isInvalid()) {
throw new \InvalidArgumentException(sprintf('The HTTP status code "%s" is not valid.', $code));
}
$this->statusText = false === $text ? '' : (null === $text ? self::$statusTexts[$this->statusCode] : $text);
}
/**
* Retrieves the status code for the current web response.
*
* @return string Status code
*
* @api
*/
public function getStatusCode()
{
return $this->statusCode;
}
/**
* Sets the response charset.
*
* @param string $charset Character set
*
* @api
*/
public function setCharset($charset)
{
$this->charset = $charset;
}
/**
* Retrieves the response charset.
*
* @return string Character set
*
* @api
*/
public function getCharset()
{
return $this->charset;
}
/**
* Returns true if the response is worth caching under any circumstance.
*
* Responses marked "private" with an explicit Cache-Control directive are
* considered uncacheable.
*
* Responses with neither a freshness lifetime (Expires, max-age) nor cache
* validator (Last-Modified, ETag) are considered uncacheable.
*
* @return Boolean true if the response is worth caching, false otherwise
*
* @api
*/
public function isCacheable()
{
if (!in_array($this->statusCode, array(200, 203, 300, 301, 302, 404, 410))) {
return false;
}
if ($this->headers->hasCacheControlDirective('no-store') || $this->headers->getCacheControlDirective('private')) {
return false;
}
return $this->isValidateable() || $this->isFresh();
}
/**
* Returns true if the response is "fresh".
*
* Fresh responses may be served from cache without any interaction with the
* origin. A response is considered fresh when it includes a Cache-Control/max-age
* indicator or Expiration header and the calculated age is less than the freshness lifetime.
*
* @return Boolean true if the response is fresh, false otherwise
*
* @api
*/
public function isFresh()
{
return $this->getTtl() > 0;
}
/**
* Returns true if the response includes headers that can be used to validate
* the response with the origin server using a conditional GET request.
*
* @return Boolean true if the response is validateable, false otherwise
*
* @api
*/
public function isValidateable()
{
return $this->headers->has('Last-Modified') || $this->headers->has('ETag');
}
/**
* Marks the response as "private".
*
* It makes the response ineligible for serving other clients.
*
* @api
*/
public function setPrivate()
{
$this->headers->removeCacheControlDirective('public');
$this->headers->addCacheControlDirective('private');
}
/**
* Marks the response as "public".
*
* It makes the response eligible for serving other clients.
*
* @api
*/
public function setPublic()
{
$this->headers->addCacheControlDirective('public');
$this->headers->removeCacheControlDirective('private');
}
/**
* Returns true if the response must be revalidated by caches.
*
* This method indicates that the response must not be served stale by a
* cache in any circumstance without first revalidating with the origin.
* When present, the TTL of the response should not be overridden to be
* greater than the value provided by the origin.
*
* @return Boolean true if the response must be revalidated by a cache, false otherwise
*
* @api
*/
public function mustRevalidate()
{
return $this->headers->hasCacheControlDirective('must-revalidate') || $this->headers->has('must-proxy-revalidate');
}
/**
* Returns the Date header as a DateTime instance.
*
* @return \DateTime A \DateTime instance
*
* @throws \RuntimeException When the header is not parseable
*
* @api
*/
public function getDate()
{
return $this->headers->getDate('Date');
}
/**
* Sets the Date header.
*
* @param \DateTime $date A \DateTime instance
*
* @api
*/
public function setDate(\DateTime $date)
{
$date->setTimezone(new \DateTimeZone('UTC'));
$this->headers->set('Date', $date->format('D, d M Y H:i:s').' GMT');
}
/**
* Returns the age of the response.
*
* @return integer The age of the response in seconds
*/
public function getAge()
{
if ($age = $this->headers->get('Age')) {
return $age;
}
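// Fall back to the time elapsed since the Date header when no Age header is present.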
return max(time() - $this->getDate()->format('U'), 0);
}
/**
* Marks the response stale by setting the Age header to be equal to the maximum age of the response.
*
* @api
*/
public function expire()
{
if ($this->isFresh()) {
$this->headers->set('Age', $this->getMaxAge());
}
}
/**
* Returns the value of the Expires header as a DateTime instance.
*
* @return \DateTime A DateTime instance
*
* @api
*/
public function getExpires()
{
return $this->headers->getDate('Expires');
}
/**
* Sets the Expires HTTP header with a DateTime instance.
*
* If passed a null value, it removes the header.
*
* @param \DateTime $date A \DateTime instance
*
* @api
*/
public function setExpires(\DateTime $date = null)
{
if (null === $date) {
$this->headers->remove('Expires');
} else {
$date = clone $date;
$date->setTimezone(new \DateTimeZone('UTC'));
$this->headers->set('Expires', $date->format('D, d M Y H:i:s').' GMT');
}
}
/**
* Sets the number of seconds after the time specified in the response's Date
* header when the response should no longer be considered fresh.
*
* First, it checks for a s-maxage directive, then a max-age directive, and then it falls
* back on an expires header. It returns null when no maximum age can be established.
*
* @return integer|null Number of seconds
*
* @api
*/
public function getMaxAge()
{
if ($age = $this->headers->getCacheControlDirective('s-maxage')) {
return $age;
}
if ($age = $this->headers->getCacheControlDirective('max-age')) {
return $age;
}
if (null !== $this->getExpires()) {
return $this->getExpires()->format('U') - $this->getDate()->format('U');
}
return null;
}
/**
* Sets the number of seconds after which the response should no longer be considered fresh.
*
* This method sets the Cache-Control max-age directive.
*
* @param integer $value Number of seconds
*
* @api
*/
public function setMaxAge($value)
{
$this->headers->addCacheControlDirective('max-age', $value);
}
/**
* Sets the number of seconds after which the response should no longer be considered fresh by shared caches.
*
* This method sets the Cache-Control s-maxage directive.
*
* @param integer $value Number of seconds
*
* @api
*/
public function setSharedMaxAge($value)
{
$this->setPublic();
$this->headers->addCacheControlDirective('s-maxage', $value);
}
/**
* Returns the response's time-to-live in seconds.
*
* It returns null when no freshness information is present in the response.
*
* When the response's TTL is <= 0, the response may not be served from cache without first
* revalidating with the origin.
*
* @return integer The TTL in seconds
*
* @api
*/
public function getTtl()
{
if ($maxAge = $this->getMaxAge()) {
return $maxAge - $this->getAge();
}
return null;
}
/**
* Sets the response's time-to-live for shared caches.
*
* This method adjusts the Cache-Control/s-maxage directive.
*
* @param integer $seconds Number of seconds
*
* @api
*/
public function setTtl($seconds)
{
$this->setSharedMaxAge($this->getAge() + $seconds);
}
/**
* Sets the response's time-to-live for private/client caches.
*
* This method adjusts the Cache-Control/max-age directive.
*
* @param integer $seconds Number of seconds
*
* @api
*/
public function setClientTtl($seconds)
{
$this->setMaxAge($this->getAge() + $seconds);
}
/**
* Returns the Last-Modified HTTP header as a DateTime instance.
*
* @return \DateTime A DateTime instance
*
* @api
*/
public function getLastModified()
{
return $this->headers->getDate('Last-Modified');
}
/**
* Sets the Last-Modified HTTP header with a DateTime instance.
*
* If passed a null value, it removes the header.
*
* @param \DateTime $date A \DateTime instance
*
* @api
*/
public function setLastModified(\DateTime $date = null)
{
if (null === $date) {
$this->headers->remove('Last-Modified');
} else {
$date = clone $date;
$date->setTimezone(new \DateTimeZone('UTC'));
$this->headers->set('Last-Modified', $date->format('D, d M Y H:i:s').' GMT');
}
}
/**
* Returns the literal value of the ETag HTTP header.
*
* @return string The ETag HTTP header
*
* @api
*/
public function getEtag()
{
return $this->headers->get('ETag');
}
/**
* Sets the ETag value.
*
* @param string $etag The ETag unique identifier
* @param Boolean $weak Whether you want a weak ETag or not
*
* @api
*/
public function setEtag($etag = null, $weak = false)
{
if (null === $etag) {
$this->headers->remove('Etag');
} else {
if (0 !== strpos($etag, '"')) {
$etag = '"'.$etag.'"';
}
$this->headers->set('ETag', (true === $weak ? 'W/' : '').$etag);
}
}
/**
* Sets the response's cache headers (validation and/or expiration).
*
* Available options are: etag, last_modified, max_age, s_maxage, private, and public.
*
* @param array $options An array of cache options
*
* @api
*/
public function setCache(array $options)
{
if ($diff = array_diff(array_keys($options), array('etag', 'last_modified', 'max_age', 's_maxage', 'private', 'public'))) {
throw new \InvalidArgumentException(sprintf('Response does not support the following options: "%s".', implode('", "', array_values($diff))));
}
if (isset($options['etag'])) {
$this->setEtag($options['etag']);
}
if (isset($options['last_modified'])) {
$this->setLastModified($options['last_modified']);
}
if (isset($options['max_age'])) {
$this->setMaxAge($options['max_age']);
}
if (isset($options['s_maxage'])) {
$this->setSharedMaxAge($options['s_maxage']);
}
if (isset($options['public'])) {
if ($options['public']) {
$this->setPublic();
} else {
$this->setPrivate();
}
}
if (isset($options['private'])) {
if ($options['private']) {
$this->setPrivate();
} else {
$this->setPublic();
}
}
}
/**
* Modifies the response so that it conforms to the rules defined for a 304 status code.
*
* This sets the status, removes the body, and discards any headers
* that MUST NOT be included in 304 responses.
*
* @see http://tools.ietf.org/html/rfc2616#section-10.3.5
*
* @api
*/
public function setNotModified()
{
$this->setStatusCode(304);
$this->setContent(null);
// remove headers that MUST NOT be included with 304 Not Modified responses
foreach (array('Allow', 'Content-Encoding', 'Content-Language', 'Content-Length', 'Content-MD5', 'Content-Type', 'Last-Modified') as $header) {
$this->headers->remove($header);
}
}
/**
* Returns true if the response includes a Vary header.
*
* @return Boolean true if the response includes a Vary header, false otherwise
*
* @api
*/
public function hasVary()
{
return (Boolean) $this->headers->get('Vary');
}
/**
* Returns an array of header names given in the Vary header.
*
* @return array An array of Vary names
*
* @api
*/
public function getVary()
{
if (!$vary = $this->headers->get('Vary')) {
return array();
}
return is_array($vary) ? $vary : preg_split('/[\s,]+/', $vary);
}
/**
* Sets the Vary header.
*
* @param string|array $headers
* @param Boolean $replace Whether to replace the actual value or not (true by default)
*
* @api
*/
public function setVary($headers, $replace = true)
{
$this->headers->set('Vary', $headers, $replace);
}
/**
* Determines if the Response validators (ETag, Last-Modified) match
* a conditional value specified in the Request.
*
* If the Response is not modified, it sets the status code to 304 and
* removes the actual content by calling the setNotModified() method.
*
* @param Request $request A Request instance
*
* @return Boolean true if the Response validators match the Request, false otherwise
*
* @api
*/
public function isNotModified(Request $request)
{
$lastModified = $request->headers->get('If-Modified-Since');
$notModified = false;
if ($etags = $request->getEtags()) {
$notModified = (in_array($this->getEtag(), $etags) || in_array('*', $etags)) && (!$lastModified || $this->headers->get('Last-Modified') == $lastModified);
} elseif ($lastModified) {
$notModified = $lastModified == $this->headers->get('Last-Modified');
}
if ($notModified) {
$this->setNotModified();
}
return $notModified;
}
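/*
 * Illustrative sketch, not part of the original class: the usual conditional
 * GET pattern built on the validators above. $content and $updatedAt are
 * hypothetical values supplied by the caller.
 *
 * $response->setEtag(md5($content));
 * $response->setLastModified($updatedAt);
 * $response->setPublic();
 * if ($response->isNotModified($request)) {
 *     return $response; // 304 Not Modified, body already removed
 * }
 */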
// http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
/**
* @api
*/
public function isInvalid()
{
return $this->statusCode < 100 || $this->statusCode >= 600;
}
/**
* @api
*/
public function isInformational()
{
return $this->statusCode >= 100 && $this->statusCode < 200;
}
/**
* @api
*/
public function isSuccessful()
{
return $this->statusCode >= 200 && $this->statusCode < 300;
}
/**
* @api
*/
public function isRedirection()
{
return $this->statusCode >= 300 && $this->statusCode < 400;
}
/**
* @api
*/
public function isClientError()
{
return $this->statusCode >= 400 && $this->statusCode < 500;
}
/**
* @api
*/
public function isServerError()
{
return $this->statusCode >= 500 && $this->statusCode < 600;
}
/**
* @api
*/
public function isOk()
{
return 200 === $this->statusCode;
}
/**
* @api
*/
public function isForbidden()
{
return 403 === $this->statusCode;
}
/**
* @api
*/
public function isNotFound()
{
return 404 === $this->statusCode;
}
/**
* @api
*/
public function isRedirect($location = null)
{
return in_array($this->statusCode, array(201, 301, 302, 303, 307)) && (null === $location ?: $location == $this->headers->get('Location'));
}
/**
* @api
*/
public function isEmpty()
{
return in_array($this->statusCode, array(201, 204, 304));
}
}
| mit |
hypnoscope/let-s-code-an-indie-game | episode_23/src/graphics/sprite.lua | 582 | local view = require("src.graphics.view")
local sprite = {}
local draw = function (self, view, x, y)
view:inContext(function ()
local xOffset = self.image:getWidth() / 2
local yOffset = self.image:getHeight() / 2
love.graphics.draw(self.image, x - xOffset, y - yOffset, 0)
if DEBUG then
love.graphics.rectangle("fill", x, y, 1, 1)
end
end)
end
sprite.create = function (imagePath)
local inst = {}
inst.image = love.graphics.newImage(imagePath)
inst.image:setFilter('nearest', 'nearest')
inst.draw = draw
return inst
end
return sprite
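-- Illustrative usage sketch, not part of the original file; the image path
-- and the view object passed to draw() are hypothetical:
--
-- local sprite = require("src.graphics.sprite")
-- local player = sprite.create("assets/player.png")
-- player:draw(someView, 100, 50) -- someView must provide an inContext(fn) method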
| mit |
willduff/OpenLiveWriter-1 | src/managed/OpenLiveWriter.Interop/Com/ActiveDocuments/IOleClientSite.cs | 976 | // Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
namespace OpenLiveWriter.Interop.Com.ActiveDocuments
{
/// <summary>
/// Managed interop declaration of the OLE IOleClientSite COM interface.
/// </summary>
[ComImport]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[Guid("00000118-0000-0000-C000-000000000046")]
public interface IOleClientSite
{
void SaveObject();
[PreserveSig]
int GetMoniker(
[In] OLEGETMONIKER dwAssign,
[In] OLEWHICHMK dwWhichMoniker,
[Out] out IMoniker ppmk);
[PreserveSig]
int GetContainer(
[Out] out IOleContainer ppContainer);
void ShowObject();
void OnShowWindow(
[In, MarshalAs(UnmanagedType.Bool)] bool fShow);
[PreserveSig]
int RequestNewObjectLayout();
}
}
| mit |
kenzierocks/SpongeCommon | src/main/java/org/spongepowered/common/profile/query/package-info.java | 1352 | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
@org.spongepowered.api.util.annotation.NonnullByDefault
package org.spongepowered.common.profile.query;
| mit |
alexa-infra/negine | thirdparty/boost-python/libs/python/pyste/tests/nestedUT.py | 600 | # Copyright Bruno da Silva de Oliveira 2003. Use, modification and
# distribution is subject to the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import unittest
from _nested import *
class NestedTest(unittest.TestCase):
def testIt(self):
self.assertEqual(Root.staticXValue, 10)
self.assertEqual(Root.Y.staticYValue, 20)
z = Root.Y.Z()
z.valueZ = 3
self.assertEqual(z.valueZ, 3)
if __name__ == '__main__':
unittest.main()
| mit |
ISKCON-Cultural-Centre/eISKCON | src/app/shared/sdk/services/core/error.service.ts | 623 | /* tslint:disable */
import { Injectable } from '@angular/core';
import { Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';
//import { ErrorObservable } from 'rxjs/observable/ErrorObservable';
import 'rxjs/add/observable/throw';
/**
* Default error handler
*/
@Injectable()
export class ErrorHandler {
// ErrorObservable when rxjs version < rc.5
// ErrorObservable<string> when rxjs version = rc.5
// I'm leaving any for now to avoid breaking apps using both versions
public handleError(error: Response): any {
return Observable.throw(error.json().error || 'Server error');
}
}
| mit |
pail23/openhab2-addons | addons/binding/org.openhab.binding.mihome/src/main/java/org/openhab/binding/mihome/internal/ColorUtil.java | 3164 | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.mihome.internal;
/**
* Color utilities for conversions
*
* @author Patrick Boos - Initial contribution
*/
public class ColorUtil {
/**
* Convert a color temperature in Kelvin to an RGB color.
*
* @param temperature the color temperature in Kelvin
* @return the RGB color packed into a single int as 0xRRGGBB
*/
public static int getRGBFromK(int temperature) {
// Used this: https://gist.github.com/paulkaplan/5184275 at the beginning
// based on http://stackoverflow.com/questions/7229895/display-temperature-as-a-color-with-c
// this answer: http://stackoverflow.com/a/24856307
// (so, just interpretation of pseudocode in Java)
double x = temperature / 1000.0;
if (x > 40) {
x = 40;
}
double red;
double green;
double blue;
// R
if (temperature < 6527) {
red = 1;
} else {
double[] redpoly = { 4.93596077e0, -1.29917429e0, 1.64810386e-01, -1.16449912e-02, 4.86540872e-04,
-1.19453511e-05, 1.59255189e-07, -8.89357601e-10 };
red = poly(redpoly, x);
}
// G
if (temperature < 850) {
green = 0;
} else if (temperature <= 6600) {
double[] greenpoly = { -4.95931720e-01, 1.08442658e0, -9.17444217e-01, 4.94501179e-01, -1.48487675e-01,
2.49910386e-02, -2.21528530e-03, 8.06118266e-05 };
green = poly(greenpoly, x);
} else {
double[] greenpoly = { 3.06119745e0, -6.76337896e-01, 8.28276286e-02, -5.72828699e-03, 2.35931130e-04,
-5.73391101e-06, 7.58711054e-08, -4.21266737e-10 };
green = poly(greenpoly, x);
}
// B
if (temperature < 1900) {
blue = 0;
} else if (temperature < 6600) {
double[] bluepoly = { 4.93997706e-01, -8.59349314e-01, 5.45514949e-01, -1.81694167e-01, 4.16704799e-02,
-6.01602324e-03, 4.80731598e-04, -1.61366693e-05 };
blue = poly(bluepoly, x);
} else {
blue = 1;
}
red = clamp(red, 0, 1);
blue = clamp(blue, 0, 1);
green = clamp(green, 0, 1);
return (int) (red * 255 * 256 * 256) + (int) (green * 255 * 256) + (int) (blue * 255);
}
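// Illustrative sketch, not part of the original binding code: unpacking the
// packed 0xRRGGBB int returned above, e.g. for a warm white of 2700 K.
//
// int rgb = ColorUtil.getRGBFromK(2700);
// int r = (rgb >> 16) & 0xFF;
// int g = (rgb >> 8) & 0xFF;
// int b = rgb & 0xFF;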
public static double poly(double[] coefficients, double x) {
double result = coefficients[0];
double xn = x;
for (int i = 1; i < coefficients.length; i++) {
result += xn * coefficients[i];
xn *= x;
}
return result;
}
public static double clamp(double x, double min, double max) {
if (x < min) {
return min;
}
if (x > max) {
return max;
}
return x;
}
}
| epl-1.0 |
misilot/vufind | module/VuFind/src/VuFind/Search/Options/PluginFactory.php | 2276 | <?php
/**
* Search options plugin factory
*
* PHP version 5
*
* Copyright (C) Villanova University 2010.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category VuFind
* @package Search
* @author Demian Katz <[email protected]>
* @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License
* @link https://vufind.org/wiki/development:plugins:record_drivers Wiki
*/
namespace VuFind\Search\Options;
use Zend\ServiceManager\ServiceLocatorInterface;
/**
* Search options plugin factory
*
* @category VuFind
* @package Search
* @author Demian Katz <[email protected]>
* @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License
* @link https://vufind.org/wiki/development:plugins:record_drivers Wiki
*/
class PluginFactory extends \VuFind\ServiceManager\AbstractPluginFactory
{
/**
* Constructor
*/
public function __construct()
{
$this->defaultNamespace = 'VuFind\Search';
$this->classSuffix = '\Options';
}
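/*
 * Illustrative note (assumption, not confirmed by this file alone): with the
 * namespace and suffix set above, the parent AbstractPluginFactory would
 * resolve a request for "Solr" to the class \VuFind\Search\Solr\Options.
 */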
/**
* Create a service for the specified name.
*
* @param ServiceLocatorInterface $serviceLocator Service locator
* @param string $name Name of service
* @param string $requestedName Unfiltered name of service
*
* @return object
*/
public function createServiceWithName(ServiceLocatorInterface $serviceLocator,
$name, $requestedName
) {
$class = $this->getClassName($name, $requestedName);
return new $class(
$serviceLocator->getServiceLocator()->get('VuFind\Config')
);
}
}
| gpl-2.0 |
TKTL-SDN/SoftOffload-Agent | elements/userlevel/mcastsocket.hh | 5190 | // -*- mode: c++; c-basic-offset: 2 -*-
#ifndef CLICK_MCASTSOCKET_HH
#define CLICK_MCASTSOCKET_HH
#include <click/element.hh>
#include <click/string.hh>
#include <click/task.hh>
#include <click/notifier.hh>
#include <sys/un.h>
CLICK_DECLS
/*
=c
McastSocket(MCASTIP, MCASTPORT [, SOURCEIP] [, SOURCEPORT] [, I<KEYWORDS>])
=s comm
a multicast UDP socket transport (user-level)
=d
Transports packets over multicast UDP. Packets do not flow through
McastSocket elements (i.e., McastSocket is an "x/y" element). Instead,
input packets are sent via multicast to any number of remote hosts or
processes, and packets from the remote hosts or processes are emitted
on the output.
If "SOURCEIP" (and optionally "SOURCEPORT") is specified, the address
is used as the source of outgoing packets, and the multicast traffic is
only sent and received on the interface designated by SOURCEIP.
If "SOURCEIP" is not specified, the system routing table determines the
source address and interface for sending and receiving the multicast
traffic.
Note that since the McastSocket listens on the same multicast address
as it sends to, it will by default receive copies of its own packets.
If a source IP is specified, McastSocket will automatically drop these
looping packets. If a source IP is not specified, disabling the LOOP
option can be an alternative, if no other subscribing processes run on
the same host as the Click process.
McastSocket inputs are agnostic, i.e., they may be either "pull" or
"push". If pushed, packets will block on the underlying socket;
otherwise, the socket will pull packets as it can accept them. For
best performance, place a Notifier element (such as NotifierQueue)
upstream of a "pull" McastSocket.
Keyword arguments are:
=over 8
=item LOOP
Boolean. Whether processes on the local machine (including this one!)
should receive copies of the outgoing traffic (IP_MULTICAST_LOOP).
The default is true.
=item SNAPLEN
Unsigned integer. Maximum length of packets that can be
received. Default is 2048 bytes.
=item SNDBUF
Unsigned integer. Sets the maximum size in bytes of the underlying
socket send buffer. The default value is set by the wmem_default
sysctl and the maximum allowed value is set by the wmem_max sysctl.
=item RCVBUF
Unsigned integer. Sets the maximum size in bytes of the underlying
socket receive buffer. The default value is set by the rmem_default
sysctl and the maximum allowed value is set by the rmem_max sysctl.
=item TIMESTAMP
Boolean. If set, sets the timestamp field on received packets to the
current time. Default is true.
=item HEADROOM
Integer. Per-packet headroom. Defaults to 28.
=back
=e
Start the following Click router:
link :: McastSocket(239.0.0.0, 1234);
arpR :: ARPResponder(10.0.0.1/32 02:02:02:02:02:02) -> link;
arpQ :: ARPQuerier(10.0.0.1, 02:02:02:02:02:02) -> link;
ip :: Strip(14)
-> CheckIPHeader()
-> IPClassifier(icmp and dst host 10.0.0.1)
-> CheckICMPHeader()
-> ICMPPingResponder()
-> arpQ;
// Note DROP_OWN to prevent multicast loop from messing things up.
link
-> HostEtherFilter(02:02:02:02:02:02, DROP_OWN true, DROP_OTHER true)
-> Classifier(
12/0806 20/0001, // ARP query
12/0806 20/0002, // ARP reply
12/0800, // IP
) => arpR, [1]arpQ, ip;
Then grab any Linux live CD image and start a QEMU virtual machine with
the following options:
qemu -net nic -net socket,mcast=239.0.0.0:1234 -cdrom livecd.iso
After adding an IP address, you will be able to ping 10.0.0.1 from the
virtual machine. The use of multicast transparently bridges any number
of Click and QEMU processes on any number of hosts on a LAN.
=a Socket
*/
class McastSocket : public Element {
public:
McastSocket() CLICK_COLD;
~McastSocket() CLICK_COLD;
const char *class_name() const { return "McastSocket"; }
const char *port_count() const { return "0-1/0-1"; }
const char *processing() const { return "a/h"; }
const char *flow_code() const { return "x/y"; }
const char *flags() const { return "S3"; }
virtual int configure(Vector<String> &conf, ErrorHandler *) CLICK_COLD;
virtual int initialize(ErrorHandler *) CLICK_COLD;
virtual void cleanup(CleanupStage) CLICK_COLD;
void add_handlers() CLICK_COLD;
bool run_task(Task *);
void push(int port, Packet*);
private:
Task _task;
int _recv_sock;
int _send_sock;
NotifierSignal _signal; // packet is available to pull()
WritablePacket *_rq; // queue to receive pulled packets
Packet *_wq; // queue to store pulled packet for when sendto() blocks
bool _loop; // IP_MULTICAST_LOOP socket option
bool _timestamp; // set the timestamp on received packets
int _rcvbuf; // maximum socket receive buffer in bytes
int _snaplen; // maximum received packet length
int _sndbuf; // maximum socket send buffer in bytes
unsigned _headroom;
struct sockaddr_in _mcast;
struct sockaddr_in _source;
void cleanup();
void selected(int fd, int mask);
int write_packet(Packet*);
int initialize_socket_error(ErrorHandler *, const char *);
};
CLICK_ENDDECLS
#endif
| gpl-2.0 |
LiJingBiao/desktop-app | public/tinymce/plugins/image/plugin.min.js | 276 | tinymce.PluginManager.add("image",function(i,t){function e(){win=i.windowManager.open({title:"Insert/edit image",file:t+"/dialog.htm",width:550,height:345})}i.addButton("image",{icon:"image",tooltip:"Insert/edit image",onclick:e,stateSelector:"img:not([data-mce-object])"})}); | gpl-2.0 |
anthonyryan1/xbmc | xbmc/windows/GUIWindowSplash.cpp | 1440 | /*
* Copyright (C) 2015-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#include "GUIWindowSplash.h"
#include "guilib/GUIImage.h"
#include "guilib/GUIWindowManager.h"
#include "settings/AdvancedSettings.h"
#include "settings/SettingsComponent.h"
#include "Util.h"
CGUIWindowSplash::CGUIWindowSplash(void) : CGUIWindow(WINDOW_SPLASH, "")
{
m_loadType = LOAD_ON_GUI_INIT;
m_image = nullptr;
}
CGUIWindowSplash::~CGUIWindowSplash(void) = default;
void CGUIWindowSplash::OnInitWindow()
{
if (!CServiceBroker::GetSettingsComponent()->GetAdvancedSettings()->m_splashImage)
return;
m_image = std::unique_ptr<CGUIImage>(new CGUIImage(0, 0, 0, 0, CServiceBroker::GetWinSystem()->GetGfxContext().GetWidth(), CServiceBroker::GetWinSystem()->GetGfxContext().GetHeight(), CTextureInfo(CUtil::GetSplashPath())));
m_image->SetAspectRatio(CAspectRatio::AR_SCALE);
}
void CGUIWindowSplash::Render()
{
CServiceBroker::GetWinSystem()->GetGfxContext().SetRenderingResolution(CServiceBroker::GetWinSystem()->GetGfxContext().GetResInfo(), true);
if (!m_image)
return;
m_image->SetWidth(CServiceBroker::GetWinSystem()->GetGfxContext().GetWidth());
m_image->SetHeight(CServiceBroker::GetWinSystem()->GetGfxContext().GetHeight());
m_image->AllocResources();
m_image->Render();
m_image->FreeResources();
}
| gpl-2.0 |
minhhoangle/firstproject.dev | wp-content/plugins/types/includes/classes/page/listing/abstract.php | 1321 | <?php
/**
* Represents an abstract listing page.
*/
abstract class WPCF_Page_Listing_Abstract extends WPCF_Page_Abstract {
protected function __construct() {
add_filter( 'wpcf_admin_menu_get_subpages', array( $this, 'add_submenu' ) );
}
/**
* Add submenu for the page.
*
* Hooked into the wpcf_admin_menu_get_subpages filter.
*
* @param array $submenus
* @return array
*/
public abstract function add_submenu( $submenus );
/**
* @return string Page slug.
*/
protected abstract function get_page_name();
/**
* Render the page.
*
* @return void
*/
public abstract function page_handler();
/**
* Temporary hack. Insert the submenu item directly after the "User Fields" item.
*
* See the wpcf_admin_menu_get_subpages filter for description of parameters.
*
* @param array[] $all_submenus
* @param array $submenu_to_add
*
* @return array
*/
protected function add_submenu_at_the_end( $all_submenus, $submenu_to_add ) {
$usermeta_index = array_search( 'wpcf-um', array_keys( $all_submenus ) );
$temp = array_slice( $all_submenus, 0, $usermeta_index + 1 );
$temp[ $this->get_page_name() ] = $submenu_to_add;
$all_submenus = array_merge( $temp, array_slice( $all_submenus, $usermeta_index + 1, count( $all_submenus ) ) );
return $all_submenus;
}
} | gpl-2.0 |
ouyangjie/oceanbase | oceanbase_0.4/src/updateserver/ob_schema_mgr.cpp | 16135 | ////===================================================================
//
// ob_schema_mgr.cpp updateserver / Oceanbase
//
// Copyright (C) 2010 Taobao.com, Inc.
//
// Created on 2010-10-08 by Yubai ([email protected])
//
// -------------------------------------------------------------------
//
// Description
//
//
// -------------------------------------------------------------------
//
// Change Log
//
////====================================================================
#include "ob_schema_mgr.h"
namespace oceanbase
{
namespace updateserver
{
using namespace common;
using namespace common::hash;
SchemaMgrImp::SchemaMgrImp() : ref_cnt_(0), name_map_(), id_map_()
{
TBSYS_LOG(INFO, "construct SchemaMgrImp %p", this);
if (0 != name_map_.create(SCHEMA_HASH_SIZE))
{
TBSYS_LOG(ERROR, "create schema map fail");
}
if (0 != id_map_.create(SCHEMA_HASH_SIZE))
{
TBSYS_LOG(ERROR, "create schema map fail");
}
}
SchemaMgrImp::~SchemaMgrImp()
{
TBSYS_LOG(INFO, "deconstruct SchemaMgrImp %p", this);
clear_();
}
void SchemaMgrImp::analyse_column_(const ObSchema &schema, SchemaInfo &schema_info)
{
const ObColumnSchema *column_schema_iter = NULL;
for (column_schema_iter = schema.column_begin();
column_schema_iter != schema.column_end();
column_schema_iter++)
{
if (NULL == column_schema_iter)
{
break;
}
if (ObCreateTimeType == column_schema_iter->get_type())
{
if (OB_INVALID_ID != schema_info.create_time_column_id)
{
TBSYS_LOG(WARN, "there is already a create_time type column id=%lu, cur=%ld",
schema_info.create_time_column_id, column_schema_iter->get_id());
}
else
{
schema_info.create_time_column_id = column_schema_iter->get_id();
}
}
else if (ObModifyTimeType == column_schema_iter->get_type())
{
if (OB_INVALID_ID != schema_info.modify_time_column_id)
{
TBSYS_LOG(WARN, "there is already a modify_time type column id=%lu, cur=%ld",
schema_info.modify_time_column_id, column_schema_iter->get_id());
}
else
{
schema_info.modify_time_column_id = column_schema_iter->get_id();
}
}
}
}
int SchemaMgrImp::set_schema_(const ObSchema &schema)
{
int ret = OB_SUCCESS;
void *memory = NULL;
if (NULL == (memory = ob_malloc(sizeof(SchemaInfo), ObModIds::OB_UPS_SCHEMA)))
{
TBSYS_LOG(WARN, "ob_malloc schema memory fail");
ret = OB_ERROR;
}
else
{
SchemaInfo *pschema_info = NULL;
if (NULL == (pschema_info = new(memory) SchemaInfo()))
{
TBSYS_LOG(WARN, "placement new SchemaInfo fail memory=%p", memory);
ret = OB_ERROR;
}
else
{
ObSchema *pschema = &(pschema_info->schema);
*pschema = schema;
analyse_column_(schema, *pschema_info);
const ObString table_name(0, strlen(pschema->get_table_name()),
const_cast<char*>(pschema->get_table_name()));
int hash_ret = name_map_.set(table_name, pschema_info);
if (HASH_INSERT_SUCC != hash_ret)
{
TBSYS_LOG(WARN, "update name map fail hash_ret=%d", hash_ret);
ret = OB_ERROR;
}
else
{
hash_ret = id_map_.set(pschema->get_table_id(), pschema_info);
if (HASH_INSERT_SUCC != hash_ret)
{
TBSYS_LOG(WARN, "update id map fail hash_ret=%d", hash_ret);
name_map_.erase(table_name);
ret = OB_ERROR;
}
}
}
if (OB_SUCCESS != ret)
{
pschema_info->~SchemaInfo();
ob_free(memory);
}
}
return ret;
}
uint64_t SchemaMgrImp::get_create_time_column_id(const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
SchemaInfo *pschema_info = NULL;
if (HASH_EXIST == id_map_.get(table_id, pschema_info)
&& NULL != pschema_info)
{
ret = pschema_info->create_time_column_id;
}
return ret;
}
uint64_t SchemaMgrImp::get_modify_time_column_id(const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
SchemaInfo *pschema_info = NULL;
if (HASH_EXIST == id_map_.get(table_id, pschema_info)
&& NULL != pschema_info)
{
ret = pschema_info->modify_time_column_id;
}
return ret;
}
ObSchema *SchemaMgrImp::get_schema(const ObString &table_name) const
{
ObSchema *ret = NULL;
SchemaInfo *pschema_info = NULL;
if (HASH_EXIST == name_map_.get(table_name, pschema_info)
&& NULL != pschema_info)
{
ret = &(pschema_info->schema);
}
return ret;
}
ObSchema *SchemaMgrImp::get_schema(const uint64_t table_id) const
{
ObSchema *ret = NULL;
SchemaInfo *pschema_info = NULL;
if (HASH_EXIST == id_map_.get(table_id, pschema_info)
&& NULL != pschema_info)
{
ret = &(pschema_info->schema);
}
return ret;
}
void SchemaMgrImp::clear_()
{
if (0 != ref_cnt_)
{
TBSYS_LOG(WARN, "ref_cnt=%ld do not equal to 0", ref_cnt_);
}
else
{
id_map_t::iterator iter;
for (iter = id_map_.begin(); iter != id_map_.end(); ++iter)
{
if (NULL != iter->second)
{
iter->second->~SchemaInfo();
ob_free(iter->second);
}
}
id_map_.clear();
name_map_.clear();
}
}
int SchemaMgrImp::set_schemas(const ObSchemaManagerWrapper &schema_manager)
{
int ret = OB_SUCCESS;
const ObSchemaManager *com_schema_ptr = schema_manager.get_impl();
if (NULL != com_schema_ptr)
{
com_schema_mgr_ = *com_schema_ptr;
}
const ObSchema *schema_iter = NULL;
for (schema_iter = schema_manager.begin(); schema_iter != schema_manager.end(); schema_iter++)
{
if (NULL == schema_iter
|| OB_SUCCESS != (ret = set_schema_(*schema_iter)))
{
ret = (OB_SUCCESS == ret) ? OB_ERROR : ret;
break;
}
}
return ret;
}
const common::ObSchemaManager &SchemaMgrImp::get_com_schema_mgr() const
{
return com_schema_mgr_;
}
int SchemaMgrImp::build_sstable_schema(sstable::ObSSTableSchema &sstable_schema) const
{
int ret = OB_SUCCESS;
sstable::ObSSTableSchemaColumnDef column_info;
id_map_t::const_iterator iter;
for (iter = id_map_.begin(); iter != id_map_.end(); iter++)
{
SchemaInfo *schema_info = iter->second;
if (NULL == schema_info)
{
TBSYS_LOG(ERROR, "invalid schema_info table_id=%lu", iter->first);
ret = OB_ERROR;
break;
}
else
{
const ObColumnSchema *column_schema = schema_info->schema.column_begin();
for (; column_schema != schema_info->schema.column_end(); column_schema++)
{
if (NULL == column_schema)
{
TBSYS_LOG(ERROR, "invalid column_schema table_id=%lu", iter->first);
ret = OB_ERROR;
break;
}
else
{
column_info.reserved_ = 0;
column_info.column_group_id_ = DEFAULT_COLUMN_GROUP_ID;
column_info.column_name_id_ = column_schema->get_id();
column_info.column_value_type_ = column_schema->get_type();
column_info.table_id_ = iter->first;
if (OB_SUCCESS != (ret = sstable_schema.add_column_def(column_info)))
{
TBSYS_LOG(WARN, "add_column_def fail ret=%d group_id=%lu column_id=%lu value_type=%d table_id=%lu",
ret, column_info.column_group_id_, column_info.column_name_id_,
column_info.column_value_type_, column_info.table_id_);
break;
}
}
}
}
}
return ret;
}
void SchemaMgrImp::dump2text() const
{
const int64_t BUFFER_SIZE = 1024;
char buffer[BUFFER_SIZE];
snprintf(buffer, BUFFER_SIZE, "/tmp/ups_schemas.pid_%d.tim_%ld", getpid(), tbsys::CTimeUtil::getTime());
FILE *fd = fopen(buffer, "w");
if (NULL != fd)
{
id_map_t::const_iterator iter;
for (iter = id_map_.begin(); iter != id_map_.end(); ++iter)
{
if (NULL != iter->second)
{
const ObSchema &s = iter->second->schema;
fprintf(fd, "[TABLE_SCHEMA] table_id=%lu table_type=%d table_name=%s split_pos=%d rowkey_max_length=%d\n",
s.get_table_id(), s.get_table_type(), s.get_table_name(), s.get_split_pos(), s.get_rowkey_max_length());
s.print_info();
const ObColumnSchema *c = NULL;
for (c = s.column_begin(); c != s.column_end(); c++)
{
if (NULL != c)
{
c->print_info();
fprintf(fd, " [COLUMN_SCHEMA] column_id=%lu column_name=%s column_type=%d size=%ld\n",
c->get_id(), c->get_name(), c->get_type(), c->get_size());
}
}
}
}
fclose(fd);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
const SchemaMgr::SchemaHandle SchemaMgr::INVALID_SCHEMA_HANDLE = NULL;
SchemaMgr::SchemaMgr() : cur_schema_mgr_imp_(NULL), has_schema_(false)
{
if (NULL == (cur_schema_mgr_imp_ = new(std::nothrow) SchemaMgrImp()))
{
TBSYS_LOG(ERROR, "new schema mgr imp fail");
}
}
SchemaMgr::~SchemaMgr()
{
if (NULL != cur_schema_mgr_imp_)
{
delete cur_schema_mgr_imp_;
cur_schema_mgr_imp_ = NULL;
}
}
int SchemaMgr::set_schemas(const ObSchemaManagerWrapper &schema_manager)
{
int ret = OB_SUCCESS;
SchemaMgrImp *tmp_schema_mgr_imp = NULL;
if (NULL == (tmp_schema_mgr_imp = new(std::nothrow) SchemaMgrImp()))
{
TBSYS_LOG(WARN, "new tmp schema mgr imp fail");
ret = OB_ERROR;
}
else if (OB_SUCCESS != (ret = tmp_schema_mgr_imp->set_schemas(schema_manager)))
{
TBSYS_LOG(WARN, "set schemas to tmp schema mgr imp fail ret=%d", ret);
delete tmp_schema_mgr_imp;
tmp_schema_mgr_imp = NULL;
}
else
{
rwlock_.wrlock();
SchemaMgrImp *prev_schema_mgr_imp = cur_schema_mgr_imp_;
cur_schema_mgr_imp_ = tmp_schema_mgr_imp;
if (NULL != prev_schema_mgr_imp
&& 0 == prev_schema_mgr_imp->get_ref_cnt())
{
delete prev_schema_mgr_imp;
prev_schema_mgr_imp = NULL;
}
rwlock_.unlock();
has_schema_ = true;
}
return ret;
}
int SchemaMgr::get_schemas(ObSchemaManagerWrapper &schema_manager)
{
int ret = OB_SUCCESS;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
ret = OB_ERROR;
}
else
{
ret = schema_manager.set_impl(cur_schema_mgr_imp_->get_com_schema_mgr());
}
rwlock_.unlock();
return ret;
}
uint64_t SchemaMgr::get_create_time_column_id(const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
ret = cur_schema_mgr_imp_->get_create_time_column_id(table_id);
}
rwlock_.unlock();
return ret;
}
uint64_t SchemaMgr::get_modify_time_column_id(const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
ret = cur_schema_mgr_imp_->get_modify_time_column_id(table_id);
}
rwlock_.unlock();
return ret;
}
ObSchema *SchemaMgr::get_schema(const ObString &table_name, SchemaHandle &schema_handle) const
{
ObSchema *ret = NULL;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
cur_schema_mgr_imp_->inc_ref_cnt();
if (NULL != (ret = cur_schema_mgr_imp_->get_schema(table_name)))
{
schema_handle = cur_schema_mgr_imp_;
}
else
{
cur_schema_mgr_imp_->dec_ref_cnt();
}
}
rwlock_.unlock();
return ret;
}
ObSchema *SchemaMgr::get_schema(const uint64_t table_id, SchemaHandle &schema_handle) const
{
ObSchema *ret = NULL;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
cur_schema_mgr_imp_->inc_ref_cnt();
if (NULL != (ret = cur_schema_mgr_imp_->get_schema(table_id)))
{
schema_handle = cur_schema_mgr_imp_;
}
else
{
cur_schema_mgr_imp_->dec_ref_cnt();
}
}
rwlock_.unlock();
return ret;
}
void SchemaMgr::revert_schema_handle(SchemaHandle &schema_handle)
{
rwlock_.rdlock();
if (NULL != schema_handle)
{
int64_t ref_cnt = schema_handle->dec_ref_cnt();
if (0 > ref_cnt)
{
TBSYS_LOG(WARN, "schema_mgr=%p have a unexpected ref_cnt=%d", schema_handle, ref_cnt);
}
if (0 == ref_cnt
&& schema_handle != cur_schema_mgr_imp_)
{
delete schema_handle;
schema_handle = NULL;
}
}
rwlock_.unlock();
}
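// Illustrative sketch (assumption, not part of the original source): callers
// are expected to pair get_schema() with revert_schema_handle() so that the
// reference count stays balanced and a superseded SchemaMgrImp can be freed.
//
//   SchemaMgr::SchemaHandle handle = SchemaMgr::INVALID_SCHEMA_HANDLE;
//   ObSchema *schema = schema_mgr.get_schema(table_id, handle);
//   if (NULL != schema)
//   {
//     // ... use schema ...
//     schema_mgr.revert_schema_handle(handle);
//   }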
SchemaMgr::SchemaHandle SchemaMgr::get_schema_handle()
{
SchemaHandle ret = NULL;
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
cur_schema_mgr_imp_->inc_ref_cnt();
ret = cur_schema_mgr_imp_;
}
rwlock_.unlock();
return ret;
}
int SchemaMgr::build_sstable_schema(const SchemaHandle schema_handle,
sstable::ObSSTableSchema &sstable_schema) const
{
int ret = OB_SUCCESS;
if (NULL == schema_handle)
{
TBSYS_LOG(WARN, "invalid schema_handle=%p", schema_handle);
ret = OB_ERROR;
}
else
{
schema_handle->build_sstable_schema(sstable_schema);
}
return ret;
}
uint64_t SchemaMgr::get_create_time_column_id(const SchemaHandle schema_handle, const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
if (NULL == schema_handle)
{
TBSYS_LOG(WARN, "invalid schema_handle=%p", schema_handle);
}
else
{
ret = schema_handle->get_create_time_column_id(table_id);
}
return ret;
}
uint64_t SchemaMgr::get_modify_time_column_id(const SchemaHandle schema_handle, const uint64_t table_id) const
{
uint64_t ret = OB_INVALID_ID;
if (NULL == schema_handle)
{
TBSYS_LOG(WARN, "invalid schema_handle=%p", schema_handle);
}
else
{
ret = schema_handle->get_modify_time_column_id(table_id);
}
return ret;
}
void SchemaMgr::dump2text() const
{
rwlock_.rdlock();
if (NULL == cur_schema_mgr_imp_)
{
TBSYS_LOG(WARN, "schema mgr imp null pointer");
}
else
{
cur_schema_mgr_imp_->dump2text();
}
rwlock_.unlock();
}
}
}
| gpl-2.0 |
shenzeyu/3dweb | src/main/webapp/scripts/framework/ckeditor/_source/lang/lt.js | 23620 | /*
Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.lang} object, for the
* Lithuanian language.
*/
/**#@+
@type String
@example
*/
/**
* Constains the dictionary of language entries.
* @namespace
*/
CKEDITOR.lang['lt'] =
{
/**
* The language reading direction. Possible values are "rtl" for
* Right-To-Left languages (like Arabic) and "ltr" for Left-To-Right
* languages (like English).
* @default 'ltr'
*/
dir : 'ltr',
/*
* Screenreader titles. Please note that screenreaders are not always capable
* of reading non-English words. So be careful while translating it.
*/
editorTitle : 'Rich text editor, %1, press ALT 0 for help.', // MISSING
// ARIA descriptions.
toolbar : 'Toolbar', // MISSING
editor : 'Rich Text Editor', // MISSING
// Toolbar buttons without dialogs.
source : 'Šaltinis',
newPage : 'Naujas puslapis',
save : 'Išsaugoti',
preview : 'Peržiūra',
cut : 'Iškirpti',
copy : 'Kopijuoti',
paste : 'Įdėti',
print : 'Spausdinti',
underline : 'Pabrauktas',
bold : 'Pusjuodis',
italic : 'Kursyvas',
selectAll : 'Pažymėti viską',
removeFormat : 'Panaikinti formatą',
strike : 'Perbrauktas',
subscript : 'Apatinis indeksas',
superscript : 'Viršutinis indeksas',
horizontalrule : 'Įterpti horizontalią liniją',
pagebreak : 'Įterpti puslapių skirtuką',
pagebreakAlt : 'Page Break', // MISSING
unlink : 'Panaikinti nuorodą',
undo : 'Atšaukti',
redo : 'Atstatyti',
// Common messages and labels.
common :
{
browseServer : 'Naršyti po serverį',
url : 'URL',
protocol : 'Protokolas',
upload : 'Siųsti',
uploadSubmit : 'Siųsti į serverį',
image : 'Vaizdas',
flash : 'Flash',
form : 'Forma',
checkbox : 'Žymimasis langelis',
radio : 'Žymimoji akutė',
textField : 'Teksto laukas',
textarea : 'Teksto sritis',
hiddenField : 'Nerodomas laukas',
button : 'Mygtukas',
select : 'Atrankos laukas',
imageButton : 'Vaizdinis mygtukas',
notSet : '<nėra nustatyta>',
id : 'Id',
name : 'Vardas',
langDir : 'Teksto kryptis',
langDirLtr : 'Iš kairės į dešinę (LTR)',
langDirRtl : 'Iš dešinės į kairę (RTL)',
langCode : 'Kalbos kodas',
longDescr : 'Ilgas aprašymas URL',
cssClass : 'Stilių lentelės klasės',
advisoryTitle : 'Konsultacinė antraštė',
cssStyle : 'Stilius',
ok : 'OK',
cancel : 'Nutraukti',
close : 'Close', // MISSING
preview : 'Preview', // MISSING
generalTab : 'Bendros savybės',
advancedTab : 'Papildomas',
validateNumberFailed : 'This value is not a number.', // MISSING
confirmNewPage : 'Any unsaved changes to this content will be lost. Are you sure you want to load new page?', // MISSING
confirmCancel : 'Some of the options have been changed. Are you sure to close the dialog?', // MISSING
options : 'Options', // MISSING
target : 'Target', // MISSING
targetNew : 'New Window (_blank)', // MISSING
targetTop : 'Topmost Window (_top)', // MISSING
targetSelf : 'Same Window (_self)', // MISSING
targetParent : 'Parent Window (_parent)', // MISSING
langDirLTR : 'Left to Right (LTR)', // MISSING
langDirRTL : 'Right to Left (RTL)', // MISSING
styles : 'Style', // MISSING
cssClasses : 'Stylesheet Classes', // MISSING
width : 'Plotis',
height : 'Aukštis',
align : 'Lygiuoti',
alignLeft : 'Kairę',
alignRight : 'Dešinę',
alignCenter : 'Centrą',
alignTop : 'Viršūnę',
alignMiddle : 'Vidurį',
alignBottom : 'Apačią',
invalidHeight : 'Height must be a number.', // MISSING
invalidWidth : 'Width must be a number.', // MISSING
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, unavailable</span>' // MISSING
},
contextmenu :
{
options : 'Context Menu Options' // MISSING
},
// Special char dialog.
specialChar :
{
toolbar : 'Įterpti specialų simbolį',
title : 'Pasirinkite specialų simbolį',
options : 'Special Character Options' // MISSING
},
// Link dialog.
link :
{
toolbar : 'Įterpti/taisyti nuorodą',
other : '<kitas>',
menu : 'Taisyti nuorodą',
title : 'Nuoroda',
info : 'Nuorodos informacija',
target : 'Paskirties vieta',
upload : 'Siųsti',
advanced : 'Papildomas',
type : 'Nuorodos tipas',
toUrl : 'URL', // MISSING
toAnchor : 'Žymė šiame puslapyje',
toEmail : 'El.paštas',
targetFrame : '<kadras>',
targetPopup : '<išskleidžiamas langas>',
targetFrameName : 'Paskirties kadro vardas',
targetPopupName : 'Paskirties lango vardas',
popupFeatures : 'Išskleidžiamo lango savybės',
popupResizable : 'Resizable', // MISSING
popupStatusBar : 'Būsenos juosta',
popupLocationBar: 'Adreso juosta',
popupToolbar : 'Mygtukų juosta',
popupMenuBar : 'Meniu juosta',
popupFullScreen : 'Visas ekranas (IE)',
popupScrollBars : 'Slinkties juostos',
popupDependent : 'Priklausomas (Netscape)',
popupLeft : 'Kairė pozicija',
popupTop : 'Viršutinė pozicija',
id : 'Id', // MISSING
langDir : 'Teksto kryptis',
langDirLTR : 'Iš kairės į dešinę (LTR)',
langDirRTL : 'Iš dešinės į kairę (RTL)',
acccessKey : 'Prieigos raktas',
name : 'Vardas',
langCode : 'Teksto kryptis',
tabIndex : 'Tabuliavimo indeksas',
advisoryTitle : 'Konsultacinė antraštė',
advisoryContentType : 'Konsultacinio turinio tipas',
cssClasses : 'Stilių lentelės klasės',
charset : 'Susietų išteklių simbolių lentelė',
styles : 'Stilius',
rel : 'Relationship', // MISSING
selectAnchor : 'Pasirinkite žymę',
anchorName : 'Pagal žymės vardą',
anchorId : 'Pagal žymės Id',
emailAddress : 'El.pašto adresas',
emailSubject : 'Žinutės tema',
emailBody : 'Žinutės turinys',
noAnchors : '(Šiame dokumente žymių nėra)',
noUrl : 'Prašome įvesti nuorodos URL',
noEmail : 'Prašome įvesti el.pašto adresą'
},
// Anchor dialog
anchor :
{
toolbar : 'Įterpti/modifikuoti žymę',
menu : 'Žymės savybės',
title : 'Žymės savybės',
name : 'Žymės vardas',
errorName : 'Prašome įvesti žymės vardą'
},
// List style dialog
list:
{
numberedTitle : 'Numbered List Properties', // MISSING
bulletedTitle : 'Bulleted List Properties', // MISSING
type : 'Type', // MISSING
start : 'Start', // MISSING
validateStartNumber :'List start number must be a whole number.', // MISSING
circle : 'Circle', // MISSING
disc : 'Disc', // MISSING
square : 'Square', // MISSING
none : 'None', // MISSING
notset : '<not set>', // MISSING
armenian : 'Armenian numbering', // MISSING
georgian : 'Georgian numbering (an, ban, gan, etc.)', // MISSING
lowerRoman : 'Lower Roman (i, ii, iii, iv, v, etc.)', // MISSING
upperRoman : 'Upper Roman (I, II, III, IV, V, etc.)', // MISSING
lowerAlpha : 'Lower Alpha (a, b, c, d, e, etc.)', // MISSING
upperAlpha : 'Upper Alpha (A, B, C, D, E, etc.)', // MISSING
lowerGreek : 'Lower Greek (alpha, beta, gamma, etc.)', // MISSING
decimal : 'Decimal (1, 2, 3, etc.)', // MISSING
decimalLeadingZero : 'Decimal leading zero (01, 02, 03, etc.)' // MISSING
},
// Find And Replace Dialog
findAndReplace :
{
title : 'Surasti ir pakeisti',
find : 'Rasti',
replace : 'Pakeisti',
findWhat : 'Surasti tekstą:',
replaceWith : 'Pakeisti tekstu:',
notFoundMsg : 'Nurodytas tekstas nerastas.',
matchCase : 'Skirti didžiąsias ir mažąsias raides',
matchWord : 'Atitikti pilną žodį',
matchCyclic : 'Match cyclic', // MISSING
replaceAll : 'Pakeisti viską',
replaceSuccessMsg : '%1 occurrence(s) replaced.' // MISSING
},
// Table Dialog
table :
{
toolbar : 'Lentelė',
title : 'Lentelės savybės',
menu : 'Lentelės savybės',
deleteTable : 'Šalinti lentelę',
rows : 'Eilutės',
columns : 'Stulpeliai',
border : 'Rėmelio dydis',
widthPx : 'taškais',
widthPc : 'procentais',
widthUnit : 'width unit', // MISSING
cellSpace : 'Tarpas tarp langelių',
cellPad : 'Trapas nuo langelio rėmo iki teksto',
caption : 'Antraštė',
summary : 'Santrauka',
headers : 'Antraštės',
headersNone : 'Nėra',
headersColumn : 'Pirmas stulpelis',
headersRow : 'Pirma eilutė',
headersBoth : 'Abu',
invalidRows : 'Number of rows must be a number greater than 0.', // MISSING
invalidCols : 'Number of columns must be a number greater than 0.', // MISSING
invalidBorder : 'Border size must be a number.', // MISSING
invalidWidth : 'Table width must be a number.', // MISSING
invalidHeight : 'Table height must be a number.', // MISSING
invalidCellSpacing : 'Cell spacing must be a number.', // MISSING
invalidCellPadding : 'Cell padding must be a number.', // MISSING
cell :
{
menu : 'Langelis',
insertBefore : 'Įterpti langelį prieš',
insertAfter : 'Įterpti langelį po',
deleteCell : 'Šalinti langelius',
merge : 'Sujungti langelius',
mergeRight : 'Sujungti su dešine',
mergeDown : 'Sujungti su apačia',
splitHorizontal : 'Skaidyti langelį horizontaliai',
splitVertical : 'Skaidyti langelį vertikaliai',
title : 'Cell Properties', // MISSING
cellType : 'Cell Type', // MISSING
rowSpan : 'Rows Span', // MISSING
colSpan : 'Columns Span', // MISSING
wordWrap : 'Word Wrap', // MISSING
hAlign : 'Horizontal Alignment', // MISSING
vAlign : 'Vertical Alignment', // MISSING
alignBaseline : 'Baseline', // MISSING
bgColor : 'Background Color', // MISSING
borderColor : 'Border Color', // MISSING
data : 'Data', // MISSING
header : 'Header', // MISSING
yes : 'Yes', // MISSING
no : 'No', // MISSING
invalidWidth : 'Cell width must be a number.', // MISSING
invalidHeight : 'Cell height must be a number.', // MISSING
invalidRowSpan : 'Rows span must be a whole number.', // MISSING
invalidColSpan : 'Columns span must be a whole number.', // MISSING
chooseColor : 'Choose' // MISSING
},
row :
{
menu : 'Eilutė',
insertBefore : 'Įterpti eilutę prieš',
insertAfter : 'Įterpti eilutę po',
deleteRow : 'Šalinti eilutes'
},
column :
{
menu : 'Stulpelis',
insertBefore : 'Įterpti stulpelį prieš',
insertAfter : 'Įterpti stulpelį po',
deleteColumn : 'Šalinti stulpelius'
}
},
// Button Dialog.
button :
{
title : 'Mygtuko savybės',
text : 'Tekstas (Reikšmė)',
type : 'Tipas',
typeBtn : 'Mygtukas',
typeSbm : 'Siųsti',
typeRst : 'Išvalyti'
},
// Checkbox and Radio Button Dialogs.
checkboxAndRadio :
{
checkboxTitle : 'Žymimojo langelio savybės',
radioTitle : 'Žymimosios akutės savybės',
value : 'Reikšmė',
selected : 'Pažymėtas'
},
// Form Dialog.
form :
{
title : 'Formos savybės',
menu : 'Formos savybės',
action : 'Veiksmas',
method : 'Metodas',
encoding : 'Encoding' // MISSING
},
// Select Field Dialog.
select :
{
title : 'Atrankos lauko savybės',
selectInfo : 'Informacija',
opAvail : 'Galimos parinktys',
value : 'Reikšmė',
size : 'Dydis',
lines : 'eilučių',
chkMulti : 'Leisti daugeriopą atranką',
opText : 'Tekstas',
opValue : 'Reikšmė',
btnAdd : 'Įtraukti',
btnModify : 'Modifikuoti',
btnUp : 'Aukštyn',
btnDown : 'Žemyn',
btnSetValue : 'Laikyti pažymėta reikšme',
btnDelete : 'Trinti'
},
// Textarea Dialog.
textarea :
{
title : 'Teksto srities savybės',
cols : 'Ilgis',
rows : 'Plotis'
},
// Text Field Dialog.
textfield :
{
title : 'Teksto lauko savybės',
name : 'Vardas',
value : 'Reikšmė',
charWidth : 'Ilgis simboliais',
maxChars : 'Maksimalus simbolių skaičius',
type : 'Tipas',
typeText : 'Tekstas',
typePass : 'Slaptažodis'
},
// Hidden Field Dialog.
hidden :
{
title : 'Nerodomo lauko savybės',
name : 'Vardas',
value : 'Reikšmė'
},
// Image Dialog.
image :
{
title : 'Vaizdo savybės',
titleButton : 'Vaizdinio mygtuko savybės',
menu : 'Vaizdo savybės',
infoTab : 'Vaizdo informacija',
btnUpload : 'Siųsti į serverį',
upload : 'Nusiųsti',
alt : 'Alternatyvus Tekstas',
lockRatio : 'Išlaikyti proporciją',
unlockRatio : 'Unlock Ratio', // MISSING
resetSize : 'Atstatyti dydį',
border : 'Rėmelis',
hSpace : 'Hor.Erdvė',
vSpace : 'Vert.Erdvė',
alertUrl : 'Prašome įvesti vaizdo URL',
linkTab : 'Nuoroda',
button2Img : 'Do you want to transform the selected image button on a simple image?', // MISSING
img2Button : 'Do you want to transform the selected image on a image button?', // MISSING
urlMissing : 'Image source URL is missing.', // MISSING
validateBorder : 'Border must be a whole number.', // MISSING
validateHSpace : 'HSpace must be a whole number.', // MISSING
validateVSpace : 'VSpace must be a whole number.' // MISSING
},
// Flash Dialog
flash :
{
properties : 'Flash savybės',
propertiesTab : 'Properties', // MISSING
title : 'Flash savybės',
chkPlay : 'Automatinis paleidimas',
chkLoop : 'Ciklas',
chkMenu : 'Leisti Flash meniu',
chkFull : 'Allow Fullscreen', // MISSING
scale : 'Mastelis',
scaleAll : 'Rodyti visą',
scaleNoBorder : 'Be rėmelio',
scaleFit : 'Tikslus atitikimas',
access : 'Script Access', // MISSING
accessAlways : 'Always', // MISSING
accessSameDomain: 'Same domain', // MISSING
accessNever : 'Never', // MISSING
alignAbsBottom : 'Absoliučią apačią',
alignAbsMiddle : 'Absoliutų vidurį',
alignBaseline : 'Apatinę liniją',
alignTextTop : 'Teksto viršūnę',
quality : 'Quality', // MISSING
qualityBest : 'Best', // MISSING
qualityHigh : 'High', // MISSING
qualityAutoHigh : 'Auto High', // MISSING
qualityMedium : 'Medium', // MISSING
qualityAutoLow : 'Auto Low', // MISSING
qualityLow : 'Low', // MISSING
windowModeWindow: 'Window', // MISSING
windowModeOpaque: 'Opaque', // MISSING
windowModeTransparent : 'Transparent', // MISSING
windowMode : 'Window mode', // MISSING
flashvars : 'Variables for Flash', // MISSING
bgcolor : 'Fono spalva',
hSpace : 'Hor.Erdvė',
vSpace : 'Vert.Erdvė',
validateSrc : 'Prašome įvesti nuorodos URL',
validateHSpace : 'HSpace must be a number.', // MISSING
validateVSpace : 'VSpace must be a number.' // MISSING
},
// Speller Pages Dialog
spellCheck :
{
toolbar : 'Rašybos tikrinimas',
title : 'Spell Check', // MISSING
notAvailable : 'Sorry, but service is unavailable now.', // MISSING
errorLoading : 'Error loading application service host: %s.', // MISSING
notInDic : 'Žodyne nerastas',
changeTo : 'Pakeisti į',
btnIgnore : 'Ignoruoti',
btnIgnoreAll : 'Ignoruoti visus',
btnReplace : 'Pakeisti',
btnReplaceAll : 'Pakeisti visus',
btnUndo : 'Atšaukti',
noSuggestions : '- Nėra pasiūlymų -',
progress : 'Vyksta rašybos tikrinimas...',
noMispell : 'Rašybos tikrinimas baigtas: Nerasta rašybos klaidų',
noChanges : 'Rašybos tikrinimas baigtas: Nėra pakeistų žodžių',
oneChange : 'Rašybos tikrinimas baigtas: Vienas žodis pakeistas',
manyChanges : 'Rašybos tikrinimas baigtas: Pakeista %1 žodžių',
ieSpellDownload : 'Rašybos tikrinimas neinstaliuotas. Ar Jūs norite jį dabar atsisiųsti?'
},
smiley :
{
toolbar : 'Veideliai',
title : 'Įterpti veidelį',
options : 'Smiley Options' // MISSING
},
elementsPath :
{
eleLabel : 'Elements path', // MISSING
eleTitle : '%1 element' // MISSING
},
numberedlist : 'Numeruotas sąrašas',
bulletedlist : 'Suženklintas sąrašas',
indent : 'Padidinti įtrauką',
outdent : 'Sumažinti įtrauką',
justify :
{
left : 'Lygiuoti kairę',
center : 'Centruoti',
right : 'Lygiuoti dešinę',
block : 'Lygiuoti abi puses'
},
blockquote : 'Citata',
clipboard :
{
title : 'Įdėti',
cutError : 'Jūsų naršyklės saugumo nustatymai neleidžia redaktoriui automatiškai įvykdyti iškirpimo operacijų. Tam prašome naudoti klaviatūrą (Ctrl/Cmd+X).',
copyError : 'Jūsų naršyklės saugumo nustatymai neleidžia redaktoriui automatiškai įvykdyti kopijavimo operacijų. Tam prašome naudoti klaviatūrą (Ctrl/Cmd+C).',
pasteMsg : 'Žemiau esančiame įvedimo lauke įdėkite tekstą, naudodami klaviatūrą (<STRONG>Ctrl/Cmd+V</STRONG>) ir paspauskite mygtuką <STRONG>OK</STRONG>.',
securityMsg : 'Dėl jūsų naršyklės saugumo nustatymų, redaktorius negali tiesiogiai pasiekti laikinosios atminties. Jums reikia nukopijuoti dar kartą į šį langą.',
pasteArea : 'Paste Area' // MISSING
},
pastefromword :
{
confirmCleanup : 'The text you want to paste seems to be copied from Word. Do you want to clean it before pasting?', // MISSING
toolbar : 'Įdėti iš Word',
title : 'Įdėti iš Word',
error : 'It was not possible to clean up the pasted data due to an internal error' // MISSING
},
pasteText :
{
button : 'Įdėti kaip gryną tekstą',
title : 'Įdėti kaip gryną tekstą'
},
templates :
{
button : 'Šablonai',
title : 'Turinio šablonai',
options : 'Template Options', // MISSING
insertOption : 'Pakeisti dabartinį turinį pasirinktu šablonu',
selectPromptMsg : 'Pasirinkite norimą šabloną<br>(<b>Dėmesio!</b> esamas turinys bus prarastas):',
emptyListMsg : '(Šablonų sąrašas tuščias)'
},
showBlocks : 'Rodyti blokus',
stylesCombo :
{
label : 'Stilius',
panelTitle : 'Formatting Styles', // MISSING
panelTitle1 : 'Block Styles', // MISSING
panelTitle2 : 'Inline Styles', // MISSING
panelTitle3 : 'Object Styles' // MISSING
},
format :
{
label : 'Šrifto formatas',
panelTitle : 'Šrifto formatas',
tag_p : 'Normalus',
tag_pre : 'Formuotas',
tag_address : 'Kreipinio',
tag_h1 : 'Antraštinis 1',
tag_h2 : 'Antraštinis 2',
tag_h3 : 'Antraštinis 3',
tag_h4 : 'Antraštinis 4',
tag_h5 : 'Antraštinis 5',
tag_h6 : 'Antraštinis 6',
tag_div : 'Normal (DIV)' // MISSING
},
div :
{
title : 'Create Div Container', // MISSING
toolbar : 'Create Div Container', // MISSING
cssClassInputLabel : 'Stylesheet Classes', // MISSING
styleSelectLabel : 'Style', // MISSING
IdInputLabel : 'Id', // MISSING
languageCodeInputLabel : ' Language Code', // MISSING
inlineStyleInputLabel : 'Inline Style', // MISSING
advisoryTitleInputLabel : 'Advisory Title', // MISSING
langDirLabel : 'Language Direction', // MISSING
langDirLTRLabel : 'Left to Right (LTR)', // MISSING
langDirRTLLabel : 'Right to Left (RTL)', // MISSING
edit : 'Edit Div', // MISSING
remove : 'Remove Div' // MISSING
},
iframe :
{
title : 'IFrame Properties', // MISSING
toolbar : 'IFrame', // MISSING
noUrl : 'Please type the iframe URL', // MISSING
scrolling : 'Enable scrollbars', // MISSING
border : 'Show frame border' // MISSING
},
font :
{
label : 'Šriftas',
voiceLabel : 'Font', // MISSING
panelTitle : 'Šriftas'
},
fontSize :
{
label : 'Šrifto dydis',
voiceLabel : 'Font Size', // MISSING
panelTitle : 'Šrifto dydis'
},
colorButton :
{
textColorTitle : 'Teksto spalva',
bgColorTitle : 'Fono spalva',
panelTitle : 'Colors', // MISSING
auto : 'Automatinis',
more : 'Daugiau spalvų...'
},
colors :
{
'000' : 'Black', // MISSING
'800000' : 'Maroon', // MISSING
'8B4513' : 'Saddle Brown', // MISSING
'2F4F4F' : 'Dark Slate Gray', // MISSING
'008080' : 'Teal', // MISSING
'000080' : 'Navy', // MISSING
'4B0082' : 'Indigo', // MISSING
'696969' : 'Dark Gray', // MISSING
'B22222' : 'Fire Brick', // MISSING
'A52A2A' : 'Brown', // MISSING
'DAA520' : 'Golden Rod', // MISSING
'006400' : 'Dark Green', // MISSING
'40E0D0' : 'Turquoise', // MISSING
'0000CD' : 'Medium Blue', // MISSING
'800080' : 'Purple', // MISSING
'808080' : 'Gray', // MISSING
'F00' : 'Red', // MISSING
'FF8C00' : 'Dark Orange', // MISSING
'FFD700' : 'Gold', // MISSING
'008000' : 'Green', // MISSING
'0FF' : 'Cyan', // MISSING
'00F' : 'Blue', // MISSING
'EE82EE' : 'Violet', // MISSING
'A9A9A9' : 'Dim Gray', // MISSING
'FFA07A' : 'Light Salmon', // MISSING
'FFA500' : 'Orange', // MISSING
'FFFF00' : 'Yellow', // MISSING
'00FF00' : 'Lime', // MISSING
'AFEEEE' : 'Pale Turquoise', // MISSING
'ADD8E6' : 'Light Blue', // MISSING
'DDA0DD' : 'Plum', // MISSING
'D3D3D3' : 'Light Grey', // MISSING
'FFF0F5' : 'Lavender Blush', // MISSING
'FAEBD7' : 'Antique White', // MISSING
'FFFFE0' : 'Light Yellow', // MISSING
'F0FFF0' : 'Honeydew', // MISSING
'F0FFFF' : 'Azure', // MISSING
'F0F8FF' : 'Alice Blue', // MISSING
'E6E6FA' : 'Lavender', // MISSING
'FFF' : 'White' // MISSING
},
scayt :
{
title : 'Spell Check As You Type', // MISSING
opera_title : 'Not supported by Opera', // MISSING
enable : 'Enable SCAYT', // MISSING
disable : 'Disable SCAYT', // MISSING
about : 'About SCAYT', // MISSING
toggle : 'Toggle SCAYT', // MISSING
options : 'Options', // MISSING
langs : 'Languages', // MISSING
moreSuggestions : 'More suggestions', // MISSING
ignore : 'Ignore', // MISSING
ignoreAll : 'Ignore All', // MISSING
addWord : 'Add Word', // MISSING
emptyDic : 'Dictionary name should not be empty.', // MISSING
optionsTab : 'Options', // MISSING
allCaps : 'Ignore All-Caps Words', // MISSING
ignoreDomainNames : 'Ignore Domain Names', // MISSING
mixedCase : 'Ignore Words with Mixed Case', // MISSING
mixedWithDigits : 'Ignore Words with Numbers', // MISSING
languagesTab : 'Languages', // MISSING
dictionariesTab : 'Dictionaries', // MISSING
dic_field_name : 'Dictionary name', // MISSING
dic_create : 'Create', // MISSING
dic_restore : 'Restore', // MISSING
dic_delete : 'Delete', // MISSING
dic_rename : 'Rename', // MISSING
dic_info : 'Initially the User Dictionary is stored in a Cookie. However, Cookies are limited in size. When the User Dictionary grows to a point where it cannot be stored in a Cookie, then the dictionary may be stored on our server. To store your personal dictionary on our server you should specify a name for your dictionary. If you already have a stored dictionary, please type its name and click the Restore button.', // MISSING
aboutTab : 'About' // MISSING
},
about :
{
title : 'About CKEditor', // MISSING
dlgTitle : 'About CKEditor', // MISSING
moreInfo : 'For licensing information please visit our web site:', // MISSING
copy : 'Copyright © $1. All rights reserved.' // MISSING
},
maximize : 'Maximize', // MISSING
minimize : 'Minimize', // MISSING
fakeobjects :
{
anchor : 'Anchor', // MISSING
flash : 'Flash Animation', // MISSING
iframe : 'IFrame', // MISSING
hiddenfield : 'Hidden Field', // MISSING
unknown : 'Unknown Object' // MISSING
},
resize : 'Drag to resize', // MISSING
colordialog :
{
title : 'Select color', // MISSING
options : 'Color Options', // MISSING
highlight : 'Highlight', // MISSING
selected : 'Selected Color', // MISSING
clear : 'Clear' // MISSING
},
toolbarCollapse : 'Collapse Toolbar', // MISSING
toolbarExpand : 'Expand Toolbar', // MISSING
bidi :
{
ltr : 'Text direction from left to right', // MISSING
rtl : 'Text direction from right to left' // MISSING
}
};
| gpl-2.0 |
serviciodeempleo/PaginaSPE | components/com_komento/themes/kuro/profile/activities.php | 820 | <?php
/**
* @package Komento
* @copyright Copyright (C) 2012 Stack Ideas Private Limited. All rights reserved.
* @license GNU/GPL, see LICENSE.php
*
* Komento is free software. This version may have been modified pursuant
* to the GNU General Public License, and as distributed it includes or
* is derivative of works licensed under the GNU General Public License or
* other free or open source software licenses.
* See COPYRIGHT.php for copyright notices and details.
*/
defined('_JEXEC') or die('Restricted access');
if( count( $items ) > 0 ) { ?>
<ul class="kmt-stream for-activity reset-child">
<?php echo $this->fetch( 'profile/activities/list.php' ); ?>
</ul>
<?php echo $this->fetch( 'profile/loadmore.php' );
} else { ?>
<p><?php echo JText::_( 'COM_KOMENTO_NO_ACTIVITIES_FOUND' ); ?></p>
<?php }
| gpl-2.0 |
wisdom-garden/dotcms | src/com/dotmarketing/portlets/campaigns/model/Click.java | 1179 | package com.dotmarketing.portlets.campaigns.model;
import java.io.Serializable;
import com.dotmarketing.util.InodeUtils;
/** @author Hibernate CodeGenerator */
public class Click extends com.dotmarketing.beans.Inode implements Serializable {
private static final long serialVersionUID = 1L;
/** nullable persistent field */
private String link;
/** nullable persistent field */
private int clickCount;
/** default constructor */
public Click() {
super.setType("click");
}
public String getInode() {
if(InodeUtils.isSet(this.inode))
return this.inode;
return "";
}
public void setInode(String inode) {
this.inode = inode;
}
public int getClickCount() {
return this.clickCount;
}
public void setClickCount(int clickCount) {
this.clickCount = clickCount;
}
public void setClickCount(Object clickCount) {
try {
this.clickCount = Integer.parseInt((String) clickCount);
} catch (Exception e) {
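// ignore values that cannot be parsed as an integer and keep the current count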
}
}
/**
* Returns the link.
* @return String
*/
public String getLink() {
return link;
}
/**
* Sets the link.
* @param link The link to set
*/
public void setLink(String link) {
this.link = link;
}
}
| gpl-3.0 |
CBSkarmory/AWGW | src/main/java/info/gridworld/gui/PseudoInfiniteViewport.java | 4321 | /*
* AP(r) Computer Science GridWorld Case Study:
* Copyright(c) 2002-2006 College Entrance Examination Board
* (http://www.collegeboard.com).
*
* This code is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* @author Julie Zelenski
* @author Cay Horstmann
*/
package info.gridworld.gui;
import java.awt.Point;
import java.awt.Dimension;
import java.awt.Color;
import javax.swing.*;
/**
* A <code>PseudoInfiniteViewport</code> is a <code>JViewport</code>
* subclass that translates scroll actions into pan actions across an unbounded
* view. <br />
* This code is not tested on the AP CS A and AB exams. It contains GUI
* implementation details that are not intended to be understood by AP CS
* students.
*/
public class PseudoInfiniteViewport extends JViewport
{
/**
* The Pannable interface contains those methods the view installed in a
* PseudoInfiniteViewport needs to support to enable panning behavior along
* with scrolling.
*/
public interface Pannable
{
void panBy(int hDelta, int vDelta);
boolean isPannableUnbounded();
void showPanTip();
}
private JScrollPane scrollParent;
private Point panPoint = new Point(0, 0);
/**
* Construct a new PseudoInfiniteViewport object for the given scrollpane.
* @param parent the JScrollPane for which this will be the viewport
*/
public PseudoInfiniteViewport(JScrollPane parent)
{
scrollParent = parent;
setBackground(Color.lightGray);
}
/**
* Sets the view position (upper left) to a new point. Overridden from
* JViewport to do a pan, instead of scroll, on an unbounded view.
* @param pt the Point to become the upper left
*/
public void setViewPosition(Point pt)
{
boolean isAdjusting = scrollParent.getVerticalScrollBar()
.getValueIsAdjusting()
|| scrollParent.getHorizontalScrollBar().getValueIsAdjusting();
boolean changed = true;
if (viewIsUnbounded())
{
int hDelta = pt.x - panPoint.x;
int vDelta = pt.y - panPoint.y;
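            // pan only when the scroll moved along a single axis; diagonal or zero moves fall through as "no pan"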
if (hDelta != 0 && vDelta == 0)
getPannableView().panBy(hDelta, vDelta);
else if (vDelta != 0 && hDelta == 0)
getPannableView().panBy(hDelta, vDelta);
else
changed = false; // no pan action was taken
panPoint = pt;
if (!panPoint.equals(getPanCenterPoint()) && !isAdjusting)
{ // needs recentering
panPoint = getPanCenterPoint();
fireStateChanged(); // update scrollbars to match
}
}
else
// ordinary scroll behavior
{
changed = !getViewPosition().equals(pt);
super.setViewPosition(pt);
}
if (changed || isAdjusting)
getPannableView().showPanTip(); // briefly show tip
}
/**
* Returns current view position (upper left). Overridden from JViewport to
* use pan center point for unbounded view.
*/
public Point getViewPosition()
{
return (viewIsUnbounded() ? getPanCenterPoint() : super
.getViewPosition());
}
/**
* Returns current view size. Overridden from JViewport to use preferred
* virtual size for unbounded view.
*/
public Dimension getViewSize()
{
return (viewIsUnbounded() ? getView().getPreferredSize() : super
.getViewSize());
}
// some simple private helpers
private Pannable getPannableView()
{
return (Pannable) getView();
}
private boolean viewIsUnbounded()
{
Pannable p = getPannableView();
return (p != null && p.isPannableUnbounded());
}
private Point getPanCenterPoint()
{
Dimension size = getViewSize();
return new Point(size.width / 2, size.height / 2);
}
}
| gpl-3.0 |
devtsit/opencart | upload/admin/controller/module/special.php | 4509 | <?php
class ControllerModulespecial extends Controller {
private $error = array();
public function index() {
$this->load->language('module/special');
$this->document->setTitle($this->language->get('heading_title'));
$this->load->model('setting/setting');
if (($this->request->server['REQUEST_METHOD'] == 'POST') && $this->validate()) {
$this->model_setting_setting->editSetting('special', $this->request->post);
$this->cache->delete('product');
$this->session->data['success'] = $this->language->get('text_success');
$this->redirect($this->url->link('extension/module', 'token=' . $this->session->data['token'], 'SSL'));
}
$this->data['heading_title'] = $this->language->get('heading_title');
$this->data['text_enabled'] = $this->language->get('text_enabled');
$this->data['text_disabled'] = $this->language->get('text_disabled');
$this->data['text_content_top'] = $this->language->get('text_content_top');
$this->data['text_content_bottom'] = $this->language->get('text_content_bottom');
$this->data['text_column_left'] = $this->language->get('text_column_left');
$this->data['text_column_right'] = $this->language->get('text_column_right');
$this->data['entry_limit'] = $this->language->get('entry_limit');
$this->data['entry_image'] = $this->language->get('entry_image');
$this->data['entry_layout'] = $this->language->get('entry_layout');
$this->data['entry_position'] = $this->language->get('entry_position');
$this->data['entry_status'] = $this->language->get('entry_status');
$this->data['entry_sort_order'] = $this->language->get('entry_sort_order');
$this->data['button_save'] = $this->language->get('button_save');
$this->data['button_cancel'] = $this->language->get('button_cancel');
$this->data['button_add_module'] = $this->language->get('button_add_module');
$this->data['button_remove'] = $this->language->get('button_remove');
if (isset($this->error['warning'])) {
$this->data['error_warning'] = $this->error['warning'];
} else {
$this->data['error_warning'] = '';
}
if (isset($this->error['image'])) {
$this->data['error_image'] = $this->error['image'];
} else {
$this->data['error_image'] = array();
}
$this->data['breadcrumbs'] = array();
$this->data['breadcrumbs'][] = array(
'text' => $this->language->get('text_home'),
'href' => $this->url->link('common/home', 'token=' . $this->session->data['token'], 'SSL'),
'separator' => false
);
$this->data['breadcrumbs'][] = array(
'text' => $this->language->get('text_module'),
'href' => $this->url->link('extension/module', 'token=' . $this->session->data['token'], 'SSL'),
'separator' => ' :: '
);
$this->data['breadcrumbs'][] = array(
'text' => $this->language->get('heading_title'),
'href' => $this->url->link('module/special', 'token=' . $this->session->data['token'], 'SSL'),
'separator' => ' :: '
);
$this->data['action'] = $this->url->link('module/special', 'token=' . $this->session->data['token'], 'SSL');
$this->data['cancel'] = $this->url->link('extension/module', 'token=' . $this->session->data['token'], 'SSL');
$this->data['modules'] = array();
if (isset($this->request->post['special_module'])) {
$this->data['modules'] = $this->request->post['special_module'];
} elseif ($this->config->get('special_module')) {
$this->data['modules'] = $this->config->get('special_module');
}
$this->load->model('design/layout');
$this->data['layouts'] = $this->model_design_layout->getLayouts();
$this->template = 'module/special.tpl';
$this->children = array(
'common/header',
'common/footer'
);
$this->response->setOutput($this->render());
}
private function validate() {
if (!$this->user->hasPermission('modify', 'module/special')) {
$this->error['warning'] = $this->language->get('error_permission');
}
if (isset($this->request->post['special_module'])) {
foreach ($this->request->post['special_module'] as $key => $value) {
if (!$value['image_width'] || !$value['image_height']) {
$this->error['image'][$key] = $this->language->get('error_image');
}
}
}
if (!$this->error) {
return true;
} else {
return false;
}
}
}
?> | gpl-3.0 |
monzonj/core | dotCMS/html/js/dojo/release/dojo/dojox/editor/plugins/nls/zh-tw/Breadcrumb.js | 297 | //>>built
define("dojox/editor/plugins/nls/zh-tw/Breadcrumb",({"nodeActions":"${nodeName} 動作","selectContents":"選取內容","selectElement":"選取元素","deleteElement":"刪除元素","deleteContents":"刪除內容","moveStart":"將游標移到開頭","moveEnd":"將游標移到末尾"})); | gpl-3.0 |
sli7246/canvas_branch | spec/models/media_object_spec.rb | 3051 | #
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper.rb')
describe MediaObject do
context "loading with legacy support" do
it "should load by either media_id or old_media_id" do
course
mo = factory_with_protected_attributes(MediaObject, :media_id => '0_abcdefgh', :old_media_id => '1_01234567', :context => @course)
MediaObject.by_media_id('0_abcdefgh').first.should == mo
MediaObject.by_media_id('1_01234567').first.should == mo
end
it "should raise an error if someone tries to use find_by_media_id" do
lambda { MediaObject.find_by_media_id('fjdksl') }.should raise_error
end
end
describe ".build_media_objects" do
it "should delete attachments created temporarily for import" do
course
folder = Folder.assert_path(CC::CCHelper::MEDIA_OBJECTS_FOLDER, @course)
@a1 = attachment_model(:folder => folder, :uploaded_data => stub_file_data('video1.mp4', nil, 'video/mp4'))
@a2 = attachment_model(:context => @course, :uploaded_data => stub_file_data('video1.mp4', nil, 'video/mp4'))
data = {
:entries => [
{ :originalId => @a1.id, },
{ :originalId => @a2.id, },
],
}
MediaObject.build_media_objects(data, Account.default.id)
@a1.reload.file_state.should == 'deleted'
@a2.reload.file_state.should == 'available'
end
end
describe ".ensure_media_object" do
it "should not create if the media object exists already" do
MediaObject.create!(:context => user, :media_id => "test")
expect {
MediaObject.ensure_media_object("test", {})
}.to change { Delayed::Job.jobs_count(:future) }.by(0)
end
it "should not create if the media id doesn't exist in kaltura" do
MediaObject.expects(:media_id_exists?).with("test").returns(false)
expect {
MediaObject.ensure_media_object("test", {})
run_jobs
}.to change { Delayed::Job.jobs_count(:future) }.by(0)
end
it "should create the media object" do
MediaObject.expects(:media_id_exists?).with("test").returns(true)
expect {
MediaObject.ensure_media_object("test", { :context => user })
run_jobs
}.to change { Delayed::Job.jobs_count(:future) }.by(1)
obj = MediaObject.by_media_id("test").first
obj.context.should == @user
end
end
end
| agpl-3.0 |
NullVoxPopuli/aeonvera | vendor/bundle/ruby/2.4.0/gems/powerpack-0.1.1/lib/powerpack.rb | 170 | require 'powerpack/version'
require 'powerpack/enumerable'
require 'powerpack/hash'
require 'powerpack/numeric'
require 'powerpack/string'
require 'powerpack/array'
| agpl-3.0 |
dnoegel/shopware-aop | engine/Shopware/Models/Order/Status.php | 4404 | <?php
/**
* Shopware 5
* Copyright (c) shopware AG
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License with an additional
* permission and of our proprietary license can be found at and
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "Shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, title and interest in
* our trademarks remain entirely with us.
*/
namespace Shopware\Models\Order;
use Shopware\Components\Model\ModelEntity;
use Doctrine\ORM\Mapping as ORM;
/**
* Shopware order status model represents the status of an order (payment or order state).
*
* The Shopware order status model represents a row of the s_core_states table.
* The s_core_states table has the follows indices:
* <code>
* - PRIMARY KEY (`id`)
* </code>
*
* @ORM\Entity
* @ORM\Table(name="s_core_states")
*/
class Status extends ModelEntity
{
/**
* Consts defining the group
*/
const GROUP_STATE = 'state';
const GROUP_PAYMENT = 'payment';
/**
* @var integer $id
*
* @ORM\Column(name="id", type="integer", nullable=false)
* @ORM\Id
* @ORM\GeneratedValue(strategy="IDENTITY")
*/
private $id;
/**
* @var string $description
*
* @ORM\Column(name="description", type="string", length=255, nullable=false)
*/
private $description;
/**
* @var integer $position
*
* @ORM\Column(name="position", type="integer", nullable=false)
*/
private $position;
/**
* @var string $group
*
* @ORM\Column(name="`group`", type="string", length=25, nullable=false)
*/
private $group;
/**
* @var integer $sendMail
*
* @ORM\Column(name="mail", type="integer", nullable=false)
*/
private $sendMail;
/**
* INVERSE SIDE
* @ORM\OneToOne(targetEntity="Shopware\Models\Mail\Mail", mappedBy="status")
* @var \Shopware\Models\Mail\Mail
*/
protected $mail;
/**
* Get id
*
* @return integer
*/
public function getId()
{
return $this->id;
}
/**
* Set description
*
* @param string $description
* @return Status
*/
public function setDescription($description)
{
$this->description = $description;
return $this;
}
/**
* Get description
*
* @return string
*/
public function getDescription()
{
return $this->description;
}
/**
* Set position
*
* @param integer $position
* @return Status
*/
public function setPosition($position)
{
$this->position = $position;
return $this;
}
/**
* Get position
*
* @return integer
*/
public function getPosition()
{
return $this->position;
}
/**
* Set group
*
* @param string $group
* @return Status
*/
public function setGroup($group)
{
$this->group = $group;
return $this;
}
/**
* Get group
*
* @return string
*/
public function getGroup()
{
return $this->group;
}
/**
* Set sendMail
*
* @param integer $sendMail
* @return Status
*/
public function setSendMail($sendMail)
{
$this->sendMail = $sendMail;
return $this;
}
/**
* Get sendMail
*
* @return integer
*/
public function getSendMail()
{
return $this->sendMail;
}
/**
* @return \Shopware\Models\Mail\Mail
*/
public function getMail()
{
return $this->mail;
}
/**
* @param \Shopware\Models\Mail\Mail|array|null $mail
* @return \Shopware\Models\Mail\Mail
*/
public function setMail($mail)
{
$this->mail = $mail;
return $this;
}
}
| agpl-3.0 |
indictranstech/biggift-erpnext | erpnext/controllers/buying_controller.py | 10127 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, msgprint
from frappe.utils import flt
from erpnext.setup.utils import get_company_currency
from erpnext.accounts.party import get_party_details
from erpnext.stock.get_item_details import get_conversion_factor
from erpnext.controllers.stock_controller import StockController
class BuyingController(StockController):
def __setup__(self):
if hasattr(self, "taxes"):
self.print_templates = {
"taxes": "templates/print_formats/includes/taxes.html"
}
def get_feed(self):
return _("From {0} | {1} {2}").format(self.supplier_name, self.currency,
self.grand_total)
def validate(self):
super(BuyingController, self).validate()
if getattr(self, "supplier", None) and not self.supplier_name:
self.supplier_name = frappe.db.get_value("Supplier", self.supplier, "supplier_name")
self.is_item_table_empty()
self.set_qty_as_per_stock_uom()
self.validate_stock_or_nonstock_items()
self.validate_warehouse()
def set_missing_values(self, for_validate=False):
super(BuyingController, self).set_missing_values(for_validate)
self.set_supplier_from_item_default()
self.set_price_list_currency("Buying")
# set contact and address details for supplier, if they are not mentioned
if getattr(self, "supplier", None):
self.update_if_missing(get_party_details(self.supplier, party_type="Supplier"))
self.set_missing_item_details()
def set_supplier_from_item_default(self):
if self.meta.get_field("supplier") and not self.supplier:
for d in self.get("items"):
supplier = frappe.db.get_value("Item", d.item_code, "default_supplier")
if supplier:
self.supplier = supplier
break
def validate_stock_or_nonstock_items(self):
if self.meta.get_field("taxes") and not self.get_stock_items():
tax_for_valuation = [d.account_head for d in self.get("taxes")
if d.category in ["Valuation", "Valuation and Total"]]
if tax_for_valuation:
frappe.throw(_("Tax Category can not be 'Valuation' or 'Valuation and Total' as all items are non-stock items"))
def set_total_in_words(self):
from frappe.utils import money_in_words
company_currency = get_company_currency(self.company)
if self.meta.get_field("base_in_words"):
self.base_in_words = money_in_words(self.base_grand_total, company_currency)
if self.meta.get_field("in_words"):
self.in_words = money_in_words(self.grand_total, self.currency)
# update valuation rate
def update_valuation_rate(self, parentfield):
"""
item_tax_amount is the total tax amount applied on that item
stored for valuation
TODO: rename item_tax_amount to valuation_tax_amount
"""
stock_items = self.get_stock_items()
stock_items_qty, stock_items_amount = 0, 0
last_stock_item_idx = 1
for d in self.get(parentfield):
if d.item_code and d.item_code in stock_items:
stock_items_qty += flt(d.qty)
stock_items_amount += flt(d.base_net_amount)
last_stock_item_idx = d.idx
total_valuation_amount = sum([flt(d.base_tax_amount_after_discount_amount) for d in self.get("taxes")
if d.category in ["Valuation", "Valuation and Total"]])
valuation_amount_adjustment = total_valuation_amount
for i, item in enumerate(self.get(parentfield)):
if item.item_code and item.qty and item.item_code in stock_items:
item_proportion = flt(item.base_net_amount) / stock_items_amount if stock_items_amount \
else flt(item.qty) / stock_items_qty
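			# the last stock item absorbs any remaining adjustment so the distributed valuation tax sums up exactly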
if i == (last_stock_item_idx - 1):
item.item_tax_amount = flt(valuation_amount_adjustment,
self.precision("item_tax_amount", item))
else:
item.item_tax_amount = flt(item_proportion * total_valuation_amount,
self.precision("item_tax_amount", item))
valuation_amount_adjustment -= item.item_tax_amount
self.round_floats_in(item)
if flt(item.conversion_factor)==0:
item.conversion_factor = get_conversion_factor(item.item_code, item.uom).get("conversion_factor") or 1.0
qty_in_stock_uom = flt(item.qty * item.conversion_factor)
rm_supp_cost = flt(item.rm_supp_cost) if self.doctype=="Purchase Receipt" else 0.0
landed_cost_voucher_amount = flt(item.landed_cost_voucher_amount) \
if self.doctype == "Purchase Receipt" else 0.0
item.valuation_rate = ((item.base_net_amount + item.item_tax_amount + rm_supp_cost
+ landed_cost_voucher_amount) / qty_in_stock_uom)
else:
item.valuation_rate = 0.0
def validate_for_subcontracting(self):
if not self.is_subcontracted and self.sub_contracted_items:
frappe.throw(_("Please enter 'Is Subcontracted' as Yes or No"))
if self.is_subcontracted == "Yes":
if self.doctype == "Purchase Receipt" and not self.supplier_warehouse:
frappe.throw(_("Supplier Warehouse mandatory for sub-contracted Purchase Receipt"))
for item in self.get("items"):
if item in self.sub_contracted_items and not item.bom:
frappe.throw(_("Please select BOM in BOM field for Item {0}").format(item.item_code))
else:
for item in self.get("items"):
if item.bom:
item.bom = None
def create_raw_materials_supplied(self, raw_material_table):
if self.is_subcontracted=="Yes":
parent_items = []
for item in self.get("items"):
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = 0.0
if item.item_code in self.sub_contracted_items:
self.update_raw_materials_supplied(item, raw_material_table)
if [item.item_code, item.name] not in parent_items:
parent_items.append([item.item_code, item.name])
self.cleanup_raw_materials_supplied(parent_items, raw_material_table)
elif self.doctype == "Purchase Receipt":
for item in self.get("items"):
item.rm_supp_cost = 0.0
def update_raw_materials_supplied(self, item, raw_material_table):
bom_items = self.get_items_from_bom(item.item_code, item.bom)
raw_materials_cost = 0
for bom_item in bom_items:
# check if exists
exists = 0
for d in self.get(raw_material_table):
if d.main_item_code == item.item_code and d.rm_item_code == bom_item.item_code \
and d.reference_name == item.name:
rm, exists = d, 1
break
if not exists:
rm = self.append(raw_material_table, {})
required_qty = flt(bom_item.qty_consumed_per_unit) * flt(item.qty) * flt(item.conversion_factor)
rm.reference_name = item.name
rm.bom_detail_no = bom_item.name
rm.main_item_code = item.item_code
rm.rm_item_code = bom_item.item_code
rm.stock_uom = bom_item.stock_uom
rm.required_qty = required_qty
rm.conversion_factor = item.conversion_factor
if self.doctype == "Purchase Receipt":
rm.consumed_qty = required_qty
rm.description = bom_item.description
if item.batch_no and not rm.batch_no:
rm.batch_no = item.batch_no
# get raw materials rate
if self.doctype == "Purchase Receipt":
from erpnext.stock.utils import get_incoming_rate
rm.rate = get_incoming_rate({
"item_code": bom_item.item_code,
"warehouse": self.supplier_warehouse,
"posting_date": self.posting_date,
"posting_time": self.posting_time,
"qty": -1 * required_qty,
"serial_no": rm.serial_no
})
if not rm.rate:
from erpnext.stock.stock_ledger import get_valuation_rate
rm.rate = get_valuation_rate(bom_item.item_code, self.supplier_warehouse)
else:
rm.rate = bom_item.rate
rm.amount = required_qty * flt(rm.rate)
raw_materials_cost += flt(rm.amount)
if self.doctype == "Purchase Receipt":
item.rm_supp_cost = raw_materials_cost
def cleanup_raw_materials_supplied(self, parent_items, raw_material_table):
"""Remove all those child items which are no longer present in main item table"""
delete_list = []
for d in self.get(raw_material_table):
if [d.main_item_code, d.reference_name] not in parent_items:
# mark for deletion from doclist
delete_list.append(d)
# delete from doclist
if delete_list:
rm_supplied_details = self.get(raw_material_table)
self.set(raw_material_table, [])
for d in rm_supplied_details:
if d not in delete_list:
self.append(raw_material_table, d)
def get_items_from_bom(self, item_code, bom):
bom_items = frappe.db.sql("""select t2.item_code,
t2.qty / ifnull(t1.quantity, 1) as qty_consumed_per_unit,
t2.rate, t2.stock_uom, t2.name, t2.description
from `tabBOM` t1, `tabBOM Item` t2, tabItem t3
where t2.parent = t1.name and t1.item = %s
and t1.docstatus = 1 and t1.is_active = 1 and t1.name = %s
and t2.item_code = t3.name and t3.is_stock_item = 1""", (item_code, bom), as_dict=1)
if not bom_items:
msgprint(_("Specified BOM {0} does not exist for Item {1}").format(bom, item_code), raise_exception=1)
return bom_items
@property
def sub_contracted_items(self):
if not hasattr(self, "_sub_contracted_items"):
self._sub_contracted_items = []
item_codes = list(set(item.item_code for item in
self.get("items")))
if item_codes:
self._sub_contracted_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_sub_contracted_item=1""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._sub_contracted_items
@property
def purchase_items(self):
if not hasattr(self, "_purchase_items"):
self._purchase_items = []
item_codes = list(set(item.item_code for item in
self.get("items")))
if item_codes:
self._purchase_items = [r[0] for r in frappe.db.sql("""select name
from `tabItem` where name in (%s) and is_purchase_item='Yes'""" % \
(", ".join((["%s"]*len(item_codes))),), item_codes)]
return self._purchase_items
def is_item_table_empty(self):
if not len(self.get("items")):
frappe.throw(_("Item table can not be blank"))
def set_qty_as_per_stock_uom(self):
for d in self.get("items"):
if d.meta.get_field("stock_qty"):
if not d.conversion_factor:
frappe.throw(_("Row {0}: Conversion Factor is mandatory").format(d.idx))
d.stock_qty = flt(d.qty) * flt(d.conversion_factor)
| agpl-3.0 |
guttulus/CRM-Guttulus- | application/views/notifier/ticket_change.php | 2188 | ------------------------------------------------------------
<?php echo lang('do not reply warning') ?>
------------------------------------------------------------
<?php echo lang('new comment posted', $ticket->getSummary()) ?>.
<?php
/* Send the message body unless the configuration file specifically says not to:
** to prevent sending the body of email messages add the following to config.php
** For config.php: define('SHOW_TICKET_BODY', false);
*/
if ((!defined('SHOW_TICKET_BODY')) or (SHOW_TICKET_BODY == true)) {
echo "\n----------------\n";
?>
<?php $changes = $changeset->getChanges();
if (is_array($changes)) {
foreach ($changes as $change) {
if (trim($change->getFromData()) == "") {
if ($change->dataNeedsTranslation()) {
echo strip_tags(lang('change set to', lang($change->getType()), lang($change->getToData())));
} else {
echo strip_tags(lang('change set to', lang($change->getType()), $change->getToData()));
} // if
} elseif (trim($change->getToData()) == "") {
if ($change->dataNeedsTranslation()) {
echo strip_tags(lang('change from to', lang($change->getType()), lang($change->getFromData()), lang('n/a')));
} else {
echo strip_tags(lang('change from to', lang($change->getType()), $change->getFromData(), lang('n/a')));
} // if
} else {
if ($change->dataNeedsTranslation()) {
echo strip_tags(lang('change from to', lang($change->getType()), lang($change->getFromData()), lang($change->getToData())));
} else {
echo strip_tags(lang('change from to', lang($change->getType()), $change->getFromData(), $change->getToData()));
} // if
} // if
echo "\n";
} // foreach
} // if
echo "\n";
if (trim($changeset->getComment())) {
echo lang('comment').":\n";
echo $changeset->getComment();
} // if
echo "\n----------------\n\n";
}
?>
<?php echo lang('view new ticket') ?>:
- <?php echo str_replace('&', '&', $ticket->getViewUrl()) ?>
Company: <?php echo owner_company()->getName() ?>
Project: <?php echo $ticket->getProject()->getName() ?>
--
<?php echo ROOT_URL ?> | agpl-3.0 |
Livit/Livit.Learn.EdX | lms/static/js/groups/views/cohort_discussions_course_wide.js | 4516 | ;(function (define) {
'use strict';
define(['jquery', 'underscore', 'backbone', 'gettext', 'js/groups/views/cohort_discussions',
'edx-ui-toolkit/js/utils/html-utils'],
function ($, _, Backbone, gettext, CohortDiscussionConfigurationView, HtmlUtils) {
var CourseWideDiscussionsView = CohortDiscussionConfigurationView.extend({
events: {
'change .check-discussion-subcategory-course-wide': 'discussionCategoryStateChanged',
'click .cohort-course-wide-discussions-form .action-save': 'saveCourseWideDiscussionsForm'
},
initialize: function (options) {
this.template = HtmlUtils.template($('#cohort-discussions-course-wide-tpl').text());
this.cohortSettings = options.cohortSettings;
},
render: function () {
HtmlUtils.setHtml(this.$('.cohort-course-wide-discussions-nav'), this.template({
courseWideTopicsHtml: this.getCourseWideDiscussionsHtml(
this.model.get('course_wide_discussions')
)
}));
this.setDisabled(this.$('.cohort-course-wide-discussions-form .action-save'), true);
},
/**
* Returns the html list for course-wide discussion topics.
* @param {object} courseWideDiscussions - course-wide discussions object from server.
* @returns {HtmlSnippet} - HTML list for course-wide discussion topics.
*/
getCourseWideDiscussionsHtml: function (courseWideDiscussions) {
var subCategoryTemplate = HtmlUtils.template($('#cohort-discussions-subcategory-tpl').html()),
entries = courseWideDiscussions.entries,
children = courseWideDiscussions.children;
return HtmlUtils.joinHtml.apply(this, _.map(children, function (name) {
var entry = entries[name];
return subCategoryTemplate({
name: name,
id: entry.id,
is_cohorted: entry.is_cohorted,
type: 'course-wide'
});
}));
},
/**
* Enables the save button for course-wide discussions.
*/
discussionCategoryStateChanged: function(event) {
event.preventDefault();
this.setDisabled(this.$('.cohort-course-wide-discussions-form .action-save'), false);
},
/**
* Sends the cohorted_course_wide_discussions to the server and renders the view.
*/
saveCourseWideDiscussionsForm: function (event) {
event.preventDefault();
var self = this,
courseWideCohortedDiscussions = self.getCohortedDiscussions(
'.check-discussion-subcategory-course-wide:checked'
),
fieldData = { cohorted_course_wide_discussions: courseWideCohortedDiscussions };
self.saveForm(self.$('.course-wide-discussion-topics'),fieldData)
.done(function () {
self.model.fetch()
.done(function () {
self.render();
self.showMessage(gettext('Your changes have been saved.'), self.$('.course-wide-discussion-topics'));
}).fail(function() {
var errorMessage = gettext("We've encountered an error. Refresh your browser and then try again.");
self.showMessage(errorMessage, self.$('.course-wide-discussion-topics'), 'error')
});
});
}
});
return CourseWideDiscussionsView;
});
}).call(this, define || RequireJS.define);
| agpl-3.0 |
j1fig/fiware-orion | scripts/managedb/list-entities.py | 4483 | #!/usr/bin/python
# -*- coding: latin-1 -*-
# Copyright 2014 Telefonica Investigacion y Desarrollo, S.A.U
#
# This file is part of Orion Context Broker.
#
# Orion Context Broker is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Orion Context Broker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
#
# For those usages not covered by this license please contact with
# iot_support at tid dot es
from pymongo import MongoClient, DESCENDING
from datetime import datetime, timedelta
from sys import argv
def usage():
print ' Usage: ./list-entities.py <db> <hour|day|week|month|all> [entity_filter] '
print ' Example ./list-entities.py orion week'
print ' Example ./list-entities.py orion week TEST_SENSOR'
def entityString(entity):
s = entity['id']
if (entity.has_key('type')):
s += ' (' + entity['type'] + ')'
return s
# This script can be easily adapted to use the creation date instead of the modification date
# by just changing the following variable to 'creDate'
refDate = 'modDate'
if argv[1] == "-u":
usage()
exit(0)
if 3 <= len(argv) <= 4:
db = argv[1]
range = argv[2]
else:
print 'Wrong number of arguments'
usage()
exit(1)
# Check range string
if (range != "hour" and range != "day" and range != "week" and range != "month" and range != "all"):
print 'Wrong range string: ' + range
usage()
exit(1)
# Optional argument: filter
query = {}
if len(argv) == 4:
query['_id.id'] = {'$regex': argv[3]}
client = MongoClient('localhost', 27017)
col = client[db]['entities']
now = datetime.now()
last_hour = now - timedelta(hours=1)
last_day = now - timedelta(days=1)
last_week = now - timedelta (days=7)
last_month = now - timedelta (days=30)
day_mark_printed = False
week_mark_printed = False
month_mark_printed = False
old_mark_printed = False
query[refDate] = {'$exists': True}
docs = col.find(query)
if (docs.count() == 0):
print "no entities"
exit(0)
# First pass: count documents in each range
n_hour = 0
n_day = 0
n_week = 0
n_month = 0
n_old = 0
for doc in docs:
date = datetime.fromtimestamp(int(doc[refDate]))
if (date < last_month):
n_old += 1
elif (date < last_week):
n_month += 1
elif (date < last_day):
n_week += 1
elif (date < last_hour):
n_day += 1
else:
n_hour += 1
# Second pass: printing entity information itself
if (n_hour > 0):
print "=== updated in last hour (" + str(n_hour) + " entities)"
for doc in col.find(query).sort(refDate, direction=DESCENDING):
date = datetime.fromtimestamp(int(doc[refDate]))
if ((range == "hour" and date < last_hour) or (range == "day" and date < last_day) or
(range == "week" and date < last_week) or (range == "month" and date <last_month)):
break
if (date < last_month and not old_mark_printed and n_old > 0):
print "=== older than one month (" + str(n_old) + " entities)"
old_mark_printed = True
elif (date < last_week and not month_mark_printed and n_month > 0):
print "=== updated in last month (" + str(n_month)+ " entities)"
month_mark_printed = True
elif (date < last_day and not week_mark_printed and n_week > 0):
print "=== updated in last week (" + str(n_week) + " entities)"
week_mark_printed = True
elif (date < last_hour and not day_mark_printed and n_day > 0):
print "=== updated in last day (" + str(n_day) + " entities)"
day_mark_printed = True
dateString = date.strftime('%Y-%m-%d %H:%M:%S')
print '-- ' + dateString + ': ' + entityString(doc['_id'])
if (range == "all"):
query[refDate]['$exists'] = False
docs = col.find(query)
n = docs.count()
if (n > 0):
print "=== without date (" + str(n) + " entities), probably last update was done with Orion 0.8.0 (released in October 9th, 2013)"
for doc in docs:
print '-- (no date) : ' + entityString(doc['_id'])
| agpl-3.0 |
marchelbling/osg | src/osg/ScissorIndexed.cpp | 1766 | /* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2014 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include <osg/ScissorIndexed>
#include <osg/GLExtensions>
#include <osg/State>
using namespace osg;
ScissorIndexed::ScissorIndexed():
_index(0),
_x(0.0f),
_y(0.0f),
_width(800.0f),
_height(600.0f) // defaults same as osg::Viewport and osg::Scissor
{
}
ScissorIndexed::~ScissorIndexed()
{
}
void ScissorIndexed::setIndex(unsigned int index)
{
if (_index==index) return;
ReassignToParents needToReassingToParentsWhenMemberValueChanges(this);
_index = index;
}
void ScissorIndexed::apply(State& state) const
{
const GLExtensions* extensions = state.get<GLExtensions>();
if (extensions->glScissorIndexed)
{
extensions->glScissorIndexed(static_cast<GLuint>(_index),
static_cast<GLfloat>(_x),
static_cast<GLfloat>(_y),
static_cast<GLfloat>(_width),
static_cast<GLfloat>(_height));
}
else
{
OSG_WARN<<"Warning: ScissorIndexed::apply(..) failed, glScissorIndexed is not support by OpenGL driver."<<std::endl;
}
}
| lgpl-2.1 |
harikrushna-Huawei/hackathon | protocols/isis/api/src/main/java/org/onosproject/isis/controller/IsisPduType.java | 2537 | /*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.isis.controller;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
/**
* Representation of ISIS PDU types.
*/
public enum IsisPduType {
/**
* Represents Level-1 LAN hello packet.
*/
L1HELLOPDU(15),
/**
* Represents Level-2 LAN hello packet.
*/
L2HELLOPDU(16),
/**
* Represents point-to-point hello packet.
*/
P2PHELLOPDU(17),
/**
* Represents Level-1 link state packet.
*/
L1LSPDU(18),
/**
* Represents Level-2 link state packet.
*/
L2LSPDU(20),
/**
* Represents Level-1 complete sequence number packet.
*/
L1CSNP(24),
/**
* Represents Level-2 complete sequence number packet.
*/
L2CSNP(25),
/**
* Represents Level-1 partial sequence number packet.
*/
L1PSNP(26),
/**
* Represents Level-2 partial sequence number packet.
*/
L2PSNP(27);
// Reverse lookup table
private static final Map<Integer, IsisPduType> LOOKUP = new HashMap<>();
// Populate the lookup table on loading time
static {
for (IsisPduType isisPduType : EnumSet.allOf(IsisPduType.class)) {
LOOKUP.put(isisPduType.value(), isisPduType);
}
}
private int value;
/**
* Creates an instance of ISIS PDU type.
*
* @param value represents ISIS PDU type
*/
private IsisPduType(int value) {
this.value = value;
}
/**
* Gets the enum instance from type value - reverse lookup purpose.
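     * For example, {@code IsisPduType.get(15)} returns {@code L1HELLOPDU}.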
*
* @param pduTypeValue PDU type value
* @return ISIS PDU type instance
*/
public static IsisPduType get(int pduTypeValue) {
return LOOKUP.get(pduTypeValue);
}
/**
* Gets the value representing PDU type.
*
* @return value represents PDU type
*/
public int value() {
return value;
}
} | apache-2.0 |
nicolaferraro/camel | components/camel-netty/src/test/java/org/apache/camel/component/netty/DatagramPacketByteArrayCodecTest.java | 3163 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty;
import java.net.InetSocketAddress;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.AddressedEnvelope;
import io.netty.channel.DefaultAddressedEnvelope;
import io.netty.channel.embedded.EmbeddedChannel;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class DatagramPacketByteArrayCodecTest {
private static final String VALUE = "~!Camel rocks@%";
@Test
public void testDecoder() {
ByteBuf buf = Unpooled.buffer();
buf.writeBytes(VALUE.getBytes());
ByteBuf input = buf.duplicate();
AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop
= new DefaultAddressedEnvelope<>(input, new InetSocketAddress(8888));
EmbeddedChannel channel = new EmbeddedChannel(ChannelHandlerFactories.newByteArrayDecoder("udp").newChannelHandler());
assertTrue(channel.writeInbound(addressedEnvelop));
assertTrue(channel.finish());
AddressedEnvelope<Object, InetSocketAddress> result = (AddressedEnvelope) channel.readInbound();
assertEquals(result.recipient().getPort(), addressedEnvelop.recipient().getPort());
assertTrue(result.content() instanceof byte[]);
assertEquals(VALUE, new String((byte[]) result.content()));
assertNull(channel.readInbound());
}
@Test
public void testEncoder() {
ByteBuf buf = Unpooled.buffer();
buf.writeBytes(VALUE.getBytes());
AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop
= new DefaultAddressedEnvelope<>(VALUE.getBytes(), new InetSocketAddress(8888));
EmbeddedChannel channel = new EmbeddedChannel(ChannelHandlerFactories.newByteArrayEncoder("udp").newChannelHandler());
assertTrue(channel.writeOutbound(addressedEnvelop));
assertTrue(channel.finish());
AddressedEnvelope output = (AddressedEnvelope) channel.readOutbound();
assertTrue(output.content() instanceof ByteBuf);
ByteBuf resultContent = (ByteBuf) output.content();
assertEquals(VALUE, new String(resultContent.array()));
assertNull(channel.readOutbound());
}
}
| apache-2.0 |
amckee23/drools | drools-pmml/src/main/java/org/drools/pmml/pmml_4_2/PMMLError.java | 1030 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.pmml.pmml_4_2;
import org.drools.compiler.compiler.DroolsError;
public class PMMLError extends DroolsError {
private String message;
public PMMLError( String message ) {
this.message = message;
}
@Override
public String getMessage() {
return message;
}
@Override
public int[] getLines() {
return new int[ 0 ]; //To change body of implemented methods use File | Settings | File Templates.
}
}
| apache-2.0 |
gnanam336/auto | value/src/it/functional/src/main/java/PackagelessNestedValueType.java | 1023 | /*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.auto.value.AutoValue;
import java.util.Map;
/**
* @author [email protected] (Éamonn McManus)
*/
public class PackagelessNestedValueType {
@AutoValue
public abstract static class Nested {
abstract Map<Integer, String> numberNames();
public static Nested create(Map<Integer, String> numberNames) {
return new AutoValue_PackagelessNestedValueType_Nested(numberNames);
}
}
}
| apache-2.0 |
christophd/camel | components/camel-cxf/src/test/java/org/apache/camel/component/cxf/ssl/SslGlobalTest.java | 4219 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cxf.ssl;
import java.util.ArrayList;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.SSLContextParametersAware;
import org.apache.camel.component.cxf.CXFTestSupport;
import org.apache.camel.component.cxf.common.message.CxfConstants;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.camel.test.spring.junit5.CamelSpringTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.springframework.context.support.AbstractXmlApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class SslGlobalTest extends CamelSpringTestSupport {
protected static final String GREET_ME_OPERATION = "greetMe";
protected static final String TEST_MESSAGE = "Hello World!";
protected static final String JAXWS_SERVER_ADDRESS
= "https://localhost:" + CXFTestSupport.getPort1() + "/CxfSslTest/SoapContext/SoapPort";
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
SSLContextParameters parameters = context.getRegistry().lookupByNameAndType("mySslContext", SSLContextParameters.class);
((SSLContextParametersAware) context.getComponent("cxf")).setUseGlobalSslContextParameters(true);
context.setSSLContextParameters(parameters);
return context;
}
@Test
public void testInvokingTrustRoute() throws Exception {
Exchange reply = sendJaxWsMessage("direct:trust");
if (reply.isFailed()) {
Exception exception = reply.getException();
String msg = exception.getMessage();
if (msg.contains("socket reset for TTL")) {
// ignore flaky test on JDK11
return;
}
}
assertFalse(reply.isFailed(), "We expect no exception here");
}
@Test
public void testInvokingWrongTrustRoute() throws Exception {
Exchange reply = sendJaxWsMessage("direct:wrongTrust");
assertTrue(reply.isFailed(), "We expect the exception here");
Throwable e = reply.getException().getCause();
assertEquals("javax.net.ssl.SSLHandshakeException", e.getClass().getCanonicalName());
}
protected Exchange sendJaxWsMessage(String endpointUri) throws InterruptedException {
Exchange exchange = template.send(endpointUri, new Processor() {
public void process(final Exchange exchange) {
final List<String> params = new ArrayList<>();
params.add(TEST_MESSAGE);
exchange.getIn().setBody(params);
exchange.getIn().setHeader(CxfConstants.OPERATION_NAME, GREET_ME_OPERATION);
}
});
return exchange;
}
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
// we can put the http conduit configuration here
return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/CxfGlobalSslContext.xml");
}
}
| apache-2.0 |