content_type  stringclasses  8 values
main_lang     stringclasses  7 values
message       stringlengths  1-50
sha           stringlengths  40-40
patch         stringlengths  52-962k
file_count    int64          1-300
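The six fields above make up one record of the corpus: the commit's content and language tags, its (length-limited) message, full SHA, the patch in the `<ide>`/`<add>`/`<del>` diff notation used below, and the number of files touched. As a minimal sketch of how such records might be consumed, assuming the rows are exported to a local `commits.jsonl` file (the filename and export format are assumptions, not given by this page):

```python
import json

# Assumed local export of the rows shown below; the path is illustrative only.
PATH = "commits.jsonl"

with open(PATH, encoding="utf-8") as f:
    for line in f:
        row = json.loads(line)
        # Each record carries the six fields described in the schema above.
        # Count "<add>" markers as a rough measure of lines added by the patch.
        added = sum(1 for chunk in row["patch"].split("<") if chunk.startswith("add>"))
        print(f'{row["sha"][:12]}  {row["main_lang"]:<10} '
              f'files={row["file_count"]:<3} +{added}  {row["message"]}')
```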
Go
Go
use golang.org/x/sys service.setrecoveryactions()
3c585e6567e5931fcc62b10ca4eda9a6e0ce97fb
<ide><path>cmd/dockerd/service_windows.go <ide> import ( <ide> "os/exec" <ide> "path/filepath" <ide> "time" <del> "unsafe" <ide> <ide> "github.com/sirupsen/logrus" <ide> "github.com/spf13/pflag" <ide> func registerService() error { <ide> } <ide> defer s.Close() <ide> <del> // See http://stackoverflow.com/questions/35151052/how-do-i-configure-failure-actions-of-a-windows-service-written-in-go <del> const ( <del> scActionNone = 0 <del> scActionRestart = 1 <del> scActionReboot = 2 <del> scActionRunCommand = 3 <del> <del> serviceConfigFailureActions = 2 <add> err = s.SetRecoveryActions( <add> []mgr.RecoveryAction{ <add> {Type: mgr.ServiceRestart, Delay: 60 * time.Second}, <add> {Type: mgr.ServiceRestart, Delay: 60 * time.Second}, <add> {Type: mgr.NoAction}, <add> }, <add> uint32(24*time.Hour/time.Second), <ide> ) <del> <del> type serviceFailureActions struct { <del> ResetPeriod uint32 <del> RebootMsg *uint16 <del> Command *uint16 <del> ActionsCount uint32 <del> Actions uintptr <del> } <del> <del> type scAction struct { <del> Type uint32 <del> Delay uint32 <del> } <del> t := []scAction{ <del> {Type: scActionRestart, Delay: uint32(60 * time.Second / time.Millisecond)}, <del> {Type: scActionRestart, Delay: uint32(60 * time.Second / time.Millisecond)}, <del> {Type: scActionNone}, <del> } <del> lpInfo := serviceFailureActions{ResetPeriod: uint32(24 * time.Hour / time.Second), ActionsCount: uint32(3), Actions: uintptr(unsafe.Pointer(&t[0]))} <del> err = windows.ChangeServiceConfig2(s.Handle, serviceConfigFailureActions, (*byte)(unsafe.Pointer(&lpInfo))) <ide> if err != nil { <ide> return err <ide> }
1
Javascript
Javascript
fix $httpparamserializerjqlike description
65bed615dfc41bfcf93ad00db9e389e3b0a35559
<ide><path>src/ng/http.js <ide> function $HttpParamSerializerJQLikeProvider() { <ide> /** <ide> * @ngdoc service <ide> * @name $httpParamSerializerJQLike <add> * @description <ide> * <ide> * Alternative $http params serializer that follows jQuerys `param()` method {http://api.jquery.com/jquery.param/} logic. <ide> * */
1
Python
Python
add adafactor optimizer from fairseq
971d1802d009d9996b36a34a34477cee849ef39f
<ide><path>src/transformers/__init__.py <ide> <ide> # Optimization <ide> from .optimization import ( <add> Adafactor, <ide> AdamW, <ide> get_constant_schedule, <ide> get_constant_schedule_with_warmup, <ide><path>src/transformers/optimization.py <ide> def step(self, closure: Callable = None): <ide> p.data.add_(p.data, alpha=-group["lr"] * group["weight_decay"]) <ide> <ide> return loss <add> <add> <add>class Adafactor(Optimizer): <add> """ <add> AdaFactor pytorch implementation can be used as a drop in replacement for Adam <add> original fairseq code: https://github.com/pytorch/fairseq/blob/master/fairseq/optim/adafactor.py <add> <add> Paper: `Adafactor: Adaptive Learning Rates with Sublinear Memory Cost` https://arxiv.org/abs/1804.04235 <add> Note that this optimizer internally adjusts the learning rate depending on the *scale_parameter*, *relative_step* and <add> *warmup_init* options. To use a manual (external) learning rate schedule you should set `scale_parameter=False` and `relative_step=False`. <add> <add> Arguments: <add> params (iterable): iterable of parameters to optimize or dicts defining parameter groups <add> lr (float, optional): external learning rate (default: None) <add> eps (tuple[float, float]): regularization constants for square gradient <add> and parameter scale respectively (default: (1e-30, 1e-3)) <add> clip_threshold (float, default 1.0): threshold of root mean square of final gradient update <add> decay_rate (float, default: -0.8): coefficient used to compute running averages of square <add> beta1 (float): coefficient used for computing running averages of gradient <add> weight_decay (float, default=0): weight decay (L2 penalty) <add> scale_parameter (bool, default: True): if True, learning rate is scaled by root mean square of <add> relative_step (bool, default: True): if True, time-dependent learning rate is computed instead of external learning rate <add> warmup_init (bool, default: False): time-dependent learning rate computation depends on whether warm-up initialization is being used <add> <add> This implementation handles low-precision (FP16, bfloat) values, but we have not thoroughly tested. <add> <add> Recommended T5 finetuning settings: <add> scheduled LR warm-up to fixed LR, disable relative updates, use clip threshold: https://arxiv.org/abs/2004.14546 <add> Adafactor(model.parameters(), lr=1e-3, relative_step=False, warmup_init=True) <add> Alternatively, relative_step with warmup_init can be used. <add> Training without LR warmup or clip threshold, is not recommended. Additional optimizer operations like gradient clipping, should not be used alongside Adafactor. 
<add> <add> Usage:: <add> # replace AdamW with Adafactor <add> optimizer = Adafactor(model.parameters(), lr=1e-3, eps=(1e-30, 1e-3), clip_threshold=1.0, <add> decay_rate=-0.8, beta1=None, weight_decay=0.0, relative_step=False, <add> scale_parameter=False, warmup_init=False,) <add> """ <add> <add> def __init__( <add> self, <add> params, <add> lr=None, <add> eps=(1e-30, 1e-3), <add> clip_threshold=1.0, <add> decay_rate=-0.8, <add> beta1=None, <add> weight_decay=0.0, <add> scale_parameter=True, <add> relative_step=True, <add> warmup_init=False, <add> ): <add> if lr is not None and relative_step: <add> raise ValueError("Cannot combine manual lr and relative_step options") <add> if warmup_init and not relative_step: <add> raise ValueError("warmup_init requires relative_step=True") <add> <add> defaults = dict( <add> lr=lr, <add> eps=eps, <add> clip_threshold=clip_threshold, <add> decay_rate=decay_rate, <add> beta1=beta1, <add> weight_decay=weight_decay, <add> scale_parameter=scale_parameter, <add> relative_step=relative_step, <add> warmup_init=warmup_init, <add> ) <add> super().__init__(params, defaults) <add> <add> @staticmethod <add> def _get_lr(param_group, param_state): <add> rel_step_sz = param_group["lr"] <add> if param_group["relative_step"]: <add> min_step = 1e-6 * param_state["step"] if param_group["warmup_init"] else 1e-2 <add> rel_step_sz = min(min_step, 1.0 / math.sqrt(param_state["step"])) <add> param_scale = 1.0 <add> if param_group["scale_parameter"]: <add> param_scale = max(param_group["eps"][1], param_state["RMS"]) <add> return param_scale * rel_step_sz <add> <add> @staticmethod <add> def _get_options(param_group, param_shape): <add> factored = len(param_shape) >= 2 <add> use_first_moment = param_group["beta1"] is not None <add> return factored, use_first_moment <add> <add> @staticmethod <add> def _rms(tensor): <add> return tensor.norm(2) / (tensor.numel() ** 0.5) <add> <add> @staticmethod <add> def _approx_sq_grad(exp_avg_sq_row, exp_avg_sq_col): <add> r_factor = (exp_avg_sq_row / exp_avg_sq_row.mean(dim=-1, keepdim=True)).rsqrt_() <add> c_factor = exp_avg_sq_col.rsqrt() <add> return torch.mm(r_factor.unsqueeze(-1), c_factor.unsqueeze(0)) <add> <add> def step(self, closure=None): <add> """Performs a single optimization step. <add> Arguments: <add> closure (callable, optional): A closure that reevaluates the model <add> and returns the loss. 
<add> """ <add> loss = None <add> if closure is not None: <add> loss = closure() <add> <add> for group in self.param_groups: <add> for p in group["params"]: <add> if p.grad is None: <add> continue <add> grad = p.grad.data <add> if grad.dtype in {torch.float16, torch.bfloat16}: <add> grad = grad.float() <add> if grad.is_sparse: <add> raise RuntimeError("Adafactor does not support sparse gradients.") <add> <add> state = self.state[p] <add> grad_shape = grad.shape <add> <add> factored, use_first_moment = self._get_options(group, grad_shape) <add> # State Initialization <add> if len(state) == 0: <add> state["step"] = 0 <add> <add> if use_first_moment: <add> # Exponential moving average of gradient values <add> state["exp_avg"] = torch.zeros_like(grad) <add> if factored: <add> state["exp_avg_sq_row"] = torch.zeros(grad_shape[:-1]).to(grad) <add> state["exp_avg_sq_col"] = torch.zeros(grad_shape[:-2] + grad_shape[-1:]).to(grad) <add> else: <add> state["exp_avg_sq"] = torch.zeros_like(grad) <add> <add> state["RMS"] = 0 <add> else: <add> if use_first_moment: <add> state["exp_avg"] = state["exp_avg"].to(grad) <add> if factored: <add> state["exp_avg_sq_row"] = state["exp_avg_sq_row"].to(grad) <add> state["exp_avg_sq_col"] = state["exp_avg_sq_col"].to(grad) <add> else: <add> state["exp_avg_sq"] = state["exp_avg_sq"].to(grad) <add> <add> p_data_fp32 = p.data <add> if p.data.dtype in {torch.float16, torch.bfloat16}: <add> p_data_fp32 = p_data_fp32.float() <add> <add> state["step"] += 1 <add> state["RMS"] = self._rms(p_data_fp32) <add> group["lr"] = self._get_lr(group, state) <add> <add> beta2t = 1.0 - math.pow(state["step"], group["decay_rate"]) <add> update = (grad ** 2) + group["eps"][0] <add> if factored: <add> exp_avg_sq_row = state["exp_avg_sq_row"] <add> exp_avg_sq_col = state["exp_avg_sq_col"] <add> <add> exp_avg_sq_row.mul_(beta2t).add_(1.0 - beta2t, update.mean(dim=-1)) <add> exp_avg_sq_col.mul_(beta2t).add_(1.0 - beta2t, update.mean(dim=-2)) <add> <add> # Approximation of exponential moving average of square of gradient <add> update = self._approx_sq_grad(exp_avg_sq_row, exp_avg_sq_col) <add> update.mul_(grad) <add> else: <add> exp_avg_sq = state["exp_avg_sq"] <add> <add> exp_avg_sq.mul_(beta2t).add_(1.0 - beta2t, update) <add> update = exp_avg_sq.rsqrt().mul_(grad) <add> <add> update.div_((self._rms(update) / group["clip_threshold"]).clamp_(min=1.0)) <add> update.mul_(group["lr"]) <add> <add> if use_first_moment: <add> exp_avg = state["exp_avg"] <add> exp_avg.mul_(group["beta1"]).add_(1 - group["beta1"], update) <add> update = exp_avg <add> <add> if group["weight_decay"] != 0: <add> p_data_fp32.add_(-group["weight_decay"] * group["lr"], p_data_fp32) <add> <add> p_data_fp32.add_(-update) <add> <add> if p.data.dtype in {torch.float16, torch.bfloat16}: <add> p.data.copy_(p_data_fp32) <add> <add> return loss <ide><path>tests/test_optimization.py <ide> import torch <ide> <ide> from transformers import ( <add> Adafactor, <ide> AdamW, <ide> get_constant_schedule, <ide> get_constant_schedule_with_warmup, <ide> def test_adam_w(self): <ide> w.grad.zero_() <ide> self.assertListAlmostEqual(w.tolist(), [0.4, 0.2, -0.5], tol=1e-2) <ide> <add> def test_adafactor(self): <add> w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True) <add> target = torch.tensor([0.4, 0.2, -0.5]) <add> criterion = torch.nn.MSELoss() <add> # No warmup, constant schedule, no gradient clipping <add> optimizer = Adafactor( <add> params=[w], <add> lr=1e-2, <add> eps=(1e-30, 1e-3), <add> clip_threshold=1.0, <add> decay_rate=-0.8, 
<add> beta1=None, <add> weight_decay=0.0, <add> relative_step=False, <add> scale_parameter=False, <add> warmup_init=False, <add> ) <add> for _ in range(1000): <add> loss = criterion(w, target) <add> loss.backward() <add> optimizer.step() <add> w.grad.detach_() # No zero_grad() function on simple tensors. we do it ourselves. <add> w.grad.zero_() <add> self.assertListAlmostEqual(w.tolist(), [0.4, 0.2, -0.5], tol=1e-2) <add> <ide> <ide> @require_torch <ide> class ScheduleInitTest(unittest.TestCase):
3
Python
Python
fix issue with nan scalar
2a5422da7cb6759d75477738cc192fee3ca2a19c
<ide><path>numpy/lib/function_base.py <ide> def percentile(a, <ide> ``i < j``. If ``g`` is the fractional part of the index surrounded by <ide> ``i`` and alpha and beta are correction constants modifying i and j. <ide> <del> Below, 'q' is the quantile value, 'n' is the samle size and <add> Below, 'q' is the quantile value, 'n' is the sample size and <ide> alpha and beta are constants. <ide> The following formula gives an interpolation "i + g" of where the quantile <ide> would be in the sorted sample. <ide> def _quantile( <ide> if np.any(slices_having_nans): <ide> if result.ndim == 0 and out is None: <ide> # can't write to a scalar <del> result = np.array(np.nan, dtype=arr.dtype) <add> result = arr.dtype.type(np.nan) <ide> else: <ide> result[..., slices_having_nans] = np.nan <ide> return result <ide><path>numpy/lib/tests/test_function_base.py <ide> def test_quantile_monotonic_hypo(self, arr): <ide> quantile = np.quantile(arr, p0) <ide> assert_equal(np.sort(quantile), quantile) <ide> <add> def test_quantile_scalar_nan(self): <add> a = np.array([[10., 7., 4.], [3., 2., 1.]]) <add> a[0][1] = np.nan <add> actual = np.quantile(a, 0.5) <add> assert np.isscalar(actual) <add> assert_equal(np.quantile(a, 0.5), np.nan) <ide> <ide> class TestLerp: <ide> @hypothesis.given(t0=st.floats(allow_nan=False, allow_infinity=False,
2
Text
Text
add checkpoint links
1f2c317e957e8d564f258c6ee03518b4803857f5
<ide><path>official/resnet/README.md <ide> num_gpus: <ide> + 0: Use OneDeviceStrategy and train on CPU. <ide> + 1: Use OneDeviceStrategy and train on GPU. <ide> + 2+: Use MirroredStrategy (data parallelism) to distribute a batch between devices. <add> <add>### Pre-trained model <add>You can download 190 MB pre-trained versions of ResNet-50. Reported accuracies are top-1 single-crop accuracy for the ImageNet validation set. Simply download and uncompress the file, and point the model to the extracted directory using the `--model_dir` flag. <add> <add>ResNet-50 v2 (Accuracy 76.05%): <add>* [Checkpoint](http://download.tensorflow.org/models/official/20180601_resnet_v2_imagenet_checkpoint.tar.gz) <add>* [SavedModel](http://download.tensorflow.org/models/official/20180601_resnet_v2_imagenet_savedmodel.tar.gz) <add> <add>ResNet-50 v2 (fp16, Accuracy 75.56%): <add>* [Checkpoint](http://download.tensorflow.org/models/official/20180601_resnet_v2_fp16_imagenet_checkpoint.tar.gz) <add>* [SavedModel](http://download.tensorflow.org/models/official/20180601_resnet_v2_fp16_imagenet_savedmodel.tar.gz) <add> <add>ResNet-50 v1 (Accuracy 75.91%): <add>* [Checkpoint](http://download.tensorflow.org/models/official/20180601_resnet_v1_imagenet_checkpoint.tar.gz) <add>* [SavedModel](http://download.tensorflow.org/models/official/20180601_resnet_v1_imagenet_savedmodel.tar.gz)
1
Javascript
Javascript
fix crypto test case
34f56e2d711aaff10838167aa4ece77d278cfa09
<ide><path>test/parallel/test-crypto-cipher-decipher.js <ide> testCipher2(Buffer.from('0123456789abcdef')); <ide> 'TypedArray, or DataView. Received type object' <ide> }); <ide> <del> common.expectsError( <del> () => crypto.createCipher('aes-256-cbc', 'secret').setAuthTag(null), <del> { <del> code: 'ERR_INVALID_ARG_TYPE', <del> type: TypeError, <del> message: 'The "buffer" argument must be one of type Buffer, ' + <del> 'TypedArray, or DataView. Received type object' <del> }); <del> <ide> common.expectsError( <ide> () => crypto.createCipher('aes-256-cbc', 'secret').setAAD(null), <ide> { <ide> testCipher2(Buffer.from('0123456789abcdef')); <ide> 'Received type object' <ide> }); <ide> <add> common.expectsError( <add> () => crypto.createDecipher('aes-256-cbc', 'secret').setAuthTag(null), <add> { <add> code: 'ERR_INVALID_ARG_TYPE', <add> type: TypeError, <add> message: 'The "buffer" argument must be one of type Buffer, ' + <add> 'TypedArray, or DataView. Received type object' <add> }); <add> <ide> common.expectsError( <ide> () => crypto.createDecipher('aes-256-cbc', null), <ide> {
1
Javascript
Javascript
simplify functions and remove redundant args/calls
e7d8eee46d3ed11fe7054db5e616bd2a8eeb2c1b
<ide><path>src/ngAnimate/animateQueue.js <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> } <ide> } <ide> <del> function isAllowed(ruleType, element, currentAnimation, previousAnimation) { <add> function isAllowed(ruleType, currentAnimation, previousAnimation) { <ide> return rules[ruleType].some(function(fn) { <del> return fn(element, currentAnimation, previousAnimation); <add> return fn(currentAnimation, previousAnimation); <ide> }); <ide> } <ide> <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> return and ? a && b : a || b; <ide> } <ide> <del> rules.join.push(function(element, newAnimation, currentAnimation) { <add> rules.join.push(function(newAnimation, currentAnimation) { <ide> // if the new animation is class-based then we can just tack that on <ide> return !newAnimation.structural && hasAnimationClasses(newAnimation); <ide> }); <ide> <del> rules.skip.push(function(element, newAnimation, currentAnimation) { <add> rules.skip.push(function(newAnimation, currentAnimation) { <ide> // there is no need to animate anything if no classes are being added and <ide> // there is no structural animation that will be triggered <ide> return !newAnimation.structural && !hasAnimationClasses(newAnimation); <ide> }); <ide> <del> rules.skip.push(function(element, newAnimation, currentAnimation) { <add> rules.skip.push(function(newAnimation, currentAnimation) { <ide> // why should we trigger a new structural animation if the element will <ide> // be removed from the DOM anyway? <ide> return currentAnimation.event === 'leave' && newAnimation.structural; <ide> }); <ide> <del> rules.skip.push(function(element, newAnimation, currentAnimation) { <add> rules.skip.push(function(newAnimation, currentAnimation) { <ide> // if there is an ongoing current animation then don't even bother running the class-based animation <ide> return currentAnimation.structural && currentAnimation.state === RUNNING_STATE && !newAnimation.structural; <ide> }); <ide> <del> rules.cancel.push(function(element, newAnimation, currentAnimation) { <add> rules.cancel.push(function(newAnimation, currentAnimation) { <ide> // there can never be two structural animations running at the same time <ide> return currentAnimation.structural && newAnimation.structural; <ide> }); <ide> <del> rules.cancel.push(function(element, newAnimation, currentAnimation) { <add> rules.cancel.push(function(newAnimation, currentAnimation) { <ide> // if the previous animation is already running, but the new animation will <ide> // be triggered, but the new animation is structural <ide> return currentAnimation.state === RUNNING_STATE && newAnimation.structural; <ide> }); <ide> <del> rules.cancel.push(function(element, newAnimation, currentAnimation) { <add> rules.cancel.push(function(newAnimation, currentAnimation) { <ide> // cancel the animation if classes added / removed in both animation cancel each other out, <ide> // but only if the current animation isn't structural <ide> <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> return this === arg || !!(this.compareDocumentPosition(arg) & 16); <ide> }; <ide> <del> function findCallbacks(parent, element, event) { <del> var targetNode = getDomNode(element); <del> var targetParentNode = getDomNode(parent); <del> <add> function findCallbacks(targetParentNode, targetNode, event) { <ide> var matches = []; <ide> var entries = callbackRegistry[event]; <ide> if (entries) { <ide> var 
$$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> }); <ide> } <ide> <del> function cleanupEventListeners(phase, element) { <del> if (phase === 'close' && !element[0].parentNode) { <add> function cleanupEventListeners(phase, node) { <add> if (phase === 'close' && !node.parentNode) { <ide> // If the element is not attached to a parentNode, it has been removed by <ide> // the domOperation, and we can safely remove the event callbacks <del> $animate.off(element); <add> $animate.off(node); <ide> } <ide> } <ide> <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> // the input data when running `$animateCss`. <ide> var options = copy(initialOptions); <ide> <del> var node, parent; <ide> element = stripCommentsFromElement(element); <del> if (element) { <del> node = getDomNode(element); <del> parent = element.parent(); <del> } <add> var node = getDomNode(element); <add> var parentNode = node && node.parentNode; <ide> <ide> options = prepareAnimationOptions(options); <ide> <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> // there is no point in traversing the same collection of parent ancestors if a followup <ide> // animation will be run on the same element that already did all that checking work <ide> if (!skipAnimations && (!hasExistingAnimation || existingAnimation.state !== PRE_DIGEST_STATE)) { <del> skipAnimations = !areAnimationsAllowed(element, parent, event); <add> skipAnimations = !areAnimationsAllowed(node, parentNode, event); <ide> } <ide> <ide> if (skipAnimations) { <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> } <ide> <ide> if (isStructural) { <del> closeChildAnimations(element); <add> closeChildAnimations(node); <ide> } <ide> <ide> var newAnimation = { <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> }; <ide> <ide> if (hasExistingAnimation) { <del> var skipAnimationFlag = isAllowed('skip', element, newAnimation, existingAnimation); <add> var skipAnimationFlag = isAllowed('skip', newAnimation, existingAnimation); <ide> if (skipAnimationFlag) { <ide> if (existingAnimation.state === RUNNING_STATE) { <ide> close(); <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> return existingAnimation.runner; <ide> } <ide> } <del> var cancelAnimationFlag = isAllowed('cancel', element, newAnimation, existingAnimation); <add> var cancelAnimationFlag = isAllowed('cancel', newAnimation, existingAnimation); <ide> if (cancelAnimationFlag) { <ide> if (existingAnimation.state === RUNNING_STATE) { <ide> // this will end the animation right away and it is safe <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> // a joined animation means that this animation will take over the existing one <ide> // so an example would involve a leave animation taking over an enter. Then when <ide> // the postDigest kicks in the enter will be ignored. 
<del> var joinAnimationFlag = isAllowed('join', element, newAnimation, existingAnimation); <add> var joinAnimationFlag = isAllowed('join', newAnimation, existingAnimation); <ide> if (joinAnimationFlag) { <ide> if (existingAnimation.state === RUNNING_STATE) { <ide> normalizeAnimationDetails(element, newAnimation); <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> <ide> if (!isValidAnimation) { <ide> close(); <del> clearElementAnimationState(element); <add> clearElementAnimationState(node); <ide> return runner; <ide> } <ide> <ide> // the counter keeps track of cancelled animations <ide> var counter = (existingAnimation.counter || 0) + 1; <ide> newAnimation.counter = counter; <ide> <del> markElementAnimationState(element, PRE_DIGEST_STATE, newAnimation); <add> markElementAnimationState(node, PRE_DIGEST_STATE, newAnimation); <ide> <ide> $rootScope.$$postDigest(function() { <ide> var animationDetails = activeAnimationsLookup.get(node); <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> // isn't allowed to animate from here then we need to clear the state of the element <ide> // so that any future animations won't read the expired animation data. <ide> if (!isValidAnimation) { <del> clearElementAnimationState(element); <add> clearElementAnimationState(node); <ide> } <ide> <ide> return; <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> ? 'setClass' <ide> : animationDetails.event; <ide> <del> markElementAnimationState(element, RUNNING_STATE); <add> markElementAnimationState(node, RUNNING_STATE); <ide> var realRunner = $$animation(element, event, animationDetails.options); <ide> <ide> // this will update the runner's flow-control events based on <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> close(!status); <ide> var animationDetails = activeAnimationsLookup.get(node); <ide> if (animationDetails && animationDetails.counter === counter) { <del> clearElementAnimationState(getDomNode(element)); <add> clearElementAnimationState(node); <ide> } <ide> notifyProgress(runner, event, 'close', {}); <ide> }); <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> <ide> function notifyProgress(runner, event, phase, data) { <ide> runInNextPostDigestOrNow(function() { <del> var callbacks = findCallbacks(parent, element, event); <add> var callbacks = findCallbacks(parentNode, node, event); <ide> if (callbacks.length) { <ide> // do not optimize this call here to RAF because <ide> // we don't know how heavy the callback code here will <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> forEach(callbacks, function(callback) { <ide> callback(element, phase, data); <ide> }); <del> cleanupEventListeners(phase, element); <add> cleanupEventListeners(phase, node); <ide> }); <ide> } else { <del> cleanupEventListeners(phase, element); <add> cleanupEventListeners(phase, node); <ide> } <ide> }); <ide> runner.progress(event, phase, data); <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> } <ide> } <ide> <del> function closeChildAnimations(element) { <del> var node = getDomNode(element); <add> function closeChildAnimations(node) { <ide> var children = node.querySelectorAll('[' + NG_ANIMATE_ATTR_NAME + ']'); <ide> forEach(children, function(child) { <ide> var state = parseInt(child.getAttribute(NG_ANIMATE_ATTR_NAME), 10); <ide> var 
$$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> }); <ide> } <ide> <del> function clearElementAnimationState(element) { <del> var node = getDomNode(element); <add> function clearElementAnimationState(node) { <ide> node.removeAttribute(NG_ANIMATE_ATTR_NAME); <ide> activeAnimationsLookup.remove(node); <ide> } <ide> <del> function isMatchingElement(nodeOrElmA, nodeOrElmB) { <del> return getDomNode(nodeOrElmA) === getDomNode(nodeOrElmB); <del> } <del> <ide> /** <ide> * This fn returns false if any of the following is true: <ide> * a) animations on any parent element are disabled, and animations on the element aren't explicitly allowed <ide> * b) a parent element has an ongoing structural animation, and animateChildren is false <ide> * c) the element is not a child of the body <ide> * d) the element is not a child of the $rootElement <ide> */ <del> function areAnimationsAllowed(element, parentElement, event) { <del> var bodyElement = jqLite($document[0].body); <del> var bodyElementDetected = isMatchingElement(element, bodyElement) || element[0].nodeName === 'HTML'; <del> var rootElementDetected = isMatchingElement(element, $rootElement); <add> function areAnimationsAllowed(node, parentNode, event) { <add> var bodyNode = $document[0].body; <add> var rootNode = getDomNode($rootElement); <add> <add> var bodyNodeDetected = (node === bodyNode) || node.nodeName === 'HTML'; <add> var rootNodeDetected = (node === rootNode); <ide> var parentAnimationDetected = false; <add> var elementDisabled = disabledElementsLookup.get(node); <ide> var animateChildren; <del> var elementDisabled = disabledElementsLookup.get(getDomNode(element)); <ide> <del> var parentHost = jqLite.data(element[0], NG_ANIMATE_PIN_DATA); <add> var parentHost = jqLite.data(node, NG_ANIMATE_PIN_DATA); <ide> if (parentHost) { <del> parentElement = parentHost; <add> parentNode = getDomNode(parentHost); <ide> } <ide> <del> parentElement = getDomNode(parentElement); <del> <del> while (parentElement) { <del> if (!rootElementDetected) { <add> while (parentNode) { <add> if (!rootNodeDetected) { <ide> // angular doesn't want to attempt to animate elements outside of the application <ide> // therefore we need to ensure that the rootElement is an ancestor of the current element <del> rootElementDetected = isMatchingElement(parentElement, $rootElement); <add> rootNodeDetected = (parentNode === rootNode); <ide> } <ide> <del> if (parentElement.nodeType !== ELEMENT_NODE) { <add> if (parentNode.nodeType !== ELEMENT_NODE) { <ide> // no point in inspecting the #document element <ide> break; <ide> } <ide> <del> var details = activeAnimationsLookup.get(parentElement) || {}; <add> var details = activeAnimationsLookup.get(parentNode) || {}; <ide> // either an enter, leave or move animation will commence <ide> // therefore we can't allow any animations to take place <ide> // but if a parent animation is class-based then that's ok <ide> if (!parentAnimationDetected) { <del> var parentElementDisabled = disabledElementsLookup.get(parentElement); <add> var parentNodeDisabled = disabledElementsLookup.get(parentNode); <ide> <del> if (parentElementDisabled === true && elementDisabled !== false) { <add> if (parentNodeDisabled === true && elementDisabled !== false) { <ide> // disable animations if the user hasn't explicitly enabled animations on the <ide> // current element <ide> elementDisabled = true; <ide> // element is disabled via parent element, no need to check anything else <ide> break; <del> } else if (parentElementDisabled === 
false) { <add> } else if (parentNodeDisabled === false) { <ide> elementDisabled = false; <ide> } <ide> parentAnimationDetected = details.structural; <ide> } <ide> <ide> if (isUndefined(animateChildren) || animateChildren === true) { <del> var value = jqLite.data(parentElement, NG_ANIMATE_CHILDREN_DATA); <add> var value = jqLite.data(parentNode, NG_ANIMATE_CHILDREN_DATA); <ide> if (isDefined(value)) { <ide> animateChildren = value; <ide> } <ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate <ide> // there is no need to continue traversing at this point <ide> if (parentAnimationDetected && animateChildren === false) break; <ide> <del> if (!bodyElementDetected) { <add> if (!bodyNodeDetected) { <ide> // we also need to ensure that the element is or will be a part of the body element <ide> // otherwise it is pointless to even issue an animation to be rendered <del> bodyElementDetected = isMatchingElement(parentElement, bodyElement); <add> bodyNodeDetected = (parentNode === bodyNode); <ide> } <ide> <del> if (bodyElementDetected && rootElementDetected) { <add> if (bodyNodeDetected && rootNodeDetected) { <ide> // If both body and root have been found, any other checks are pointless, <ide> // as no animation data should live outside the application <ide> break; <ide> } <ide> <del> if (!rootElementDetected) { <del> // If no rootElement is detected, check if the parentElement is pinned to another element <del> parentHost = jqLite.data(parentElement, NG_ANIMATE_PIN_DATA); <add> if (!rootNodeDetected) { <add> // If `rootNode` is not detected, check if `parentNode` is pinned to another element <add> parentHost = jqLite.data(parentNode, NG_ANIMATE_PIN_DATA); <ide> if (parentHost) { <ide> // The pin target element becomes the next parent element <del> parentElement = getDomNode(parentHost); <add> parentNode = getDomNode(parentHost); <ide> continue; <ide> } <ide> } <ide> <del> parentElement = parentElement.parentNode; <add> parentNode = parentNode.parentNode; <ide> } <ide> <ide> var allowAnimation = (!parentAnimationDetected || animateChildren) && elementDisabled !== true; <del> return allowAnimation && rootElementDetected && bodyElementDetected; <add> return allowAnimation && rootNodeDetected && bodyNodeDetected; <ide> } <ide> <del> function markElementAnimationState(element, state, details) { <add> function markElementAnimationState(node, state, details) { <ide> details = details || {}; <ide> details.state = state; <ide> <del> var node = getDomNode(element); <ide> node.setAttribute(NG_ANIMATE_ATTR_NAME, state); <ide> <ide> var oldValue = activeAnimationsLookup.get(node);
1
Python
Python
fix black test
e80581b6a68b230b0a0d9556665592cd4a503852
<ide><path>libcloud/compute/drivers/openstack.py <ide> def _to_sizes(self, obj): <ide> def _create_args_to_params(self, node, **kwargs): <ide> server_params = { <ide> "name": kwargs.get("name"), <del> "metadata": kwargs.get("ex_metadata", {}) or {} <add> "metadata": kwargs.get("ex_metadata", {}) or {}, <ide> } <ide> <ide> if kwargs.get("ex_files", None): <del> server_params["personality"] = self._files_to_personality(kwargs.get("ex_files")) <add> server_params["personality"] = self._files_to_personality( <add> kwargs.get("ex_files") <add> ) <ide> <ide> if kwargs.get("ex_availability_zone", None): <ide> server_params["availability_zone"] = kwargs["ex_availability_zone"]
1
Python
Python
add multi-gpu to mnist, take two
e8726907ea5a479359b2a0d93f9a5d4b9b8a1e12
<ide><path>official/mnist/mnist.py <ide> def model_fn(features, labels, mode, params): <ide> }) <ide> if mode == tf.estimator.ModeKeys.TRAIN: <ide> optimizer = tf.train.AdamOptimizer(learning_rate=1e-4) <add> <add> # If we are running multi-GPU, we need to wrap the optimizer. <add> if params.get('multi_gpu'): <add> optimizer = tf.contrib.estimator.TowerOptimizer(optimizer) <add> <ide> logits = model(image, training=True) <ide> loss = tf.losses.softmax_cross_entropy(onehot_labels=labels, logits=logits) <ide> accuracy = tf.metrics.accuracy( <ide> def model_fn(features, labels, mode, params): <ide> }) <ide> <ide> <add>def validate_batch_size_for_multi_gpu(batch_size): <add> """For multi-gpu, batch-size must be a multiple of the number of <add> available GPUs. <add> <add> Note that this should eventually be handled by replicate_model_fn <add> directly. Multi-GPU support is currently experimental, however, <add> so doing the work here until that feature is in place. <add> """ <add> from tensorflow.python.client import device_lib <add> <add> local_device_protos = device_lib.list_local_devices() <add> num_gpus = sum([1 for d in local_device_protos if d.device_type == 'GPU']) <add> if not num_gpus: <add> raise ValueError('Multi-GPU mode was specified, but no GPUs ' <add> 'were found. To use CPU, run without --multi_gpu.') <add> <add> remainder = batch_size % num_gpus <add> if remainder: <add> err = ('When running with multiple GPUs, batch size ' <add> 'must be a multiple of the number of available GPUs. ' <add> 'Found {} GPUs with a batch size of {}; try --batch_size={} instead.' <add> ).format(num_gpus, batch_size, batch_size - remainder) <add> raise ValueError(err) <add> <add> <ide> def main(unused_argv): <add> model_function = model_fn <add> <add> if FLAGS.multi_gpu: <add> validate_batch_size_for_multi_gpu(FLAGS.batch_size) <add> <add> # There are two steps required if using multi-GPU: (1) wrap the model_fn, <add> # and (2) wrap the optimizer. The first happens here, and (2) happens <add> # in the model_fn itself when the optimizer is defined. 
<add> model_function = tf.contrib.estimator.replicate_model_fn( <add> model_fn, loss_reduction=tf.losses.Reduction.MEAN) <add> <ide> data_format = FLAGS.data_format <ide> if data_format is None: <ide> data_format = ('channels_first' <ide> if tf.test.is_built_with_cuda() else 'channels_last') <ide> mnist_classifier = tf.estimator.Estimator( <del> model_fn=model_fn, <add> model_fn=model_function, <ide> model_dir=FLAGS.model_dir, <ide> params={ <del> 'data_format': data_format <add> 'data_format': data_format, <add> 'multi_gpu': FLAGS.multi_gpu <ide> }) <ide> <ide> # Train the model <ide> def eval_input_fn(): <ide> mnist_classifier.export_savedmodel(FLAGS.export_dir, input_fn) <ide> <ide> <del>if __name__ == '__main__': <del> parser = argparse.ArgumentParser() <del> parser.add_argument( <del> '--batch_size', <del> type=int, <del> default=100, <del> help='Number of images to process in a batch') <del> parser.add_argument( <del> '--data_dir', <del> type=str, <del> default='/tmp/mnist_data', <del> help='Path to directory containing the MNIST dataset') <del> parser.add_argument( <del> '--model_dir', <del> type=str, <del> default='/tmp/mnist_model', <del> help='The directory where the model will be stored.') <del> parser.add_argument( <del> '--train_epochs', type=int, default=40, help='Number of epochs to train.') <del> parser.add_argument( <del> '--data_format', <del> type=str, <del> default=None, <del> choices=['channels_first', 'channels_last'], <del> help='A flag to override the data format used in the model. channels_first ' <del> 'provides a performance boost on GPU but is not always compatible ' <del> 'with CPU. If left unspecified, the data format will be chosen ' <del> 'automatically based on whether TensorFlow was built for CPU or GPU.') <del> parser.add_argument( <del> '--export_dir', <del> type=str, <del> help='The directory where the exported SavedModel will be stored.') <add>class MNISTArgParser(argparse.ArgumentParser): <add> <add> def __init__(self): <add> super(MNISTArgParser, self).__init__() <ide> <add> self.add_argument( <add> '--multi_gpu', action='store_true', <add> help='If set, run across all available GPUs.') <add> self.add_argument( <add> '--batch_size', <add> type=int, <add> default=100, <add> help='Number of images to process in a batch') <add> self.add_argument( <add> '--data_dir', <add> type=str, <add> default='/tmp/mnist_data', <add> help='Path to directory containing the MNIST dataset') <add> self.add_argument( <add> '--model_dir', <add> type=str, <add> default='/tmp/mnist_model', <add> help='The directory where the model will be stored.') <add> self.add_argument( <add> '--train_epochs', <add> type=int, <add> default=40, <add> help='Number of epochs to train.') <add> self.add_argument( <add> '--data_format', <add> type=str, <add> default=None, <add> choices=['channels_first', 'channels_last'], <add> help='A flag to override the data format used in the model. ' <add> 'channels_first provides a performance boost on GPU but is not always ' <add> 'compatible with CPU. 
If left unspecified, the data format will be ' <add> 'chosen automatically based on whether TensorFlow was built for CPU or ' <add> 'GPU.') <add> self.add_argument( <add> '--export_dir', <add> type=str, <add> help='The directory where the exported SavedModel will be stored.') <add> <add> <add>if __name__ == '__main__': <add> parser = MNISTArgParser() <ide> tf.logging.set_verbosity(tf.logging.INFO) <ide> FLAGS, unparsed = parser.parse_known_args() <ide> tf.app.run(main=main, argv=[sys.argv[0]] + unparsed) <ide><path>official/mnist/mnist_test.py <ide> def test_mnist(self): <ide> self.assertEqual(predictions['probabilities'].shape, (10,)) <ide> self.assertEqual(predictions['classes'].shape, ()) <ide> <del> def mnist_model_fn_helper(self, mode): <add> def mnist_model_fn_helper(self, mode, multi_gpu=False): <ide> features, labels = dummy_input_fn() <ide> image_count = features.shape[0] <ide> spec = mnist.model_fn(features, labels, mode, { <del> 'data_format': 'channels_last' <add> 'data_format': 'channels_last', <add> 'multi_gpu': multi_gpu <ide> }) <ide> <ide> if mode == tf.estimator.ModeKeys.PREDICT: <ide> def mnist_model_fn_helper(self, mode): <ide> def test_mnist_model_fn_train_mode(self): <ide> self.mnist_model_fn_helper(tf.estimator.ModeKeys.TRAIN) <ide> <add> def test_mnist_model_fn_train_mode_multi_gpu(self): <add> self.mnist_model_fn_helper(tf.estimator.ModeKeys.TRAIN, multi_gpu=True) <add> <ide> def test_mnist_model_fn_eval_mode(self): <ide> self.mnist_model_fn_helper(tf.estimator.ModeKeys.EVAL) <ide>
2
Text
Text
add initial "service" docs
f565bc7ec964537a4e20bf5d4e497d15c0c9b5bf
<ide><path>docs/reference/commandline/index.md <ide> You start the Docker daemon with the command line. How you start the daemon affe <ide> * [swarm leave](swarm_leave.md) <ide> * [swarm update](swarm_update.md) <ide> <add>### Swarm service commands <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md) <add>* [service scale](service_scale.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_create.md <add><!--[metadata]> <add>+++ <add>title = "service create" <add>description = "The service create command description and usage" <add>keywords = ["service, create"] <add> <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. <add> <add># service create <add> <add>```Markdown <add>Usage: docker service create [OPTIONS] IMAGE [COMMAND] [ARG...] <add> <add>Create a new service <add> <add>Options: <add> --constraint value Placement constraints (default []) <add> --endpoint-mode string Endpoint mode(Valid values: VIP, DNSRR) <add> -e, --env value Set environment variables (default []) <add> --help Print usage <add> -l, --label value Service labels (default []) <add> --limit-cpu value Limit CPUs (default 0.000) <add> --limit-memory value Limit Memory (default 0 B) <add> --mode string Service mode (replicated or global) (default "replicated") <add> -m, --mount value Attach a mount to the service <add> --name string Service name <add> --network value Network attachments (default []) <add> -p, --publish value Publish a port as a node port (default []) <add> --replicas value Number of tasks (default none) <add> --reserve-cpu value Reserve CPUs (default 0.000) <add> --reserve-memory value Reserve Memory (default 0 B) <add> --restart-condition string Restart when condition is met (none, on_failure, or any) <add> --restart-delay value Delay between restart attempts (default none) <add> --restart-max-attempts value Maximum number of restarts before giving up (default none) <add> --restart-window value Window used to evalulate the restart policy (default none) <add> --stop-grace-period value Time to wait before force killing a container (default none) <add> --update-delay duration Delay between updates <add> --update-parallelism uint Maximum number of tasks updated simultaneously <add> -u, --user string Username or UID <add> -w, --workdir string Working directory inside the container <add>``` <add> <add>Creates a service as described by the specified parameters. This command has to <add>be run targeting a manager node. <add> <add>## Examples <add> <add>### Create a service <add> <add>```bash <add>$ docker service create --name redis redis:3.0.6 <add>dmu1ept4cxcfe8k8lhtux3ro3 <add> <add>$ docker service ls <add>ID NAME REPLICAS IMAGE COMMAND <add>dmu1ept4cxcf redis 1/1 redis:3.0.6 <add>``` <add> <add>### Create a service with 5 tasks <add> <add>You can set the number of tasks for a service using the `--replicas` option. The <add>following command creates a `redis` service with `5` tasks: <add> <add>```bash <add>$ docker service create --name redis --replicas=5 redis:3.0.6 <add>4cdgfyky7ozwh3htjfw0d12qv <add>``` <add> <add>The above command sets the *desired* number of tasks for the service. 
Even <add>though the command returns directly, actual scaling of the service may take <add>some time. The `REPLICAS` column shows both the *actual* and *desired* number <add>of tasks for the service. <add> <add>In the following example, the desired number of tasks is set to `5`, but the <add>*actual* number is `3` <add> <add>```bash <add>$ docker service ls <add>ID NAME REPLICAS IMAGE COMMAND <add>4cdgfyky7ozw redis 3/5 redis:3.0.7 <add>``` <add> <add>Once all the tasks are created, the actual number of tasks is equal to the <add>desired number: <add> <add>```bash <add>$ docker service ls <add>ID NAME REPLICAS IMAGE COMMAND <add>4cdgfyky7ozw redis 5/5 redis:3.0.7 <add>``` <add> <add> <add>### Create a service with a rolling update constraints <add> <add> <add>```bash <add>$ docker service create \ <add> --replicas 10 \ <add> --name redis \ <add> --update-delay 10s \ <add> --update-parallelism 2 \ <add> redis:3.0.6 <add>``` <add> <add>When this service is [updated](service_update.md), a rolling update will update <add>tasks in batches of `2`, with `10s` between batches. <add> <add>### Setting environment variables (-e --env) <add> <add>This sets environmental variables for all tasks in a service. For example: <add> <add>```bash <add>$ docker service create --name redis_2 --replicas 5 --env MYVAR=foo redis:3.0.6 <add>``` <add> <add>### Set metadata on a service (-l --label) <add> <add>A label is a `key=value` pair that applies metadata to a service. To label a <add>service with two labels: <add> <add>```bash <add>$ docker service create \ <add> --name redis_2 \ <add> --label com.example.foo="bar" <add> --label bar=baz \ <add> redis:3.0.6 <add>``` <add> <add>For more information about labels, refer to [apply custom <add>metadata](../../userguide/labels-custom-metadata.md) <add> <add>### Service mode <add> <add>Is this a replicated service or a global service. A replicated service runs as <add>many tasks as specified, while a global service runs on each active node in the <add>swarm. <add> <add>The following command creates a "global" service: <add> <add>```bash <add>$ docker service create --name redis_2 --mode global redis:3.0.6 <add>``` <add> <add> <add>## Related information <add> <add>* [service inspect](service_inspect.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md) <add>* [service scale](service_scale.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_inspect.md <add><!--[metadata]> <add>+++ <add>title = "service inspect" <add>description = "The service inspect command description and usage" <add>keywords = ["service, inspect"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. <add> <add># service inspect <add> <add>```Markdown <add>Usage: docker service inspect [OPTIONS] SERVICE [SERVICE...] <add> <add>Inspect a service <add> <add>Options: <add> -f, --format string Format the output using the given go template <add> --help Print usage <add> -p, --pretty Print the information in a human friendly format. <add>``` <add> <add> <add>Inspects the specified service. This command has to be run targeting a manager <add>node. <add> <add>By default, this renders all results in a JSON array. If a format is specified, <add>the given template will be executed for each result. 
<add> <add>Go's [text/template](http://golang.org/pkg/text/template/) package <add>describes all the details of the format. <add> <add>## Examples <add> <add>### Inspecting a service by name or ID <add> <add>You can inspect a service, either by its *name*, or *ID* <add> <add>For example, given the following service; <add> <add>```bash <add>$ docker service ls <add>ID NAME REPLICAS IMAGE COMMAND <add>dmu1ept4cxcf redis 3/3 redis:3.0.6 <add>``` <add> <add>Both `docker service inspect redis`, and `docker service inspect dmu1ept4cxcf` <add>produce the same result: <add> <add>```bash <add>$ docker service inspect redis <add>[ <add> { <add> "ID": "dmu1ept4cxcfe8k8lhtux3ro3", <add> "Version": { <add> "Index": 12 <add> }, <add> "CreatedAt": "2016-06-17T18:44:02.558012087Z", <add> "UpdatedAt": "2016-06-17T18:44:02.558012087Z", <add> "Spec": { <add> "Name": "redis", <add> "TaskTemplate": { <add> "ContainerSpec": { <add> "Image": "redis:3.0.6" <add> }, <add> "Resources": { <add> "Limits": {}, <add> "Reservations": {} <add> }, <add> "RestartPolicy": { <add> "Condition": "any", <add> "MaxAttempts": 0 <add> }, <add> "Placement": {} <add> }, <add> "Mode": { <add> "Replicated": { <add> "Replicas": 1 <add> } <add> }, <add> "UpdateConfig": {}, <add> "EndpointSpec": { <add> "Mode": "vip" <add> } <add> }, <add> "Endpoint": { <add> "Spec": {} <add> } <add> } <add>] <add>``` <add> <add>```bash <add>$ docker service inspect dmu1ept4cxcf <add>[ <add> { <add> "ID": "dmu1ept4cxcfe8k8lhtux3ro3", <add> "Version": { <add> "Index": 12 <add> }, <add> ... <add> } <add>] <add>``` <add> <add>### Inspect a service using pretty-print <add> <add>You can print the inspect output in a human-readable format instead of the default <add>JSON output, by using the `--pretty` option: <add> <add>```bash <add>$ docker service inspect --pretty frontend <add>ID: c8wgl7q4ndfd52ni6qftkvnnp <add>Name: frontend <add>Labels: <add> - org.example.projectname=demo-app <add>Mode: REPLICATED <add> Replicas: 5 <add>Placement: <add> Strategy: Spread <add>UpdateConfig: <add> Parallelism: 0 <add>ContainerSpec: <add> Image: nginx:alpine <add>Resources: <add>Reservations: <add>Limits: <add>Ports: <add> Name = <add> Protocol = tcp <add> TargetPort = 443 <add> PublishedPort = 4443 <add>``` <add> <add> <add>### Finding the number of tasks running as part of a service <add> <add>The `--format` option can be used to obtain specific information about a <add>service. For example, the following command outputs the number of replicas <add>of the "redis" service. <add> <add>```bash <add>$ docker service inspect --format='{{.Spec.Mode.Replicated.Replicas}}' redis <add>10 <add>``` <add> <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md) <add>* [service scale](service_scale.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_ls.md <add><!--[metadata]> <add>+++ <add>title = "service ls" <add>description = "The service ls command description and usage" <add>keywords = ["service, ls"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. 
<add> <add># service ls <add> <add>```Markdown <add>docker service ls --help <add> <add>Usage: docker service ls [OPTIONS] <add> <add>List services <add> <add>Aliases: <add> ls, list <add> <add>Options: <add> -f, --filter value Filter output based on conditions provided <add> --help Print usage <add> -q, --quiet Only display IDs <add>``` <add> <add>This command when run targeting a manager, lists services are running in the <add>swarm. <add> <add>On a manager node: <add>```bash <add>ID NAME REPLICAS IMAGE COMMAND <add>c8wgl7q4ndfd frontend 5/5 nginx:alpine <add>dmu1ept4cxcf redis 3/3 redis:3.0.6 <add>``` <add> <add>The `REPLICAS` column shows both the *actual* and *desired* number of tasks for <add>the service. <add> <add> <add>## Filtering <add> <add>The filtering flag (`-f` or `--filter`) format is of "key=value". If there is more <add>than one filter, then pass multiple flags (e.g., `--filter "foo=bar" --filter "bif=baz"`) <add> <add>The currently supported filters are: <add> <add>* [id](#id) <add>* [label](#label) <add>* [name](#name) <add> <add>#### ID <add> <add>The `id` filter matches all or part of a service's id. <add> <add>```bash <add>$ docker service ls -f "id=0bcjw" <add>ID NAME REPLICAS IMAGE COMMAND <add>0bcjwfh8ychr redis 1/1 redis:3.0.6 <add>``` <add> <add>#### Label <add> <add>The `label` filter matches services based on the presence of a `label` alone or <add>a `label` and a value. <add> <add>The following filter matches all services with a `project` label regardless of <add>its value: <add> <add>```bash <add>$ docker service ls --filter label=project <add>ID NAME REPLICAS IMAGE COMMAND <add>01sl1rp6nj5u frontend2 1/1 nginx:alpine <add>36xvvwwauej0 frontend 5/5 nginx:alpine <add>74nzcxxjv6fq backend 3/3 redis:3.0.6 <add>``` <add> <add>The following filter matches only services with the `project` label with the <add>`project-a` value. <add> <add>```bash <add>$ docker service ls --filter label=project=project-a <add>ID NAME REPLICAS IMAGE COMMAND <add>36xvvwwauej0 frontend 5/5 nginx:alpine <add>74nzcxxjv6fq backend 3/3 redis:3.0.6 <add>``` <add> <add> <add>#### Name <add> <add>The `name` filter matches on all or part of a tasks's name. <add> <add>The following filter matches services with a name containing `redis`. <add> <add>```bash <add>$ docker service ls --filter name=redis <add>ID NAME REPLICAS IMAGE COMMAND <add>0bcjwfh8ychr redis 1/1 redis:3.0.6 <add>``` <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service rm](service_rm.md) <add>* [service scale](service_scale.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_rm.md <add><!--[metadata]> <add>+++ <add>title = "service rm" <add>description = "The service rm command description and usage" <add>keywords = ["service, rm"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. <add> <add># service rm <add> <add>```Markdown <add>Usage: docker service rm [OPTIONS] SERVICE <add> <add>Remove a service <add> <add>Aliases: <add> rm, remove <add> <add>Options: <add> --help Print usage <add>``` <add> <add>Removes the specified services from the swarm. This command has to be run <add>targeting a manager node. 
<add> <add>For example, to remove the redis service: <add> <add>```bash <add>$ docker service rm redis <add>redis <add>$ docker service ls <add>ID NAME SCALE IMAGE COMMAND <add>``` <add> <add>> **Warning**: Unlike `docker rm`, this command does not ask for confirmation <add>> before removing a running service. <add> <add> <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service ls](service_ls.md) <add>* [service scale](service_scale.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_scale.md <add><!--[metadata]> <add>+++ <add>title = "service scale" <add>description = "The service scale command description and usage" <add>keywords = ["service, scale"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. <add> <add># service scale <add> <add> Usage: docker service scale SERVICE=REPLICAS [SERVICE=REPLICAS...] <add> <add> Scale one or multiple services <add> <add> Options: <add> --help Print usage <add> <add> <add>## Examples <add> <add>### Scale a service <add> <add>If you scale a service, you set the *desired* number of replicas. Even though <add>the command returns directly, actual scaling of the service may take some time. <add> <add>For example, the following command scales the "frontend" service to 50 tasks. <add> <add>```bash <add>$ docker service scale frontend=50 <add>frontend scaled to 50 <add>``` <add> <add>Directly afterwards, run `docker service ls`, to see the actual number of <add>replicas <add> <add>```bash <add>$ docker service ls --filter name=frontend <add> <add>ID NAME REPLICAS IMAGE COMMAND <add>3pr5mlvu3fh9 frontend 15/50 nginx:alpine <add>``` <add> <add>You can also scale a service using the [`docker service update`](service_update.md) <add>command. The following commands are therefore equivalent: <add> <add>```bash <add>$ docker service scale frontend=50 <add>$ docker service update --replicas=50 frontend <add>``` <add> <add>### Scale multiple services <add> <add>The `docker service scale` command allows you to set the desired number of <add>tasks for multiple services at once. The following example scales both the <add>backend and frontend services: <add> <add>```bash <add>$ docker service scale backend=3 frontend=5 <add>backend scaled to 3 <add>frontend scaled to 5 <add> <add>$ docker service ls <add>ID NAME REPLICAS IMAGE COMMAND <add>3pr5mlvu3fh9 frontend 5/5 nginx:alpine <add>74nzcxxjv6fq backend 3/3 redis:3.0.6 <add>``` <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md) <add>* [service tasks](service_tasks.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_tasks.md <add><!--[metadata]> <add>+++ <add>title = "service tasks" <add>description = "The service tasks command description and usage" <add>keywords = ["service, tasks"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. 
<add> <add># service tasks <add> <add>```Markdown <add>Usage: docker service tasks [OPTIONS] SERVICE <add> <add>List the tasks of a service <add> <add>Options: <add> -a, --all Display all tasks <add> -f, --filter value Filter output based on conditions provided <add> --help Print usage <add> -n, --no-resolve Do not map IDs to Names <add>``` <add> <add>Lists the tasks that are running as part of the specified service. This command <add>has to be run targeting a manager node. <add> <add> <add>## Examples <add> <add>### Listing the tasks that are part of a service <add> <add>The following command shows all the tasks that are part of the `redis` service: <add> <add>```bash <add>$ docker service tasks redis <add>ID NAME SERVICE IMAGE LAST STATE DESIRED STATE NODE <add>0qihejybwf1x5vqi8lgzlgnpq redis.1 redis redis:3.0.6 Running 8 seconds Running manager1 <add>bk658fpbex0d57cqcwoe3jthu redis.2 redis redis:3.0.6 Running 9 seconds Running worker2 <add>5ls5s5fldaqg37s9pwayjecrf redis.3 redis redis:3.0.6 Running 9 seconds Running worker1 <add>8ryt076polmclyihzx67zsssj redis.4 redis redis:3.0.6 Running 9 seconds Running worker1 <add>1x0v8yomsncd6sbvfn0ph6ogc redis.5 redis redis:3.0.6 Running 8 seconds Running manager1 <add>71v7je3el7rrw0osfywzs0lko redis.6 redis redis:3.0.6 Running 9 seconds Running worker2 <add>4l3zm9b7tfr7cedaik8roxq6r redis.7 redis redis:3.0.6 Running 9 seconds Running worker2 <add>9tfpyixiy2i74ad9uqmzp1q6o redis.8 redis redis:3.0.6 Running 9 seconds Running worker1 <add>3w1wu13yuplna8ri3fx47iwad redis.9 redis redis:3.0.6 Running 8 seconds Running manager1 <add>8eaxrb2fqpbnv9x30vr06i6vt redis.10 redis redis:3.0.6 Running 8 seconds Running manager1 <add>``` <add> <add> <add>## Filtering <add> <add>The filtering flag (`-f` or `--filter`) format is a `key=value` pair. If there <add>is more than one filter, then pass multiple flags (e.g. `--filter "foo=bar" --filter "bif=baz"`). <add>Multiple filter flags are combined as an `OR` filter. For example, <add>`-f type=custom -f type=builtin` returns both `custom` and `builtin` networks. <add> <add>The currently supported filters are: <add> <add>* [id](#id) <add>* [name](#name) <add> <add> <add>#### ID <add> <add>The `id` filter matches on all or a prefix of a task's ID. <add> <add>```bash <add>$ docker service tasks -f "id=8" redis <add>ID NAME SERVICE IMAGE LAST STATE DESIRED STATE NODE <add>8ryt076polmclyihzx67zsssj redis.4 redis redis:3.0.6 Running 4 minutes Running worker1 <add>8eaxrb2fqpbnv9x30vr06i6vt redis.10 redis redis:3.0.6 Running 4 minutes Running manager1 <add>``` <add> <add>#### Name <add> <add>The `name` filter matches on task names. 
<add> <add>```bash <add>$ docker service tasks -f "name=redis.1" redis <add>ID NAME SERVICE IMAGE DESIRED STATE LAST STATE NODE <add>0qihejybwf1x5vqi8lgzlgnpq redis.1 redis redis:3.0.6 Running Running 8 seconds manager1 <add>``` <add> <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md) <add>* [service scale](service_scale.md) <add>* [service update](service_update.md) <ide><path>docs/reference/commandline/service_update.md <add><!--[metadata]> <add>+++ <add>title = "service update" <add>description = "The service update command description and usage" <add>keywords = ["service, update"] <add>[menu.main] <add>parent = "smn_cli" <add>+++ <add><![end-metadata]--> <add> <add>**Warning:** this command is part of the Swarm management feature introduced in Docker 1.12, and might be subject to non backward-compatible changes. <add> <add># service update <add> <add>```Markdown <add>Usage: docker service update [OPTIONS] SERVICE <add> <add>Update a service <add> <add>Options: <add> --arg value Service command args (default []) <add> --command value Service command (default []) <add> --constraint value Placement constraints (default []) <add> --endpoint-mode string Endpoint mode(Valid values: VIP, DNSRR) <add> -e, --env value Set environment variables (default []) <add> --help Print usage <add> --image string Service image tag <add> -l, --label value Service labels (default []) <add> --limit-cpu value Limit CPUs (default 0.000) <add> --limit-memory value Limit Memory (default 0 B) <add> --mode string Service mode (replicated or global) (default "replicated") <add> -m, --mount value Attach a mount to the service <add> --name string Service name <add> --network value Network attachments (default []) <add> -p, --publish value Publish a port as a node port (default []) <add> --replicas value Number of tasks (default none) <add> --reserve-cpu value Reserve CPUs (default 0.000) <add> --reserve-memory value Reserve Memory (default 0 B) <add> --restart-condition string Restart when condition is met (none, on_failure, or any) <add> --restart-delay value Delay between restart attempts (default none) <add> --restart-max-attempts value Maximum number of restarts before giving up (default none) <add> --restart-window value Window used to evalulate the restart policy (default none) <add> --stop-grace-period value Time to wait before force killing a container (default none) <add> --update-delay duration Delay between updates <add> --update-parallelism uint Maximum number of tasks updated simultaneously <add> -u, --user string Username or UID <add> -w, --workdir string Working directory inside the container <add>``` <add> <add>Updates a service as described by the specified parameters. This command has to be run targeting a manager node. <add>The parameters are the same as [`docker service create`](service_create.md). Please look at the description there <add>for further information. <add> <add>## Examples <add> <add>### Update a service <add> <add>```bash <add>$ docker service update --limit-cpu 2 redis <add>``` <add> <add>## Related information <add> <add>* [service create](service_create.md) <add>* [service inspect](service_inspect.md) <add>* [service tasks](service_tasks.md) <add>* [service ls](service_ls.md) <add>* [service rm](service_rm.md)
8
Javascript
Javascript
use port number from env in tls socket test
c9e682d587e3095667423cd7ada1afea7561ff3e
<ide><path>test/parallel/test-tls-async-cb-after-socket-end.js <ide> server.on('resumeSession', function(id, cb) { <ide> next(); <ide> }); <ide> <del>server.listen(1443, function() { <add>server.listen(common.PORT, function() { <ide> var clientOpts = { <del> port: 1443, <add> port: common.PORT, <ide> rejectUnauthorized: false, <ide> session: false <ide> };
1
Ruby
Ruby
fix method name in `renderer` doc [ci skip]
67471a28f5e68fc6057b705c4bdce27d7edb1a78
<ide><path>actionpack/lib/action_controller/renderer.rb <ide> def with_defaults(defaults) <ide> end <ide> <ide> # Accepts a custom Rack environment to render templates in. <del> # It will be merged with ActionController::Renderer.defaults <add> # It will be merged with the default Rack environment defined by <add> # +ActionController::Renderer::DEFAULTS+. <ide> def initialize(controller, env, defaults) <ide> @controller = controller <ide> @defaults = defaults
1
Text
Text
replace vars with let and const
38c22252f4deb2dd7f10644404ae122f1fd0cb88
<ide><path>docs/docs/addons-animation.md <ide> class TodoList extends React.Component { <ide> } <ide> <ide> handleAdd() { <del> var newItems = this.state.items.concat([ <add> const newItems = this.state.items.concat([ <ide> prompt('Enter some text') <ide> ]); <ide> this.setState({items: newItems}); <ide> } <ide> <ide> handleRemove(i) { <del> var newItems = this.state.items.slice(); <add> let newItems = this.state.items.slice(); <ide> newItems.splice(i, 1); <ide> this.setState({items: newItems}); <ide> } <ide> <ide> render() { <del> var items = this.state.items.map((item, i) => ( <add> const items = this.state.items.map((item, i) => ( <ide> <div key={item} onClick={() => this.handleRemove(i)}> <ide> {item} <ide> </div> <ide> The example below would **not** work, because the `ReactCSSTransitionGroup` is b <ide> <ide> ```javascript{4,6,13} <ide> render() { <del> var items = this.state.items.map((item, i) => ( <add> const items = this.state.items.map((item, i) => ( <ide> <div key={item} onClick={() => this.handleRemove(i)}> <ide> <ReactCSSTransitionGroup transitionName="example"> <ide> {item} <ide> However if you only need to render a single child inside `ReactTransitionGroup`, <ide> <ide> ```javascript <ide> function FirstChild(props) { <del> var childrenArray = React.Children.toArray(props.children); <add> const childrenArray = React.Children.toArray(props.children); <ide> return childrenArray[0] || null; <ide> } <ide> ``` <ide><path>docs/docs/addons-create-fragment.md <ide> That is, if you have a component such as: <ide> <ide> ```js <ide> function Swapper(props) { <del> var children; <add> let children; <ide> if (props.swapped) { <ide> children = [props.rightChildren, props.leftChildren]; <ide> } else { <ide> To solve this problem, you can use the `createFragment` add-on to give keys to t <ide> Instead of creating arrays, we write: <ide> <ide> ```js <del>var createFragment = require('react-addons-create-fragment'); <add>import createFragment from 'react-addons-create-fragment' <ide> <ide> function Swapper(props) { <del> var children; <add> let children; <ide> if (props.swapped) { <ide> children = createFragment({ <ide> right: props.rightChildren, <ide><path>docs/docs/addons-test-utils.md <ide> You can then begin to assert facts about the output. For example, if your compon <ide> Then you can assert: <ide> <ide> ```javascript <del>var renderer = ReactTestUtils.createRenderer(); <add>const renderer = ReactTestUtils.createRenderer(); <ide> result = renderer.getRenderOutput(); <ide> expect(result.type).toBe('div'); <ide> expect(result.props.children).toEqual([ <ide> Simulate an event dispatch on a DOM node with optional `eventData` event data. <ide> <ide> ```javascript <ide> // <button ref="button">...</button> <del>var node = this.refs.button; <add>const node = this.refs.button; <ide> ReactTestUtils.Simulate.click(node); <ide> ``` <ide> <ide> **Changing the value of an input field and then pressing ENTER.** <ide> <ide> ```javascript <ide> // <input ref="input" /> <del>var node = this.refs.input; <add>const node = this.refs.input; <ide> node.value = 'giraffe'; <ide> ReactTestUtils.Simulate.change(node); <ide> ReactTestUtils.Simulate.keyDown(node, {key: "Enter", keyCode: 13, which: 13}); <ide><path>docs/docs/addons-update.md <ide> myData.a.b.push(9); <ide> You have no way of determining which data has changed since the previous copy has been overwritten. Instead, you need to create a new copy of `myData` and change only the parts of it that need to be changed. 
Then you can compare the old copy of `myData` with the new one in `shouldComponentUpdate()` using triple-equals: <ide> <ide> ```js <del>var newData = deepCopy(myData); <add>const newData = deepCopy(myData); <ide> newData.x.y.z = 7; <ide> newData.a.b.push(9); <ide> ``` <ide> <ide> Unfortunately, deep copies are expensive, and sometimes impossible. You can alleviate this by only copying objects that need to be changed and by reusing the objects that haven't changed. Unfortunately, in today's JavaScript this can be cumbersome: <ide> <ide> ```js <del>var newData = extend(myData, { <add>const newData = extend(myData, { <ide> x: extend(myData.x, { <ide> y: extend(myData.x.y, {z: 7}), <ide> }), <ide> While this is fairly performant (since it only makes a shallow copy of `log n` o <ide> ```js <ide> import update from 'react-addons-update'; <ide> <del>var newData = update(myData, { <add>const newData = update(myData, { <ide> x: {y: {z: {$set: 7}}}, <ide> a: {b: {$push: [9]}} <ide> }); <ide> The `$`-prefixed keys are called *commands*. The data structure they are "mutati <ide> ### Simple push <ide> <ide> ```js <del>var initialArray = [1, 2, 3]; <del>var newArray = update(initialArray, {$push: [4]}); // => [1, 2, 3, 4] <add>const initialArray = [1, 2, 3]; <add>const newArray = update(initialArray, {$push: [4]}); // => [1, 2, 3, 4] <ide> ``` <ide> `initialArray` is still `[1, 2, 3]`. <ide> <ide> ### Nested collections <ide> <ide> ```js <del>var collection = [1, 2, {a: [12, 17, 15]}]; <del>var newCollection = update(collection, {2: {a: {$splice: [[1, 1, 13, 14]]}}}); <add>const collection = [1, 2, {a: [12, 17, 15]}]; <add>const newCollection = update(collection, {2: {a: {$splice: [[1, 1, 13, 14]]}}}); <ide> // => [1, 2, {a: [12, 13, 14, 15]}] <ide> ``` <ide> This accesses `collection`'s index `2`, key `a`, and does a splice of one item starting from index `1` (to remove `17`) while inserting `13` and `14`. <ide> <ide> ### Updating a value based on its current one <ide> <ide> ```js <del>var obj = {a: 5, b: 3}; <del>var newObj = update(obj, {b: {$apply: function(x) {return x * 2;}}}); <add>const obj = {a: 5, b: 3}; <add>const newObj = update(obj, {b: {$apply: function(x) {return x * 2;}}}); <ide> // => {a: 5, b: 6} <ide> // This is equivalent, but gets verbose for deeply nested collections: <del>var newObj2 = update(obj, {b: {$set: obj.b * 2}}); <add>const newObj2 = update(obj, {b: {$set: obj.b * 2}}); <ide> ``` <ide> <ide> ### (Shallow) Merge <ide> <ide> ```js <del>var obj = {a: 5, b: 3}; <del>var newObj = update(obj, {$merge: {b: 6, c: 7}}); // => {a: 5, b: 6, c: 7} <add>const obj = {a: 5, b: 3}; <add>const newObj = update(obj, {$merge: {b: 6, c: 7}}); // => {a: 5, b: 6, c: 7} <ide> ``` <ide><path>docs/docs/jsx-in-depth.md <ide> You can also refer to a React component using dot-notation from within JSX. 
This <ide> ```js <ide> import React from 'react'; <ide> <del>var MyComponents = { <add>const MyComponents = { <ide> DatePicker: function(props) { <ide> return <div>imagine a {props.color} datepicker here</div>; <ide> } <ide> function Story1(props) { <ide> } <ide> <ide> function render2(props) { <del> var MyComponent = components[props.story]; <add> const MyComponent = components[props.story]; <ide> <ide> // Valid JSX <ide> return <MyComponent />; <ide> For `MyComponent`, The value of `props.foo` will be `10` because the expression <ide> <ide> ```js <ide> function NumberDescriber(props) { <del> var description; <add> let description; <ide> if (props.number % 2 == 0) { <ide> description = <strong>even</strong>; <ide> } else { <ide> If you already have `props` as an object, and you want to pass it in JSX, you ca <ide> <ide> ```js <ide> function render1() { <del> var props = {left: 'ben', right: 'hector'}; <add> const props = {left: 'ben', right: 'hector'}; <ide> return <MyComponent {...props} />; <ide> } <ide> <ide> function Item(props) { <ide> } <ide> <ide> function renderTodoList() { <del> var todos = ['finish doc', 'submit pr', 'nag dan to review']; <add> const todos = ['finish doc', 'submit pr', 'nag dan to review']; <ide> return ( <ide> <ul> <ide> {todos.map((message) => <Item key={message} message={message} />)} <ide> function ListOfTenThings() { <ide> <ide> // Calls the children callback numTimes to produce a repeated component <ide> function Repeat(props) { <del> var items = []; <del> for (var i = 0; i < numTimes; i++) { <add> let items = []; <add> for (let i = 0; i < numTimes; i++) { <ide> items.push(props.children(i)); <ide> } <ide> return <div>{items}</div> <ide><path>docs/docs/optimizing-performance.md <ide> If you're using Create React App, both `Object.assign` and the object spread syn <ide> Immutability makes tracking changes cheap. A change will always result in a new object so we only need to check if the reference to the object has changed. For example, in this regular JavaScript code: <ide> <ide> ```javascript <del>var x = { foo: "bar" }; <del>var y = x; <add>const x = { foo: "bar" }; <add>const y = x; <ide> y.foo = "baz"; <ide> x === y; // true <ide> ``` <ide> <ide> Although `y` was edited, since it's a reference to the same object as `x`, this comparison returns `true`. You can write similar code with immutable.js: <ide> <ide> ```javascript <del>var SomeRecord = Immutable.Record({ foo: null }); <del>var x = new SomeRecord({ foo: 'bar' }); <del>var y = x.set('foo', 'baz'); <add>const SomeRecord = Immutable.Record({ foo: null }); <add>const x = new SomeRecord({ foo: 'bar' }); <add>const y = x.set('foo', 'baz'); <ide> x === y; // false <ide> ``` <ide> <ide><path>docs/docs/react-without-jsx.md <ide> The component can either be provided as a string, or as a subclass of `React.Com <ide> If you get tired of typing `React.createElement` so much, one common pattern is to assign a shorthand: <ide> <ide> ```js <del>var e = React.createElement; <add>const e = React.createElement; <ide> <ide> ReactDOM.render( <ide> e('div', null, 'Hello World'), <ide><path>docs/docs/reference-events.md <ide> As such, you cannot access the event in an asynchronous way. <ide> function onClick(event) { <ide> console.log(event); // => nullified object. 
<ide> console.log(event.type); // => "click" <del> var eventType = event.type; // => "click" <add> const eventType = event.type; // => "click" <ide> <ide> setTimeout(function() { <ide> console.log(event.type); // => null <ide><path>docs/docs/typechecking-with-proptypes.md <ide> With `React.PropTypes.element` you can specify that only a single child can be p <ide> class MyComponent extends React.Component { <ide> render() { <ide> // This must be exactly one element or it will warn. <del> var children = this.props.children; <add> const children = this.props.children; <ide> return ( <ide> <div> <ide> {children} <ide><path>docs/docs/web-components.md <ide> function BrickFlipbox() { <ide> ## Using React in your Web Components <ide> <ide> ```javascript <del>var proto = Object.create(HTMLElement.prototype, { <add>const proto = Object.create(HTMLElement.prototype, { <ide> attachedCallback: { <ide> value: function() { <del> var mountPoint = document.createElement('span'); <add> const mountPoint = document.createElement('span'); <ide> this.createShadowRoot().appendChild(mountPoint); <ide> <del> var name = this.getAttribute('name'); <del> var url = 'https://www.google.com/search?q=' + encodeURIComponent(name); <add> const name = this.getAttribute('name'); <add> const url = 'https://www.google.com/search?q=' + encodeURIComponent(name); <ide> ReactDOM.render(<a href={url}>{name}</a>, mountPoint); <ide> } <ide> }
10
Text
Text
use code markup/markdown in headers
cdb727d6db0be76c3ed2585e71c0427809f377fb
<ide><path>doc/api/string_decoder.md <ide> decoder.write(Buffer.from([0x82])); <ide> console.log(decoder.end(Buffer.from([0xAC]))); <ide> ``` <ide> <del>## Class: StringDecoder <add>## Class: `StringDecoder` <ide> <del>### new StringDecoder(\[encoding\]) <add>### `new StringDecoder([encoding])` <ide> <!-- YAML <ide> added: v0.1.99 <ide> --> <ide> added: v0.1.99 <ide> <ide> Creates a new `StringDecoder` instance. <ide> <del>### stringDecoder.end(\[buffer\]) <add>### `stringDecoder.end([buffer])` <ide> <!-- YAML <ide> added: v0.9.3 <ide> --> <ide> substitution characters appropriate for the character encoding. <ide> If the `buffer` argument is provided, one final call to `stringDecoder.write()` <ide> is performed before returning the remaining input. <ide> <del>### stringDecoder.write(buffer) <add>### `stringDecoder.write(buffer)` <ide> <!-- YAML <ide> added: v0.1.99 <ide> changes:
1
PHP
PHP
apply fixes from styleci
481cd52e06cfa56c941d93ec5401438b6cbf7a23
<ide><path>types/Support/LazyCollection.php <ide> <ide> use Illuminate\Contracts\Support\Arrayable; <ide> use Illuminate\Support\LazyCollection; <del> <ide> use function PHPStan\Testing\assertType; <ide> <ide> $collection = new LazyCollection([new User]);
1
Java
Java
add support for shadows to rctvirtualtext
76abec8894d51d2cc68e190834c2bd80d41a2942
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/RCTVirtualText.java <ide> import android.text.SpannableStringBuilder; <ide> import android.text.TextUtils; <ide> <add>import com.facebook.react.bridge.ReadableMap; <ide> import com.facebook.react.uimanager.PixelUtil; <ide> import com.facebook.react.uimanager.annotations.ReactProp; <ide> import com.facebook.react.uimanager.ViewProps; <ide> private static final String ITALIC = "italic"; <ide> private static final String NORMAL = "normal"; <ide> <add> private static final String PROP_SHADOW_OFFSET = "textShadowOffset"; <add> private static final String PROP_SHADOW_RADIUS = "textShadowRadius"; <add> private static final String PROP_SHADOW_COLOR = "textShadowColor"; <add> private static final int DEFAULT_TEXT_SHADOW_COLOR = 0x55000000; <add> <ide> private FontStylingSpan mFontStylingSpan = FontStylingSpan.INSTANCE; <add> private ShadowStyleSpan mShadowStyleSpan = ShadowStyleSpan.INSTANCE; <ide> <ide> @Override <ide> protected void performCollectText(SpannableStringBuilder builder) { <ide> protected void performApplySpans(SpannableStringBuilder builder, int begin, int <ide> end, <ide> Spannable.SPAN_INCLUSIVE_EXCLUSIVE); <ide> <add> if (mShadowStyleSpan.getColor() != 0 && mShadowStyleSpan.getRadius() != 0) { <add> mShadowStyleSpan.freeze(); <add> <add> builder.setSpan( <add> mShadowStyleSpan, <add> begin, <add> end, <add> Spannable.SPAN_INCLUSIVE_EXCLUSIVE); <add> } <add> <ide> for (int i = 0, childCount = getChildCount(); i < childCount; ++i) { <ide> FlatTextShadowNode child = (FlatTextShadowNode) getChildAt(i); <ide> child.applySpans(builder); <ide> public void setFontStyle(@Nullable String fontStyleString) { <ide> } <ide> } <ide> <add> @ReactProp(name = PROP_SHADOW_OFFSET) <add> public void setTextShadowOffset(@Nullable ReadableMap offsetMap) { <add> float dx = 0; <add> float dy = 0; <add> if (offsetMap != null) { <add> if (offsetMap.hasKey("width")) { <add> dx = PixelUtil.toPixelFromDIP(offsetMap.getDouble("width")); <add> } <add> if (offsetMap.hasKey("height")) { <add> dy = PixelUtil.toPixelFromDIP(offsetMap.getDouble("height")); <add> } <add> } <add> <add> if (!mShadowStyleSpan.offsetMatches(dx, dy)) { <add> getShadowSpan().setOffset(dx, dy); <add> notifyChanged(false); <add> } <add> } <add> <add> @ReactProp(name = PROP_SHADOW_RADIUS) <add> public void setTextShadowRadius(float textShadowRadius) { <add> textShadowRadius = PixelUtil.toPixelFromDIP(textShadowRadius); <add> if (mShadowStyleSpan.getRadius() != textShadowRadius) { <add> getShadowSpan().setRadius(textShadowRadius); <add> notifyChanged(false); <add> } <add> } <add> <add> @ReactProp(name = PROP_SHADOW_COLOR, defaultInt = DEFAULT_TEXT_SHADOW_COLOR, customType = "Color") <add> public void setTextShadowColor(int textShadowColor) { <add> if (mShadowStyleSpan.getColor() != textShadowColor) { <add> getShadowSpan().setColor(textShadowColor); <add> notifyChanged(false); <add> } <add> } <add> <ide> /** <ide> * Returns font size for this node. <ide> * When called on RCTText, this value is never -1 (unset). <ide> protected final FontStylingSpan getSpan() { <ide> return mFontStylingSpan; <ide> } <ide> <add> private final ShadowStyleSpan getShadowSpan() { <add> if (mShadowStyleSpan.isFrozen()) { <add> mShadowStyleSpan = mShadowStyleSpan.mutableCopy(); <add> } <add> return mShadowStyleSpan; <add> } <add> <ide> /** <ide> * Return -1 if the input string is not a valid numeric fontWeight (100, 200, ..., 900), otherwise <ide> * return the weight. 
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/ShadowStyleSpan.java <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * All rights reserved. <add> * <add> * This source code is licensed under the BSD-style license found in the <add> * LICENSE file in the root directory of this source tree. An additional grant <add> * of patent rights can be found in the PATENTS file in the same directory. <add> */ <add> <add>package com.facebook.react.flat; <add> <add>import android.text.TextPaint; <add>import android.text.style.CharacterStyle; <add> <add>/* package */ final class ShadowStyleSpan extends CharacterStyle { <add> <add> /* package */ static final ShadowStyleSpan INSTANCE = new ShadowStyleSpan(0, 0, 0, 0, true); <add> <add> private float mDx; <add> private float mDy; <add> private float mRadius; <add> private int mColor; <add> private boolean mFrozen; <add> <add> private ShadowStyleSpan(float dx, float dy, float radius, int color, boolean frozen) { <add> mDx = dx; <add> mDy = dy; <add> mRadius = radius; <add> mColor = color; <add> mFrozen = frozen; <add> } <add> <add> public boolean offsetMatches(float dx, float dy) { <add> return mDx == dx && mDy == dy; <add> } <add> <add> public void setOffset(float dx, float dy) { <add> mDx = dx; <add> mDy = dy; <add> } <add> <add> public float getRadius() { <add> return mRadius; <add> } <add> <add> public void setRadius(float radius) { <add> mRadius = radius; <add> } <add> <add> public int getColor() { <add> return mColor; <add> } <add> <add> public void setColor(int color) { <add> mColor = color; <add> } <add> <add> /* package */ ShadowStyleSpan mutableCopy() { <add> return new ShadowStyleSpan(mDx, mDy, mRadius, mColor, false); <add> } <add> <add> /* package */ boolean isFrozen() { <add> return mFrozen; <add> } <add> <add> /* package */ void freeze() { <add> mFrozen = true; <add> } <add> <add> @Override <add> public void updateDrawState(TextPaint textPaint) { <add> textPaint.setShadowLayer(mRadius, mDx, mDy, mColor); <add> } <add>}
2
Go
Go
add benchmark for etchosts.delete
de7607f50920d3bf199ce46f77dc06c79f51848a
<ide><path>libnetwork/etchosts/etchosts_test.go <ide> func TestDelete(t *testing.T) { <ide> Hosts: "testhostname2", <ide> IP: "2.2.2.2", <ide> }, <add> Record{ <add> Hosts: "testhostname3", <add> IP: "3.3.3.3", <add> }, <ide> }); err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestDelete(t *testing.T) { <ide> Hosts: "testhostname1", <ide> IP: "1.1.1.1", <ide> }, <add> Record{ <add> Hosts: "testhostname3", <add> IP: "3.3.3.3", <add> }, <ide> }); err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestConcurrentWrites(t *testing.T) { <ide> t.Fatalf("Expected to find '%s' got '%s'", expected, content) <ide> } <ide> } <add> <add>func benchDelete(b *testing.B) { <add> b.StopTimer() <add> file, err := ioutil.TempFile("", "") <add> if err != nil { <add> b.Fatal(err) <add> } <add> defer func() { <add> b.StopTimer() <add> file.Close() <add> os.Remove(file.Name()) <add> b.StartTimer() <add> }() <add> <add> err = Build(file.Name(), "", "", "", nil) <add> if err != nil { <add> b.Fatal(err) <add> } <add> <add> var records []Record <add> var toDelete []Record <add> for i := 0; i < 255; i++ { <add> record := Record{ <add> Hosts: fmt.Sprintf("testhostname%d", i), <add> IP: fmt.Sprintf("%d.%d.%d.%d", i, i, i, i), <add> } <add> records = append(records, record) <add> if i%2 == 0 { <add> toDelete = append(records, record) <add> } <add> } <add> <add> if err := Add(file.Name(), records); err != nil { <add> b.Fatal(err) <add> } <add> <add> b.StartTimer() <add> if err := Delete(file.Name(), toDelete); err != nil { <add> b.Fatal(err) <add> } <add>} <add> <add>func BenchmarkDelete(b *testing.B) { <add> for i := 0; i < b.N; i++ { <add> benchDelete(b) <add> } <add>}
1
Javascript
Javascript
increase coverage for util.inspect()
c19ea7bd810e91f1359f2d7ee53302e268a78059
<ide><path>test/parallel/test-util-inspect.js <ide> assert.strictEqual( <ide> // Consistency check. <ide> assert(fullObjectGraph(global).has(Function.prototype)); <ide> } <add> <add>{ <add> // Confirm that own constructor value displays correctly. <add> <add> function Fhqwhgads() {} <add> <add> const sterrance = new Fhqwhgads(); <add> sterrance.constructor = Fhqwhgads; <add> <add> assert.strictEqual( <add> util.inspect(sterrance, { showHidden: true }), <add> 'Fhqwhgads {\n' + <add> ' constructor: <ref *1> [Function: Fhqwhgads] {\n' + <add> ' [length]: 0,\n' + <add> " [name]: 'Fhqwhgads',\n" + <add> ' [prototype]: { [constructor]: [Circular *1] }\n' + <add> ' }\n' + <add> '}' <add> ); <add>}
1
Java
Java
write charsequence instances to databuffers
6361b0cb236e62b24a07af5d0a20f42d78d4efaa
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DataBuffer.java <ide> import java.io.InputStream; <ide> import java.io.OutputStream; <ide> import java.nio.ByteBuffer; <add>import java.nio.CharBuffer; <add>import java.nio.charset.Charset; <add>import java.nio.charset.CharsetEncoder; <add>import java.nio.charset.CoderResult; <add>import java.nio.charset.CodingErrorAction; <ide> import java.util.function.IntPredicate; <ide> <add>import org.springframework.util.Assert; <add> <ide> /** <ide> * Basic abstraction over byte buffers. <ide> * <ide> * can also be used on non-Netty platforms (i.e. Servlet containers). <ide> * <ide> * @author Arjen Poutsma <add> * @author Brian Clozel <ide> * @since 5.0 <ide> * @see DataBufferFactory <ide> */ <ide> public interface DataBuffer { <ide> */ <ide> DataBuffer capacity(int capacity); <ide> <add> /** <add> * Ensure that the current buffer has enough {@link #writableByteCount()} <add> * to write the amount of data given as an argument. If not, the missing <add> * capacity will be added to the buffer. <add> * @param capacity the writable capacity to check for <add> * @return this buffer <add> * @since 5.1.4 <add> */ <add> DataBuffer ensureCapacity(int capacity); <add> <ide> /** <ide> * Return the position from which this buffer will read. <ide> * @return the read position <ide> public interface DataBuffer { <ide> DataBuffer write(byte b); <ide> <ide> /** <del> * Write the given source into this buffer, startin at the current writing position <add> * Write the given source into this buffer, starting at the current writing position <ide> * of this buffer. <ide> * @param source the bytes to be written into this buffer <ide> * @return this buffer <ide> public interface DataBuffer { <ide> */ <ide> DataBuffer write(ByteBuffer... buffers); <ide> <add> /** <add> * Write the given {@code CharSequence} using the given {@code Charset}, <add> * starting at the current writing position. <add> * @param charSequence the char sequence to write into this buffer <add> * @param charset the charset to encode the char sequence with <add> * @return this buffer <add> * @since 5.1.4 <add> */ <add> default DataBuffer write(CharSequence charSequence, Charset charset) { <add> Assert.notNull(charSequence, "'charSequence' must not be null"); <add> Assert.notNull(charset, "'charset' must not be null"); <add> CharsetEncoder charsetEncoder = charset.newEncoder() <add> .onMalformedInput(CodingErrorAction.REPLACE) <add> .onUnmappableCharacter(CodingErrorAction.REPLACE); <add> CharBuffer inBuffer = CharBuffer.wrap(charSequence); <add> int estimatedSize = (int) (inBuffer.remaining() * charsetEncoder.averageBytesPerChar()); <add> ByteBuffer outBuffer = ensureCapacity(estimatedSize) <add> .asByteBuffer(writePosition(), writableByteCount()); <add> for (; ; ) { <add> CoderResult cr = inBuffer.hasRemaining() ? 
<add> charsetEncoder.encode(inBuffer, outBuffer, true) : CoderResult.UNDERFLOW; <add> if (cr.isUnderflow()) { <add> cr = charsetEncoder.flush(outBuffer); <add> } <add> if (cr.isUnderflow()) { <add> break; <add> } <add> if (cr.isOverflow()) { <add> writePosition(outBuffer.position()); <add> int maximumSize = (int) (inBuffer.remaining() * charsetEncoder.maxBytesPerChar()); <add> ensureCapacity(maximumSize); <add> outBuffer = asByteBuffer(writePosition(), writableByteCount()); <add> } <add> } <add> writePosition(outBuffer.position()); <add> return this; <add> } <add> <ide> /** <ide> * Create a new {@code DataBuffer} whose contents is a shared subsequence of this <ide> * data buffer's content. Data between this data buffer and the returned buffer is <ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DefaultDataBuffer.java <ide> else if (newCapacity < oldCapacity) { <ide> return this; <ide> } <ide> <add> @Override <add> public DataBuffer ensureCapacity(int length) { <add> if (length > writableByteCount()) { <add> int newCapacity = calculateCapacity(this.writePosition + length); <add> capacity(newCapacity); <add> } <add> return this; <add> } <add> <ide> private static ByteBuffer allocate(int capacity, boolean direct) { <ide> return direct ? ByteBuffer.allocateDirect(capacity) : ByteBuffer.allocate(capacity); <ide> } <ide> public OutputStream asOutputStream() { <ide> return new DefaultDataBufferOutputStream(); <ide> } <ide> <del> private void ensureCapacity(int length) { <del> if (length <= writableByteCount()) { <del> return; <del> } <del> int newCapacity = calculateCapacity(this.writePosition + length); <del> capacity(newCapacity); <del> } <ide> <ide> /** <ide> * Calculate the capacity of the buffer. <ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/NettyDataBuffer.java <ide> import java.io.InputStream; <ide> import java.io.OutputStream; <ide> import java.nio.ByteBuffer; <add>import java.nio.charset.Charset; <add>import java.nio.charset.StandardCharsets; <ide> import java.util.function.IntPredicate; <ide> <ide> import io.netty.buffer.ByteBuf; <ide> import io.netty.buffer.ByteBufInputStream; <ide> import io.netty.buffer.ByteBufOutputStream; <add>import io.netty.buffer.ByteBufUtil; <ide> <ide> import org.springframework.util.Assert; <ide> import org.springframework.util.ObjectUtils; <ide> public NettyDataBuffer capacity(int capacity) { <ide> return this; <ide> } <ide> <add> @Override <add> public DataBuffer ensureCapacity(int capacity) { <add> this.byteBuf.ensureWritable(capacity); <add> return this; <add> } <add> <ide> @Override <ide> public byte read() { <ide> return this.byteBuf.readByte(); <ide> public NettyDataBuffer write(DataBuffer... buffers) { <ide> if (!ObjectUtils.isEmpty(buffers)) { <ide> if (hasNettyDataBuffers(buffers)) { <ide> ByteBuf[] nativeBuffers = new ByteBuf[buffers.length]; <del> for (int i = 0 ; i < buffers.length; i++) { <add> for (int i = 0; i < buffers.length; i++) { <ide> nativeBuffers[i] = ((NettyDataBuffer) buffers[i]).getNativeBuffer(); <ide> } <ide> write(nativeBuffers); <ide> } <ide> else { <ide> ByteBuffer[] byteBuffers = new ByteBuffer[buffers.length]; <del> for (int i = 0 ; i < buffers.length; i++) { <add> for (int i = 0; i < buffers.length; i++) { <ide> byteBuffers[i] = buffers[i].asByteBuffer(); <ide> <ide> } <ide> public NettyDataBuffer write(ByteBuf... 
byteBufs) { <ide> return this; <ide> } <ide> <add> @Override <add> public DataBuffer write(CharSequence charSequence, Charset charset) { <add> Assert.notNull(charSequence, "'charSequence' must not be null"); <add> Assert.notNull(charset, "'charset' must not be null"); <add> if (StandardCharsets.UTF_8.equals(charset)) { <add> ByteBufUtil.writeUtf8(this.byteBuf, charSequence); <add> } <add> else if (StandardCharsets.US_ASCII.equals(charset)) { <add> ByteBufUtil.writeAscii(this.byteBuf, charSequence); <add> } <add> else { <add> return PooledDataBuffer.super.write(charSequence, charset); <add> } <add> return this; <add> } <add> <ide> @Override <ide> public NettyDataBuffer slice(int index, int length) { <ide> ByteBuf slice = this.byteBuf.slice(index, length); <ide><path>spring-core/src/test/java/org/springframework/core/io/buffer/DataBufferTests.java <ide> import java.io.InputStream; <ide> import java.io.OutputStream; <ide> import java.nio.ByteBuffer; <add>import java.nio.charset.StandardCharsets; <ide> import java.util.Arrays; <ide> <ide> import org.junit.Test; <ide> public void writeAndRead() { <ide> release(buffer); <ide> } <ide> <add> @Test <add> public void writeNullString() { <add> DataBuffer buffer = createDataBuffer(1); <add> try { <add> buffer.write(null, StandardCharsets.UTF_8); <add> fail("IllegalArgumentException expected"); <add> } <add> catch (IllegalArgumentException exc) { <add> } <add> finally { <add> release(buffer); <add> } <add> } <add> <add> @Test <add> public void writeNullCharset() { <add> DataBuffer buffer = createDataBuffer(1); <add> try { <add> buffer.write("test", null); <add> fail("IllegalArgumentException expected"); <add> } <add> catch (IllegalArgumentException exc) { <add> } <add> finally { <add> release(buffer); <add> } <add> } <add> <add> @Test <add> public void writeUtf8String() { <add> DataBuffer buffer = createDataBuffer(6); <add> buffer.write("Spring", StandardCharsets.UTF_8); <add> <add> byte[] result = new byte[6]; <add> buffer.read(result); <add> <add> assertArrayEquals("Spring".getBytes(StandardCharsets.UTF_8), result); <add> release(buffer); <add> } <add> <add> @Test <add> public void writeUtf8StringOutGrowsCapacity() { <add> DataBuffer buffer = createDataBuffer(5); <add> buffer.write("Spring €", StandardCharsets.UTF_8); <add> <add> byte[] result = new byte[10]; <add> buffer.read(result); <add> <add> assertArrayEquals("Spring €".getBytes(StandardCharsets.UTF_8), result); <add> release(buffer); <add> } <add> <add> @Test <add> public void writeIsoString() { <add> DataBuffer buffer = createDataBuffer(3); <add> buffer.write("\u00A3", StandardCharsets.ISO_8859_1); <add> <add> byte[] result = new byte[1]; <add> buffer.read(result); <add> <add> assertArrayEquals("\u00A3".getBytes(StandardCharsets.ISO_8859_1), result); <add> release(buffer); <add> } <add> <ide> @Test <ide> public void inputStream() throws IOException { <ide> DataBuffer buffer = createDataBuffer(4); <ide><path>spring-core/src/test/java/org/springframework/core/io/buffer/LeakAwareDataBuffer.java <ide> import java.io.InputStream; <ide> import java.io.OutputStream; <ide> import java.nio.ByteBuffer; <add>import java.nio.charset.Charset; <ide> import java.util.function.IntPredicate; <ide> <ide> import org.springframework.util.Assert; <ide> public DataBuffer capacity(int newCapacity) { <ide> return this.delegate.capacity(newCapacity); <ide> } <ide> <add> @Override <add> public DataBuffer ensureCapacity(int capacity) { <add> return this.delegate.ensureCapacity(capacity); <add> } <add> <ide> @Override 
<ide> public byte getByte(int index) { <ide> return this.delegate.getByte(index); <ide> public DataBuffer write(ByteBuffer... byteBuffers) { <ide> return this.delegate.write(byteBuffers); <ide> } <ide> <add> @Override <add> public DataBuffer write(CharSequence charSequence, Charset charset) { <add> return this.delegate.write(charSequence, charset); <add> } <add> <ide> @Override <ide> public DataBuffer slice(int index, int length) { <ide> return this.delegate.slice(index, length); <ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/UndertowServerHttpRequest.java <ide> import java.net.URI; <ide> import java.net.URISyntaxException; <ide> import java.nio.ByteBuffer; <add>import java.nio.charset.Charset; <ide> import java.util.function.IntPredicate; <ide> import javax.net.ssl.SSLSession; <ide> <ide> public DataBuffer capacity(int newCapacity) { <ide> return this.dataBuffer.capacity(newCapacity); <ide> } <ide> <add> @Override <add> public DataBuffer ensureCapacity(int capacity) { <add> return this.dataBuffer.ensureCapacity(capacity); <add> } <add> <ide> @Override <ide> public byte getByte(int index) { <ide> return this.dataBuffer.getByte(index); <ide> public DataBuffer write( <ide> return this.dataBuffer.write(byteBuffers); <ide> } <ide> <add> @Override <add> public DataBuffer write(CharSequence charSequence, Charset charset) { <add> return this.dataBuffer.write(charSequence, charset); <add> } <add> <ide> @Override <ide> public DataBuffer slice(int index, int length) { <ide> return this.dataBuffer.slice(index, length); <ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/BodyInsertersTests.java <ide> import java.util.Optional; <ide> <ide> import com.fasterxml.jackson.annotation.JsonView; <add>import org.junit.Assert; <ide> import org.junit.Before; <ide> import org.junit.Test; <ide> import reactor.core.publisher.Flux; <ide> import org.springframework.core.io.buffer.DataBufferUtils; <ide> import org.springframework.core.io.buffer.DefaultDataBuffer; <ide> import org.springframework.core.io.buffer.DefaultDataBufferFactory; <add>import org.springframework.core.io.buffer.support.DataBufferTestUtils; <ide> import org.springframework.http.HttpMethod; <ide> import org.springframework.http.HttpRange; <ide> import org.springframework.http.ReactiveHttpOutputMessage; <ide> public void ofString() { <ide> MockServerHttpResponse response = new MockServerHttpResponse(); <ide> Mono<Void> result = inserter.insert(response, this.context); <ide> StepVerifier.create(result).expectComplete().verify(); <del> <del> DataBuffer buffer = new DefaultDataBufferFactory().wrap(body.getBytes(UTF_8)); <ide> StepVerifier.create(response.getBody()) <del> .expectNext(buffer) <add> .consumeNextWith(buf -> { <add> String actual = DataBufferTestUtils.dumpString(buf, UTF_8); <add> Assert.assertEquals("foo", actual); <add> }) <ide> .expectComplete() <ide> .verify(); <ide> } <ide> public void ofPublisher() { <ide> MockServerHttpResponse response = new MockServerHttpResponse(); <ide> Mono<Void> result = inserter.insert(response, this.context); <ide> StepVerifier.create(result).expectComplete().verify(); <del> <del> ByteBuffer byteBuffer = ByteBuffer.wrap("foo".getBytes(UTF_8)); <del> DataBuffer buffer = new DefaultDataBufferFactory().wrap(byteBuffer); <ide> StepVerifier.create(response.getBody()) <del> .expectNext(buffer) <add> .consumeNextWith(buf -> { <add> String actual = DataBufferTestUtils.dumpString(buf, UTF_8); <add> Assert.assertEquals("foo", actual); <add> }) <ide> 
.expectComplete() <ide> .verify(); <ide> }
7
Javascript
Javascript
update cdn on bump
5a79b42a460e95e6adbd27f03d333183f6c0f2c1
<ide><path>gulpfile.js <ide> function bumpTask(complete) { <ide> choices: choices <ide> }, function(res) { <ide> var increment = res.version.split(' ')[0], <del> newVersion = semver.inc(package.version, increment); <add> newVersion = semver.inc(package.version, increment), <add> oldVersion = package.version; <ide> <ide> // Set the new versions into the bower/package object <ide> package.version = newVersion; <ide> function bumpTask(complete) { <ide> // Write these to their own files, then build the output <ide> fs.writeFileSync('package.json', JSON.stringify(package, null, 2)); <ide> fs.writeFileSync('bower.json', JSON.stringify(bower, null, 2)); <add> <add> var oldCDN = 'https://cdnjs.cloudflare.com/ajax/libs/Chart.js/'+oldVersion+'/Chart.min.js', <add> newCDN = 'https://cdnjs.cloudflare.com/ajax/libs/Chart.js/'+newVersion+'/Chart.min.js'; <add> <add> gulp.src(['./README.md']) <add> .pipe(replace(oldCDN, newCDN)) <add> .pipe(gulp.dest('./')); <ide> <ide> complete(); <ide> });
1
Python
Python
add 2.2.5 to revision heads map
4993c7c94185608510cc9e349bbc74f75f4a7fdc
<ide><path>airflow/utils/db.py <ide> "2.2.2": "7b2661a43ba3", <ide> "2.2.3": "be2bfac3da23", <ide> "2.2.4": "587bdf053233", <add> "2.2.5": "587bdf053233", <ide> } <ide> <ide>
1
Text
Text
fix typo in writing-and-running-benchmarks.md
46c3dd701a65702f7e8fc97cad2faae17d4ff24f
<ide><path>doc/guides/writing-and-running-benchmarks.md <ide> an example. This pull request _claims_ to improve the performance of the <ide> <ide> First build two versions of Node.js, one from the master branch (here called <ide> `./node-master`) and another with the pull request applied (here called <del>`./node-pr-5135`). <add>`./node-pr-5134`). <ide> <ide> The `compare.js` tool will then produce a csv file with the benchmark results. <ide>
1
Python
Python
remove use_remote_tpu as it is deprecated
4457c1a81c8e2b1f3fa50cc5362686cdab8e8bd9
<ide><path>official/modeling/model_training_utils.py <ide> def run_customized_training_loop( <ide> eval_steps=None, <ide> metric_fn=None, <ide> init_checkpoint=None, <del> use_remote_tpu=False, <ide> custom_callbacks=None, <ide> run_eagerly=False): <ide> """Run BERT pretrain model training using low-level API. <ide> def run_customized_training_loop( <ide> after every epoch. <ide> init_checkpoint: Optional checkpoint to load to `sub_model` returned by <ide> `model_fn`. <del> use_remote_tpu: Ignored, will be removed in the future. <ide> custom_callbacks: A list of Keras Callbacks objects to run during <ide> training. More specifically, `on_batch_begin()`, `on_batch_end()`, <ide> methods are invoked during training. <ide> def run_customized_training_loop( <ide> attribute or when required parameters are set to none. (2) eval args are <ide> not specified correctly. (3) metric_fn must be a callable if specified. <ide> """ <del> # TODO(bfontain): Remove use_remote_tpu once there are no models using it. <del> del use_remote_tpu <ide> <ide> if _sentinel is not None: <ide> raise ValueError('only call `run_customized_training_loop()` '
1
Text
Text
use serial comma in addons docs
1a0a058fdafde8b0a2aa263fc03fab34be530e6b
<ide><path>doc/api/addons.md <ide> _Addons_ are dynamically-linked shared objects written in C++. The <ide> Addons provide an interface between JavaScript and C/C++ libraries. <ide> <ide> There are three options for implementing addons: Node-API, nan, or direct <del>use of internal V8, libuv and Node.js libraries. Unless there is a need for <add>use of internal V8, libuv, and Node.js libraries. Unless there is a need for <ide> direct access to functionality which is not exposed by Node-API, use Node-API. <ide> Refer to [C/C++ addons with Node-API](n-api.md) for more information on <ide> Node-API. <ide> involving knowledge of several components and APIs: <ide> <ide> * Node.js includes other statically linked libraries including OpenSSL. These <ide> other libraries are located in the `deps/` directory in the Node.js source <del> tree. Only the libuv, OpenSSL, V8 and zlib symbols are purposefully <add> tree. Only the libuv, OpenSSL, V8, and zlib symbols are purposefully <ide> re-exported by Node.js and may be used to various extents by addons. See <ide> [Linking to libraries included with Node.js][] for additional information. <ide> <ide> try { <ide> <ide> ### Linking to libraries included with Node.js <ide> <del>Node.js uses statically linked libraries such as V8, libuv and OpenSSL. All <add>Node.js uses statically linked libraries such as V8, libuv, and OpenSSL. All <ide> addons are required to link to V8 and may link to any of the other dependencies <ide> as well. Typically, this is as simple as including the appropriate <ide> `#include <...>` statements (e.g. `#include <v8.h>`) and `node-gyp` will locate
1
Ruby
Ruby
remove a const_defined? check
4651b5214f4d90e16cb5ec8e832ff18dc06221f6
<ide><path>Library/Homebrew/test/test_requirement.rb <ide> def test_dsl_build <ide> end <ide> <ide> def test_infer_name_from_class <del> klass, const = self.class, :FooRequirement <add> const = :FooRequirement <add> klass = self.class <add> <ide> klass.const_set(const, Class.new(Requirement)) <del> assert_equal "foo", klass.const_get(const).new.name <del> ensure <del> klass.send(:remove_const, const) if klass.const_defined?(const) <add> <add> begin <add> assert_equal "foo", klass.const_get(const).new.name <add> ensure <add> klass.send(:remove_const, const) <add> end <ide> end <ide> <ide> def test_dsl_default_formula
1
Go
Go
use image id if tag has been updated
2167f40a766d1909443695d96bcecc3d039fb482
<ide><path>daemon/list.go <ide> func (daemon *Daemon) Containers(config *ContainersConfig) ([]*types.Container, <ide> ID: container.ID, <ide> Names: names[container.ID], <ide> } <del> newC.Image = container.Config.Image <add> <add> img, err := daemon.Repositories().LookupImage(container.Config.Image) <add> if err != nil { <add> // If the image can no longer be found by its original reference, <add> // it makes sense to show the ID instead of a stale reference. <add> newC.Image = container.ImageID <add> } else if container.ImageID == img.ID { <add> newC.Image = container.Config.Image <add> } else { <add> newC.Image = container.ImageID <add> } <add> <ide> if len(container.Args) > 0 { <ide> args := []string{} <ide> for _, arg := range container.Args { <ide><path>integration-cli/docker_cli_ps_test.go <ide> func (s *DockerSuite) TestPsDefaultFormatAndQuiet(c *check.C) { <ide> c.Fatalf("Expected to print only the container id, got %v\n", out) <ide> } <ide> } <add> <add>// Test for GitHub issue #12595 <add>func (s *DockerSuite) TestPsImageIDAfterUpdate(c *check.C) { <add> <add> originalImageName := "busybox:TestPsImageIDAfterUpdate-original" <add> updatedImageName := "busybox:TestPsImageIDAfterUpdate-updated" <add> <add> runCmd := exec.Command(dockerBinary, "tag", "busybox:latest", originalImageName) <add> out, _, err := runCommandWithOutput(runCmd) <add> c.Assert(err, check.IsNil) <add> <add> originalImageID, err := getIDByName(originalImageName) <add> c.Assert(err, check.IsNil) <add> <add> runCmd = exec.Command(dockerBinary, "run", "-d", originalImageName, "top") <add> out, _, err = runCommandWithOutput(runCmd) <add> c.Assert(err, check.IsNil) <add> containerID := strings.TrimSpace(out) <add> <add> linesOut, err := exec.Command(dockerBinary, "ps", "--no-trunc").CombinedOutput() <add> c.Assert(err, check.IsNil) <add> <add> lines := strings.Split(strings.TrimSpace(string(linesOut)), "\n") <add> // skip header <add> lines = lines[1:] <add> c.Assert(len(lines), check.Equals, 1) <add> <add> for _, line := range lines { <add> f := strings.Fields(line) <add> c.Assert(f[1], check.Equals, originalImageName) <add> } <add> <add> runCmd = exec.Command(dockerBinary, "commit", containerID, updatedImageName) <add> out, _, err = runCommandWithOutput(runCmd) <add> c.Assert(err, check.IsNil) <add> <add> runCmd = exec.Command(dockerBinary, "tag", "-f", updatedImageName, originalImageName) <add> out, _, err = runCommandWithOutput(runCmd) <add> c.Assert(err, check.IsNil) <add> <add> linesOut, err = exec.Command(dockerBinary, "ps", "--no-trunc").CombinedOutput() <add> c.Assert(err, check.IsNil) <add> <add> lines = strings.Split(strings.TrimSpace(string(linesOut)), "\n") <add> // skip header <add> lines = lines[1:] <add> c.Assert(len(lines), check.Equals, 1) <add> <add> for _, line := range lines { <add> f := strings.Fields(line) <add> c.Assert(f[1], check.Equals, originalImageID) <add> } <add> <add>}
2
Python
Python
use explicit nat in test_structure_format
0724f58bf999f844930a0b49dba449e0f12e5bd6
<ide><path>numpy/core/tests/test_arrayprint.py <ide> def test_structure_format(self): <ide> <ide> # for issue #5692 <ide> A = np.zeros(shape=10, dtype=[("A", "M8[s]")]) <del> A[5:].fill(np.nan) <add> A[5:].fill(np.datetime64('NaT')) <ide> assert_equal(np.array2string(A), <ide> "[('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',) " + <ide> "('1970-01-01T00:00:00',)\n ('1970-01-01T00:00:00',) " +
1
PHP
PHP
use defaultformatter with arraylog
c91320a782776269c8ea87001733db4b8b1fc60e
<ide><path>src/Log/Engine/ArrayLog.php <ide> */ <ide> namespace Cake\Log\Engine; <ide> <add>use Cake\Log\Formatter\DefaultFormatter; <add> <ide> /** <ide> * Array logger. <ide> * <ide> */ <ide> class ArrayLog extends BaseLog <ide> { <add> /** <add> * Default config for this class <add> * <add> * @var array <add> */ <add> protected $_defaultConfig = [ <add> 'levels' => [], <add> 'scopes' => [], <add> 'formatter' => [ <add> 'className' => DefaultFormatter::class, <add> 'includeDate' => false, <add> ], <add> ]; <add> <ide> /** <ide> * Captured messages <ide> * <ide> class ArrayLog extends BaseLog <ide> */ <ide> public function log($level, $message, array $context = []) <ide> { <del> $this->content[] = $level . ' ' . $this->_format($message, $context); <add> $message = $this->_format($message, $context); <add> $this->content[] = $this->formatter->format($level, $message, $context); <ide> } <ide> <ide> /** <ide><path>tests/TestCase/Database/ConnectionTest.php <ide> public function testLogFunction() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(1, $messages); <del> $this->assertSame('debug connection=test duration=0 rows=0 SELECT 1', $messages[0]); <add> $this->assertSame('debug: connection=test duration=0 rows=0 SELECT 1', $messages[0]); <ide> } <ide> <ide> /** <ide> public function testLogBeginRollbackTransaction() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(2, $messages); <del> $this->assertSame('debug connection= duration=0 rows=0 BEGIN', $messages[0]); <del> $this->assertSame('debug connection= duration=0 rows=0 ROLLBACK', $messages[1]); <add> $this->assertSame('debug: connection= duration=0 rows=0 BEGIN', $messages[0]); <add> $this->assertSame('debug: connection= duration=0 rows=0 ROLLBACK', $messages[1]); <ide> } <ide> <ide> /** <ide> public function testLogCommitTransaction() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(2, $messages); <del> $this->assertSame('debug connection= duration=0 rows=0 BEGIN', $messages[0]); <del> $this->assertSame('debug connection= duration=0 rows=0 COMMIT', $messages[1]); <add> $this->assertSame('debug: connection= duration=0 rows=0 BEGIN', $messages[0]); <add> $this->assertSame('debug: connection= duration=0 rows=0 COMMIT', $messages[1]); <ide> } <ide> <ide> /** <ide><path>tests/TestCase/Database/Log/LoggingStatementTest.php <ide> public function testExecuteNoParams() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(1, $messages); <del> $this->assertMatchesRegularExpression('/^debug connection=test duration=\d+ rows=3 SELECT bar FROM foo$/', $messages[0]); <add> $this->assertMatchesRegularExpression('/^debug: connection=test duration=\d+ rows=3 SELECT bar FROM foo$/', $messages[0]); <ide> } <ide> <ide> /** <ide> public function testExecuteWithParams() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(1, $messages); <del> $this->assertMatchesRegularExpression('/^debug connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE x=1 AND y=2$/', $messages[0]); <add> $this->assertMatchesRegularExpression('/^debug: connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE x=1 AND y=2$/', $messages[0]); <ide> } <ide> <ide> /** <ide> public function testExecuteWithBinding() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(2, $messages); <del> $this->assertMatchesRegularExpression("/^debug connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE a='1' AND 
b='2013-01-01'$/", $messages[0]); <del> $this->assertMatchesRegularExpression("/^debug connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE a='1' AND b='2014-01-01'$/", $messages[1]); <add> $this->assertMatchesRegularExpression("/^debug: connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE a='1' AND b='2013-01-01'$/", $messages[0]); <add> $this->assertMatchesRegularExpression("/^debug: connection=test duration=\d+ rows=4 SELECT bar FROM foo WHERE a='1' AND b='2014-01-01'$/", $messages[1]); <ide> } <ide> <ide> /** <ide> public function testExecuteWithError() <ide> <ide> $messages = Log::engine('queries')->read(); <ide> $this->assertCount(1, $messages); <del> $this->assertMatchesRegularExpression("/^debug connection=test duration=\d+ rows=0 SELECT bar FROM foo$/", $messages[0]); <add> $this->assertMatchesRegularExpression("/^debug: connection=test duration=\d+ rows=0 SELECT bar FROM foo$/", $messages[0]); <ide> } <ide> <ide> /**
3
Java
Java
refine antpathmatcher.combine when p1 contains '.'
c1dafed886f9f136b788f1a23528d5173167a3e8
<ide><path>spring-core/src/main/java/org/springframework/util/AntPathMatcher.java <ide> public String combine(String pattern1, String pattern2) { <ide> if (!StringUtils.hasText(pattern1) && !StringUtils.hasText(pattern2)) { <ide> return ""; <ide> } <del> else if (!StringUtils.hasText(pattern1)) { <add> if (!StringUtils.hasText(pattern1)) { <ide> return pattern2; <ide> } <del> else if (!StringUtils.hasText(pattern2)) { <add> if (!StringUtils.hasText(pattern2)) { <ide> return pattern1; <ide> } <ide> <ide> else if (!StringUtils.hasText(pattern2)) { <ide> // However /user + /user -> /usr/user ; /{foo} + /bar -> /{foo}/bar <ide> return pattern2; <ide> } <del> else if (pattern1.endsWith("/*")) { <del> if (pattern2.startsWith("/")) { <del> // /hotels/* + /booking -> /hotels/booking <del> return pattern1.substring(0, pattern1.length() - 1) + pattern2.substring(1); <del> } <del> else { <del> // /hotels/* + booking -> /hotels/booking <del> return pattern1.substring(0, pattern1.length() - 1) + pattern2; <del> } <add> <add> // /hotels/* + /booking -> /hotels/booking <add> // /hotels/* + booking -> /hotels/booking <add> if (pattern1.endsWith("/*")) { <add> return slashConcat(pattern1.substring(0, pattern1.length() - 2), pattern2); <ide> } <del> else if (pattern1.endsWith("/**")) { <del> if (pattern2.startsWith("/")) { <del> // /hotels/** + /booking -> /hotels/**/booking <del> return pattern1 + pattern2; <del> } <del> else { <del> // /hotels/** + booking -> /hotels/**/booking <del> return pattern1 + "/" + pattern2; <del> } <add> <add> // /hotels/** + /booking -> /hotels/**/booking <add> // /hotels/** + booking -> /hotels/**/booking <add> if (pattern1.endsWith("/**")) { <add> return slashConcat(pattern1, pattern2); <ide> } <del> else { <del> int dotPos1 = pattern1.indexOf('.'); <del> if (dotPos1 == -1 || pattern1ContainsUriVar) { <del> // simply concatenate the two patterns <del> if (pattern1.endsWith("/") || pattern2.startsWith("/")) { <del> return pattern1 + pattern2; <del> } <del> else { <del> return pattern1 + "/" + pattern2; <del> } <del> } <del> String fileName1 = pattern1.substring(0, dotPos1); <del> String extension1 = pattern1.substring(dotPos1); <del> String fileName2; <del> String extension2; <del> int dotPos2 = pattern2.indexOf('.'); <del> if (dotPos2 != -1) { <del> fileName2 = pattern2.substring(0, dotPos2); <del> extension2 = pattern2.substring(dotPos2); <del> } <del> else { <del> fileName2 = pattern2; <del> extension2 = ""; <del> } <del> String fileName = fileName1.endsWith("*") ? fileName2 : fileName1; <del> String extension = extension1.startsWith("*") ? extension2 : extension1; <ide> <del> return fileName + extension; <add> int starDotPos1 = pattern1.indexOf("*."); <add> if (pattern1ContainsUriVar || starDotPos1 == -1) { <add> // simply concatenate the two patterns <add> return slashConcat(pattern1, pattern2); <add> } <add> String extension1 = pattern1.substring(starDotPos1 + 1); <add> int dotPos2 = pattern2.indexOf('.'); <add> String fileName2 = (dotPos2 == -1 ? pattern2 : pattern2.substring(0, dotPos2)); <add> String extension2 = (dotPos2 == -1 ? "" : pattern2.substring(dotPos2)); <add> String extension = extension1.startsWith("*") ? 
extension2 : extension1; <add> return fileName2 + extension; <add> } <add> <add> private String slashConcat(String path1, String path2) { <add> if (path1.endsWith("/") || path2.startsWith("/")) { <add> return path1 + path2; <ide> } <add> return path1 + "/" + path2; <ide> } <ide> <ide> /** <ide><path>spring-core/src/test/java/org/springframework/util/AntPathMatcherTests.java <ide> public void combine() { <ide> assertEquals("/hotels/**/booking", pathMatcher.combine("/hotels/**", "/booking")); <ide> assertEquals("/hotels/booking", pathMatcher.combine("/hotels", "/booking")); <ide> assertEquals("/hotels/booking", pathMatcher.combine("/hotels", "booking")); <add> assertEquals("/hotels/booking", pathMatcher.combine("/hotels/", "booking")); <ide> assertEquals("/hotels/{hotel}", pathMatcher.combine("/hotels/*", "{hotel}")); <ide> assertEquals("/hotels/**/{hotel}", pathMatcher.combine("/hotels/**", "{hotel}")); <ide> assertEquals("/hotels/{hotel}", pathMatcher.combine("/hotels", "{hotel}")); <ide> public void combine() { <ide> assertEquals("/{foo}/bar", pathMatcher.combine("/{foo}", "/bar")); // SPR-8858 <ide> assertEquals("/user/user", pathMatcher.combine("/user", "/user")); // SPR-7970 <ide> assertEquals("/{foo:.*[^0-9].*}/edit/", pathMatcher.combine("/{foo:.*[^0-9].*}", "/edit/")); // SPR-10062 <add> assertEquals("/1.0/foo/test", pathMatcher.combine("/1.0", "/foo/test")); // SPR-10554 <ide> } <ide> <ide> @Test
2
Javascript
Javascript
add required imports
04468e1e9fbf8fe143d07697f979d6a3b87f75b7
<ide><path>src/renderers/webvr/WebVRManager.js <ide> /** <ide> * @author mrdoob / http://mrdoob.com/ <ide> */ <add>import { Matrix4 } from '../../math/Matrix4'; <add>import { Vector4 } from '../../math/Vector4'; <add>import { ArrayCamera } from '../../cameras/ArrayCamera'; <add>import { PerspectiveCamera } from '../../cameras/PerspectiveCamera'; <ide> <ide> function WebVRManager( renderer ) { <ide>
1
Python
Python
allow batch size > 1 with exporter_main_v2.py
0d6ce6025ffc2bed437160fc8b2e9934b3f82fad
<ide><path>research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf2_test.py <ide> def __init__(self, conv_weight_scalar=1.0): <ide> value=conv_weight_scalar)) <ide> <ide> def preprocess(self, inputs): <del> true_image_shapes = [] # Doesn't matter for the fake model. <del> return tf.identity(inputs), true_image_shapes <add> return tf.identity(inputs), exporter_lib_v2.get_true_shapes(inputs) <ide> <ide> def predict(self, preprocessed_inputs, true_image_shapes): <ide> return {'image': self._conv(preprocessed_inputs)} <ide><path>research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf2_test.py <ide> def __init__(self, conv_weight_scalar=1.0): <ide> value=conv_weight_scalar)) <ide> <ide> def preprocess(self, inputs): <del> true_image_shapes = [] # Doesn't matter for the fake model. <del> return tf.identity(inputs), true_image_shapes <add> return tf.identity(inputs), exporter_lib_v2.get_true_shapes(inputs) <ide> <ide> def predict(self, preprocessed_inputs, true_image_shapes): <ide> return {'image': self._conv(preprocessed_inputs)} <ide><path>research/object_detection/exporter_lib_tf2_test.py <ide> def __init__(self, conv_weight_scalar=1.0): <ide> value=conv_weight_scalar)) <ide> <ide> def preprocess(self, inputs): <del> true_image_shapes = [] # Doesn't matter for the fake model. <del> return tf.identity(inputs), true_image_shapes <add> return tf.identity(inputs), exporter_lib_v2.get_true_shapes(inputs) <ide> <ide> def predict(self, preprocessed_inputs, true_image_shapes, **side_inputs): <ide> return_dict = {'image': self._conv(preprocessed_inputs)} <ide><path>research/object_detection/exporter_lib_v2.py <ide> def _get_side_input_signature(self, zipped_side_inputs): <ide> def _get_side_names_from_zip(self, zipped_side_inputs): <ide> return [side[2] for side in zipped_side_inputs] <ide> <del> def _run_inference_on_images(self, image, **kwargs): <add> def _preprocess_input(self, batch_input, decode_fn): <add> # Input preprocessing happends on the CPU. We don't need to use the device <add> # placement as it is automatically handled by TF. <add> def _decode_and_preprocess(single_input): <add> image = decode_fn(single_input) <add> image = tf.cast(image, tf.float32) <add> image, true_shape = self._model.preprocess(image[tf.newaxis, :, :, :]) <add> return image[0], true_shape[0] <add> <add> images, true_shapes = tf.map_fn( <add> _decode_and_preprocess, <add> elems=batch_input, <add> parallel_iterations=32, <add> back_prop=False, <add> fn_output_signature=(tf.float32, tf.int32)) <add> return images, true_shapes <add> <add> def _run_inference_on_images(self, images, true_shapes, **kwargs): <ide> """Cast image to float and run inference. <ide> <ide> Args: <del> image: uint8 Tensor of shape [1, None, None, 3]. <add> images: float32 Tensor of shape [None, None, None, 3]. <add> true_shapes: int32 Tensor of form [batch, 3] <ide> **kwargs: additional keyword arguments. <ide> <ide> Returns: <ide> Tensor dictionary holding detections. 
<ide> """ <ide> label_id_offset = 1 <del> <del> image = tf.cast(image, tf.float32) <del> image, shapes = self._model.preprocess(image) <del> prediction_dict = self._model.predict(image, shapes, **kwargs) <del> detections = self._model.postprocess(prediction_dict, shapes) <add> prediction_dict = self._model.predict(images, true_shapes, **kwargs) <add> detections = self._model.postprocess(prediction_dict, true_shapes) <ide> classes_field = fields.DetectionResultFields.detection_classes <ide> detections[classes_field] = ( <ide> tf.cast(detections[classes_field], tf.float32) + label_id_offset) <ide> def __init__(self, detection_model, <ide> <ide> def call_func(input_tensor, *side_inputs): <ide> kwargs = dict(zip(self._side_input_names, side_inputs)) <del> return self._run_inference_on_images(input_tensor, **kwargs) <add> images, true_shapes = self._preprocess_input(input_tensor, lambda x: x) <add> return self._run_inference_on_images(images, true_shapes, **kwargs) <ide> <ide> self.__call__ = tf.function(call_func, input_signature=sig) <ide> <ide> def call_func(input_tensor, *side_inputs): <ide> zipped_side_inputs) <ide> <ide> <add>def get_true_shapes(input_tensor): <add> input_shape = tf.shape(input_tensor) <add> batch = input_shape[0] <add> image_shape = input_shape[1:] <add> true_shapes = tf.tile(image_shape[tf.newaxis, :], [batch, 1]) <add> return true_shapes <add> <add> <ide> class DetectionFromFloatImageModule(DetectionInferenceModule): <ide> """Detection Inference Module for float image inputs.""" <ide> <ide> @tf.function( <ide> input_signature=[ <del> tf.TensorSpec(shape=[1, None, None, 3], dtype=tf.float32)]) <add> tf.TensorSpec(shape=[None, None, None, 3], dtype=tf.float32)]) <ide> def __call__(self, input_tensor): <del> return self._run_inference_on_images(input_tensor) <add> images, true_shapes = self._preprocess_input(input_tensor, lambda x: x) <add> return self._run_inference_on_images(images, <add> true_shapes) <ide> <ide> <ide> class DetectionFromEncodedImageModule(DetectionInferenceModule): <ide> """Detection Inference Module for encoded image string inputs.""" <ide> <del> @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)]) <add> @tf.function(input_signature=[tf.TensorSpec(shape=[None], dtype=tf.string)]) <ide> def __call__(self, input_tensor): <del> with tf.device('cpu:0'): <del> image = tf.map_fn( <del> _decode_image, <del> elems=input_tensor, <del> dtype=tf.uint8, <del> parallel_iterations=32, <del> back_prop=False) <del> return self._run_inference_on_images(image) <add> images, true_shapes = self._preprocess_input(input_tensor, _decode_image) <add> return self._run_inference_on_images(images, true_shapes) <ide> <ide> <ide> class DetectionFromTFExampleModule(DetectionInferenceModule): <ide> """Detection Inference Module for TF.Example inputs.""" <ide> <del> @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)]) <add> @tf.function(input_signature=[tf.TensorSpec(shape=[None], dtype=tf.string)]) <ide> def __call__(self, input_tensor): <del> with tf.device('cpu:0'): <del> image = tf.map_fn( <del> _decode_tf_example, <del> elems=input_tensor, <del> dtype=tf.uint8, <del> parallel_iterations=32, <del> back_prop=False) <del> return self._run_inference_on_images(image) <add> images, true_shapes = self._preprocess_input(input_tensor, <add> _decode_tf_example) <add> return self._run_inference_on_images(images, true_shapes) <ide> <ide> DETECTION_MODULE_MAP = { <ide> 'image_tensor': DetectionFromImageModule,
4
PHP
PHP
fix whitespace errors
203172477fc99d541a6def73449c8cacc3718ca6
<ide><path>src/Illuminate/Auth/Access/Gate.php <ide> public function any($abilities, $arguments = []) <ide> }); <ide> } <ide> <del> /** <add> /** <ide> * Determine if any one of the given abilities should be denied for the current user. <ide> * <ide> * @param iterable|string $abilities <ide> public function any($abilities, $arguments = []) <ide> public function none($abilities, $arguments = []) <ide> { <ide> return ! $this->any($abilities, $arguments); <del> } <add> } <ide> <ide> /** <ide> * Determine if the given ability should be granted for the current user.
1
Text
Text
remove patterns and strings from urls
da385c9c1f9deeeefd705154a6e6612d6d62f41b
<ide><path>docs/api-guide/authentication.md <ide> If you've already created some users, you can generate tokens for all existing u <ide> <ide> When using `TokenAuthentication`, you may want to provide a mechanism for clients to obtain a token given the username and password. REST framework provides a built-in view to provide this behavior. To use it, add the `obtain_auth_token` view to your URLconf: <ide> <del> urlpatterns += patterns('', <del> url(r'^api-token-auth/', 'rest_framework.authtoken.views.obtain_auth_token') <del> ) <add> from rest_framework.authtoken import views <add> urlpatterns += [ <add> url(r'^api-token-auth/', views.obtain_auth_token) <add> ] <ide> <ide> Note that the URL part of the pattern can be whatever you want to use. <ide> <ide><path>docs/api-guide/format-suffixes.md <ide> Arguments: <ide> Example: <ide> <ide> from rest_framework.urlpatterns import format_suffix_patterns <del> <del> urlpatterns = patterns('blog.views', <del> url(r'^/$', 'api_root'), <del> url(r'^comments/$', 'comment_list'), <del> url(r'^comments/(?P<pk>[0-9]+)/$', 'comment_detail') <del> ) <add> from blog import views <add> <add> urlpatterns = [ <add> url(r'^/$', views.apt_root), <add> url(r'^comments/$', views.comment_list), <add> url(r'^comments/(?P<pk>[0-9]+)/$', views.comment_detail) <add> ] <ide> <ide> urlpatterns = format_suffix_patterns(urlpatterns, allowed=['json', 'html']) <ide> <ide><path>docs/index.md <ide> Add `'rest_framework'` to your `INSTALLED_APPS` setting. <ide> <ide> If you're intending to use the browsable API you'll probably also want to add REST framework's login and logout views. Add the following to your root `urls.py` file. <ide> <del> urlpatterns = patterns('', <add> urlpatterns = [ <ide> ... <ide> url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) <del> ) <add> ] <ide> <ide> Note that the URL path can be whatever you want, but you must include `'rest_framework.urls'` with the `'rest_framework'` namespace. <ide> <ide><path>docs/topics/2.3-announcement.md <ide> As an example of just how simple REST framework APIs can now be, here's an API w <ide> """ <ide> A REST framework API for viewing and editing users and groups. <ide> """ <del> from django.conf.urls.defaults import url, patterns, include <add> from django.conf.urls.defaults import url, include <ide> from django.contrib.auth.models import User, Group <ide> from rest_framework import viewsets, routers <ide> <ide> As an example of just how simple REST framework APIs can now be, here's an API w <ide> <ide> # Wire up our API using automatic URL routing. <ide> # Additionally, we include login URLs for the browseable API. <del> urlpatterns = patterns('', <add> urlpatterns = [ <ide> url(r'^', include(router.urls)), <ide> url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) <del> ) <add> ] <ide> <ide> The best place to get started with ViewSets and Routers is to take a look at the [newest section in the tutorial][part-6], which demonstrates their usage. <ide> <ide><path>docs/tutorial/1-serialization.md <ide> We'll also need to add our new `snippets` app and the `rest_framework` app to `I <ide> <ide> We also need to wire up the root urlconf, in the `tutorial/urls.py` file, to include our snippet app's URLs. <ide> <del> urlpatterns = patterns('', <add> urlpatterns = [ <ide> url(r'^', include('snippets.urls')), <del> ) <add> ] <ide> <ide> Okay, we're ready to roll. 
<ide> <ide> We'll also need a view which corresponds to an individual snippet, and can be us <ide> Finally we need to wire these views up. Create the `snippets/urls.py` file: <ide> <ide> from django.conf.urls import patterns, url <add> from snippets import views <ide> <del> urlpatterns = patterns('snippets.views', <del> url(r'^snippets/$', 'snippet_list'), <del> url(r'^snippets/(?P<pk>[0-9]+)/$', 'snippet_detail'), <del> ) <add> urlpatterns = [ <add> url(r'^snippets/$', views.snippet_list), <add> url(r'^snippets/(?P<pk>[0-9]+)/$', views.snippet_detail), <add> ] <ide> <ide> It's worth noting that there are a couple of edge cases we're not dealing with properly at the moment. If we send malformed `json`, or if a request is made with a method that the view doesn't handle, then we'll end up with a 500 "server error" response. Still, this'll do for now. <ide> <ide><path>docs/tutorial/2-requests-and-responses.md <ide> Now update the `urls.py` file slightly, to append a set of `format_suffix_patter <ide> <ide> from django.conf.urls import patterns, url <ide> from rest_framework.urlpatterns import format_suffix_patterns <add> from snippets import views <ide> <del> urlpatterns = patterns('snippets.views', <del> url(r'^snippets/$', 'snippet_list'), <del> url(r'^snippets/(?P<pk>[0-9]+)$', 'snippet_detail'), <del> ) <add> urlpatterns = [ <add> url(r'^snippets/$', views.snippet_list), <add> url(r'^snippets/(?P<pk>[0-9]+)$', views.snippet_detail), <add> ] <ide> <ide> urlpatterns = format_suffix_patterns(urlpatterns) <ide> <ide><path>docs/tutorial/3-class-based-views.md <ide> We'll also need to refactor our `urls.py` slightly now we're using class based v <ide> from rest_framework.urlpatterns import format_suffix_patterns <ide> from snippets import views <ide> <del> urlpatterns = patterns('', <add> urlpatterns = [ <ide> url(r'^snippets/$', views.SnippetList.as_view()), <ide> url(r'^snippets/(?P<pk>[0-9]+)/$', views.SnippetDetail.as_view()), <del> ) <add> ] <ide> <ide> urlpatterns = format_suffix_patterns(urlpatterns) <ide> <ide><path>docs/tutorial/4-authentication-and-permissions.md <ide> Add the following import at the top of the file: <ide> <ide> And, at the end of the file, add a pattern to include the login and logout views for the browsable API. <ide> <del> urlpatterns += patterns('', <add> urlpatterns += [ <ide> url(r'^api-auth/', include('rest_framework.urls', <ide> namespace='rest_framework')), <del> ) <add> ] <ide> <ide> The `r'^api-auth/'` part of pattern can actually be whatever URL you want to use. The only restriction is that the included urls must use the `'rest_framework'` namespace. 
<ide> <ide><path>docs/tutorial/5-relationships-and-hyperlinked-apis.md <ide> If we're going to have a hyperlinked API, we need to make sure we name our URL p <ide> After adding all those names into our URLconf, our final `snippets/urls.py` file should look something like this: <ide> <ide> # API endpoints <del> urlpatterns = format_suffix_patterns(patterns('snippets.views', <del> url(r'^$', 'api_root'), <add> urlpatterns = format_suffix_patterns([ <add> url(r'^$', views.api_root), <ide> url(r'^snippets/$', <ide> views.SnippetList.as_view(), <ide> name='snippet-list'), <ide> After adding all those names into our URLconf, our final `snippets/urls.py` file <ide> url(r'^users/(?P<pk>[0-9]+)/$', <ide> views.UserDetail.as_view(), <ide> name='user-detail') <del> )) <add> ]) <ide> <ide> # Login and logout views for the browsable API <del> urlpatterns += patterns('', <add> urlpatterns += [ <ide> url(r'^api-auth/', include('rest_framework.urls', <ide> namespace='rest_framework')), <del> ) <add> ] <ide> <ide> ## Adding pagination <ide> <ide><path>docs/tutorial/6-viewsets-and-routers.md <ide> Notice how we're creating multiple views from each `ViewSet` class, by binding t <ide> <ide> Now that we've bound our resources into concrete views, we can register the views with the URL conf as usual. <ide> <del> urlpatterns = format_suffix_patterns(patterns('snippets.views', <del> url(r'^$', 'api_root'), <add> urlpatterns = format_suffix_patterns([ <add> url(r'^$', api_root), <ide> url(r'^snippets/$', snippet_list, name='snippet-list'), <ide> url(r'^snippets/(?P<pk>[0-9]+)/$', snippet_detail, name='snippet-detail'), <ide> url(r'^snippets/(?P<pk>[0-9]+)/highlight/$', snippet_highlight, name='snippet-highlight'), <ide> url(r'^users/$', user_list, name='user-list'), <ide> url(r'^users/(?P<pk>[0-9]+)/$', user_detail, name='user-detail') <del> )) <add> ]) <ide> <ide> ## Using Routers <ide> <ide> Because we're using `ViewSet` classes rather than `View` classes, we actually don't need to design the URL conf ourselves. The conventions for wiring up resources into views and urls can be handled automatically, using a `Router` class. All we need to do is register the appropriate view sets with a router, and let it do the rest. <ide> <ide> Here's our re-wired `urls.py` file. <ide> <del> from django.conf.urls import patterns, url, include <add> from django.conf.urls import url, include <ide> from snippets import views <ide> from rest_framework.routers import DefaultRouter <ide> <ide> Here's our re-wired `urls.py` file. <ide> <ide> # The API URLs are now determined automatically by the router. <ide> # Additionally, we include the login URLs for the browseable API. <del> urlpatterns = patterns('', <add> urlpatterns = [ <ide> url(r'^', include(router.urls)), <ide> url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')) <del> ) <add> ] <ide> <ide> Registering the viewsets with the router is similar to providing a urlpattern. We include two arguments - the URL prefix for the views, and the viewset itself. <ide>
10
Text
Text
add missing bracket
7ce777f9c57bec5e38bb48503877f5baddb7cd3d
<ide><path>README.md <ide> Flowable.just("Hello world") <ide> @Override public void accept(String s) { <ide> System.out.println(s); <ide> } <del> ); <add> }); <ide> ``` <ide> <ide> RxJava 2 features several base classes you can discover operators on:
1
Python
Python
add test of new `parametrize` decorator
ea0115974afa3539bdb21c351034c9e755b02591
<ide><path>numpy/testing/tests/test_decorators.py <add>""" <add>Test the decorators from ``testing.decorators``. <add> <add>""" <ide> from __future__ import division, absolute_import, print_function <ide> <ide> import warnings <ide> def slow_func(x, y, z): <ide> <ide> assert_(slow_func.slow) <ide> <add> <ide> def test_setastest(): <ide> @dec.setastest() <ide> def f_default(a): <ide> def f_isnottest(a): <ide> assert_(f_istest.__test__) <ide> assert_(not f_isnottest.__test__) <ide> <add> <ide> class DidntSkipException(Exception): <ide> pass <ide> <ide> def deprecated_func3(): <ide> assert_raises(AssertionError, deprecated_func3) <ide> <ide> <add>@dec.parametrize('base, power, expected', <add> [(1, 1, 1), <add> (2, 1, 2), <add> (2, 2, 4)]) <add>def test_parametrize(base, power, expected): <add> assert_(base**power == expected) <add> <add> <ide> if __name__ == '__main__': <ide> run_module_suite()
1
Text
Text
replace unneeded snake cases
068c001dee01e8ac7401e97249c5f61215aed233
<ide><path>doc/api/process.md <ide> setTimeout(() => { <ide> ``` <ide> <ide> <del>## process.initgroups(user, extra_group) <add>## process.initgroups(user, extraGroup) <ide> <!-- YAML <ide> added: v0.9.4 <ide> --> <ide> <ide> * `user` {string|number} The user name or numeric identifier. <del>* `extra_group` {string|number} A group name or numeric identifier. <add>* `extraGroup` {string|number} A group name or numeric identifier. <ide> <ide> The `process.initgroups()` method reads the `/etc/group` file and initializes <ide> the group access list, using all groups of which the user is a member. This is <ide><path>doc/api/tls.md <ide> socket.pipe(pair.encrypted); <ide> can be replaced by: <ide> <ide> ```js <del>secure_socket = tls.TLSSocket(socket, options); <add>secureSocket = tls.TLSSocket(socket, options); <ide> ``` <ide> <del>where `secure_socket` has the same API as `pair.cleartext`. <add>where `secureSocket` has the same API as `pair.cleartext`. <ide> <ide> [`'secureConnect'`]: #tls_event_secureconnect <ide> [`'secureConnection'`]: #tls_event_secureconnection
2
Javascript
Javascript
remove unused method parameter
3f58edbe800aacb9fc943a19af700e1cc8c439d4
<ide><path>src/core/core.scale.js <ide> function getEvenSpacing(arr) { <ide> /** <ide> * @param {number[]} majorIndices <ide> * @param {Tick[]} ticks <del> * @param {number} axisLength <ide> * @param {number} ticksLimit <ide> */ <del>function calculateSpacing(majorIndices, ticks, axisLength, ticksLimit) { <add>function calculateSpacing(majorIndices, ticks, ticksLimit) { <ide> const evenMajorSpacing = getEvenSpacing(majorIndices); <ide> const spacing = ticks.length / ticksLimit; <ide> <ide> export default class Scale extends Element { <ide> _autoSkip(ticks) { <ide> const me = this; <ide> const tickOpts = me.options.ticks; <del> const axisLength = me._length; <del> const ticksLimit = tickOpts.maxTicksLimit || axisLength / me._tickSize(); <add> const ticksLimit = tickOpts.maxTicksLimit || me._length / me._tickSize(); <ide> const majorIndices = tickOpts.major.enabled ? getMajorIndices(ticks) : []; <ide> const numMajorIndices = majorIndices.length; <ide> const first = majorIndices[0]; <ide> export default class Scale extends Element { <ide> return newTicks; <ide> } <ide> <del> const spacing = calculateSpacing(majorIndices, ticks, axisLength, ticksLimit); <add> const spacing = calculateSpacing(majorIndices, ticks, ticksLimit); <ide> <ide> if (numMajorIndices > 0) { <ide> let i, ilen;
1
Ruby
Ruby
fix use of xcode_version in llvm check
94f84544c6da64b60d5ea2c48d2c4ee3533807c2
<ide><path>Library/Homebrew/formula.rb <ide> def handle_llvm_failure llvm <ide> that we can update the formula accordingly. Thanks! <ide> EOS <ide> puts <del> if xcode_version < "4.2" <add> if MacOS.xcode_version < "4.2" <ide> puts "If it doesn't work you can: brew install --use-gcc" <ide> else <ide> puts "If it doesn't work you can try: brew install --use-clang"
1
Python
Python
stream large assets on download
d85117f88c689c8914be5386d15b63a3330a9124
<ide><path>spacy/cli/_util.py <ide> def download_file(src: Union[str, "Pathy"], dest: Path, *, force: bool = False) <ide> src = str(src) <ide> with smart_open.open(src, mode="rb", ignore_ext=True) as input_file: <ide> with dest.open(mode="wb") as output_file: <del> output_file.write(input_file.read()) <add> shutil.copyfileobj(input_file, output_file) <ide> <ide> <ide> def ensure_pathy(path):
1
Mixed
Javascript
fix benchmark runner
8540768616861e513010ea2874a3188098105cca
<ide><path>scripts/bench/README.md <ide> <ide> In most cases, the only two commands you might want to use are: <ide> <del>- `yarn bench` <del>- `yarn build -- --type=UMD_PROD && yarn bench -- --skip-build` <add>- `yarn start` <add>- `yarn build core,dom-client --type=UMD_PROD && yarn start --skip-build` <ide> <del>The first command will run benchmarks with all the default settings. A local and remote build will occcur on all bundles, both local and remote repos will be run against all benchmarks. <add>The first command will run benchmarks with all the default settings. A local and remote build will occur on React and ReactDOM UMD bundles, both local and remote repos will be run against all benchmarks. <ide> <ide> The second command will run all benchmarks but skip the build process. This is useful for when doing local performance tweaking and the remote repo has already had its bundles built. Both local and remote repos will be run against all benchmarks with this command too. <ide> <ide> The other commands are as follows: <ide> <ide> ```bash <ide> # will compare local repo vs remote merge base repo <del>yarn bench <add>yarn start <ide> <ide> # will compare local repo vs remote merge base repo <ide> # this can significantly improve bench times due to no build <del>yarn bench -- --skip-build <add>yarn start --skip-build <ide> <ide> # will only build and run local repo against benchmarks (no remote values will be shown) <del>yarn bench -- --local <add>yarn start --local <ide> <ide> # will only build and run remote merge base repo against benchmarks (no local values will be shown) <del>yarn bench -- --remote <add>yarn start --remote <ide> <ide> # will only build and run remote master repo against benchmarks <del>yarn bench -- --remote=master <add>yarn start --remote=master <ide> <del># same as "yarn bench" <del>yarn bench -- --remote --local <add># same as "yarn start" <add>yarn start --remote --local <ide> <ide> # runs benchmarks with Chrome in headless mode <del>yarn bench -- --headless <add>yarn start --headless <ide> <ide> # runs only specific string matching benchmarks <del>yarn bench -- --benchmark=hacker <add>yarn start --benchmark=hacker <ide> ``` <ide>\ No newline at end of file <ide><path>scripts/bench/build.js <ide> function getDefaultReactPath() { <ide> return join(__dirname, 'remote-repo'); <ide> } <ide> <del>async function buldAllBundles(reactPath = getDefaultReactPath()) { <del> // build the react FB bundles in the build <del> await executeCommand(`cd ${reactPath} && yarn && yarn build`); <del>} <del> <ide> async function buildBenchmark(reactPath = getDefaultReactPath(), benchmark) { <ide> // get the build.js from the benchmark directory and execute it <ide> await require(join(__dirname, 'benchmarks', benchmark, 'build.js'))( <ide> async function buildBenchmark(reactPath = getDefaultReactPath(), benchmark) { <ide> ); <ide> } <ide> <del>function getBundleResults(reactPath = getDefaultReactPath()) { <del> return require(join(reactPath, 'scripts', 'rollup', 'results.json')); <del>} <del> <ide> async function getMergeBaseFromLocalGitRepo(localRepo) { <ide> const repo = await Git.Repository.open(localRepo); <ide> return await Git.Merge.base( <ide> async function buildBenchmarkBundlesFromGitRepo( <ide> // then we checkout the merge base <ide> await Git.Checkout.tree(repo, commit); <ide> } <del> await buildAllBundles(); <add> await buildReactBundles(); <ide> } <del> return getBundleResults(); <ide> } <ide> <del>async function buildAllBundles(reactPath, skipBuild) { <add>async function 
buildReactBundles(reactPath = getDefaultReactPath(), skipBuild) { <ide> if (!skipBuild) { <del> // build all bundles so we can get all stats and use bundles for benchmarks <del> await buldAllBundles(reactPath); <add> await executeCommand( <add> `cd ${reactPath} && yarn && yarn build core,dom-client --type=UMD_PROD` <add> ); <ide> } <del> return getBundleResults(reactPath); <ide> } <ide> <ide> // if run directly via CLI <ide> if (require.main === module) { <ide> } <ide> <ide> module.exports = { <del> buildAllBundles, <add> buildReactBundles, <ide> buildBenchmark, <ide> buildBenchmarkBundlesFromGitRepo, <ide> getMergeBaseFromLocalGitRepo, <ide><path>scripts/bench/runner.js <ide> const {readdirSync, statSync} = require('fs'); <ide> const {join} = require('path'); <ide> const runBenchmark = require('./benchmark'); <ide> const { <del> buildAllBundles, <add> buildReactBundles, <ide> buildBenchmark, <ide> buildBenchmarkBundlesFromGitRepo, <ide> getMergeBaseFromLocalGitRepo, <ide> async function benchmarkRemoteMaster() { <ide> chalk.gray(`- Merge base commit ${chalk.white(commit.tostrS())}`) <ide> ); <ide> } <add> await buildBenchmarkBundlesFromGitRepo(commit, skipBuild); <ide> return { <del> // we build the bundles from the React repo <del> bundles: await buildBenchmarkBundlesFromGitRepo(commit, skipBuild), <del> // we use these bundles to run the benchmarks <ide> benchmarks: await runBenchmarks(), <ide> }; <ide> } <ide> async function benchmarkRemoteMaster() { <ide> // of the local react repo <ide> async function benchmarkLocal(reactPath) { <ide> console.log(chalk.gray(`- Building React bundles...`)); <add> await buildReactBundles(reactPath, skipBuild); <ide> return { <del> // we build the bundles from the React repo <del> bundles: await buildAllBundles(reactPath, skipBuild), <del> // we use these bundles to run the benchmarks <ide> benchmarks: await runBenchmarks(reactPath), <ide> }; <ide> } <ide><path>scripts/bench/stats.js <ide> function addBenchmarkResults(table, localResults, remoteMasterResults) { <ide> }); <ide> } <ide> <del>function addBundleSizeComparsions(table, localResults, remoteMasterResults) { <del> const bundlesRowHeader = [chalk.white.bold('Bundles')]; <del> if (remoteMasterResults) { <del> bundlesRowHeader.push(chalk.white.bold('Size')); <del> } <del> if (localResults) { <del> bundlesRowHeader.push(chalk.white.bold('Size')); <del> } <del> if (localResults && remoteMasterResults) { <del> bundlesRowHeader.push(chalk.white.bold('Diff')); <del> } <del> table.push(bundlesRowHeader); <del> <del> const bundles = Object.keys( <del> (localResults && localResults.bundles.bundleSizes) || <del> (remoteMasterResults && remoteMasterResults.bundles.bundleSizes) <del> ); <del> bundles.forEach(bundle => { <del> const row = [chalk.gray(bundle)]; <del> let remoteSize = 0; <del> if (remoteMasterResults) { <del> const remoteBundle = (remoteSize = <del> remoteMasterResults.bundles.bundleSizes[bundle]); <del> <del> if (remoteBundle) { <del> remoteSize = remoteSize.size; <del> } <del> row.push(chalk.white(remoteSize + ' kb')); <del> } <del> let localSize = 0; <del> if (localResults) { <del> const localBundle = localResults.bundles.bundleSizes[bundle]; <del> <del> if (localBundle) { <del> localSize = localBundle.size; <del> } <del> localSize = localResults.bundles.bundleSizes[bundle].size; <del> row.push(chalk.white(localSize + ' kb')); <del> } <del> if (localResults && remoteMasterResults) { <del> row.push(percentChange(remoteSize, localSize, 0, 0)); <del> } <del> table.push(row); <del> }); <del>} 
<del> <ide> function printResults(localResults, remoteMasterResults) { <ide> const head = ['']; <ide> if (remoteMasterResults) { <ide> function printResults(localResults, remoteMasterResults) { <ide> head.push(''); <ide> } <ide> const table = new Table({head}); <del> <del> addBundleSizeComparsions(table, localResults, remoteMasterResults); <ide> addBenchmarkResults(table, localResults, remoteMasterResults); <del> <ide> console.log(table.toString()); <ide> } <ide>
4
PHP
PHP
fix risky test warnings
aae5191bea6859abaab9d49661fe9d791d17d9ac
<ide><path>tests/TestCase/TestSuite/IntegrationTestCaseTest.php <ide> public function testAssertNoRedirectFail() <ide> { <ide> $test = new AssertIntegrationTestCase('testBadAssertNoRedirect'); <ide> $result = $test->run(); <del> ob_start(); <add> <ide> $this->assertFalse($result->wasSuccessful()); <ide> $this->assertEquals(1, $result->failureCount()); <ide> } <ide><path>tests/TestCase/TestSuite/TestCaseTest.php <ide> public function testAssertHtmlBasic() <ide> { <ide> $test = new AssertHtmlTestCase('testAssertHtmlQuotes'); <ide> $result = $test->run(); <del> ob_start(); <add> <ide> $this->assertEquals(0, $result->errorCount()); <ide> $this->assertTrue($result->wasSuccessful()); <ide> $this->assertEquals(0, $result->failureCount()); <ide> public function testNumericValuesInExpectationForAssertHtml() <ide> { <ide> $test = new AssertHtmlTestCase('testNumericValuesInExpectationForAssertHtml'); <ide> $result = $test->run(); <del> ob_start(); <ide> $this->assertEquals(0, $result->errorCount()); <ide> $this->assertTrue($result->wasSuccessful()); <ide> $this->assertEquals(0, $result->failureCount()); <ide> public function testBadAssertHtml() <ide> { <ide> $test = new AssertHtmlTestCase('testBadAssertHtml'); <ide> $result = $test->run(); <del> ob_start(); <ide> $this->assertEquals(0, $result->errorCount()); <ide> $this->assertFalse($result->wasSuccessful()); <ide> $this->assertEquals(1, $result->failureCount()); <ide> <ide> $test = new AssertHtmlTestCase('testBadAssertHtml2'); <ide> $result = $test->run(); <del> ob_start(); <ide> $this->assertEquals(0, $result->errorCount()); <ide> $this->assertFalse($result->wasSuccessful()); <ide> $this->assertEquals(1, $result->failureCount()); <ide> public function testLoadFixturesOnDemand() <ide> $test->fixtureManager = $manager; <ide> $manager->expects($this->once())->method('loadSingle'); <ide> $result = $test->run(); <del> ob_start(); <ide> <ide> $this->assertEquals(0, $result->errorCount()); <ide> } <ide> public function testSkipIf() <ide> { <ide> $test = new FixturizedTestCase('testSkipIfTrue'); <ide> $result = $test->run(); <del> ob_start(); <ide> $this->assertEquals(1, $result->skippedCount()); <ide> <ide> $test = new FixturizedTestCase('testSkipIfFalse'); <ide> $result = $test->run(); <del> ob_start(); <ide> $this->assertEquals(0, $result->skippedCount()); <ide> } <ide>
2
Javascript
Javascript
add addextension method
3b10641e4c87531725a28e0edb5173a472577839
<ide><path>examples/js/loaders/3MFLoader.js <del>THREE.ThreeMFLoader = function ( manager, extensions ) { <add>THREE.ThreeMFLoader = function ( manager ) { <ide> <del> if ( manager instanceof Array ) { <del> <del> this.manager = THREE.DefaultLoadingManager; <del> this.availableExtensions = manager; <del> <del> } else { <del> <del> this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager; <del> this.availableExtensions = extensions || []; <del> <del> } <add> this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager; <add> this.availableExtensions = []; <ide> <ide> }; <ide> <ide> THREE.ThreeMFLoader.prototype = { <ide> <ide> var texturesPartName = texturesPartNames[ i ]; <ide> texturesParts[ texturesPartName ] = zip.file( texturesPartName ).asBinary(); <del> //var img = document.createElement('img'); <del> //img.src = 'data:image/png;,' + btoa(texturesParts[ texturesPartName ]); <ide> <ide> } <ide> <ide> THREE.ThreeMFLoader.prototype = { <ide> <ide> meshData[ 'vertices' ] = new Float32Array( vertices.length ); <ide> <del> for ( var i = 0; i < vertices.length; i++ ) { <add> for ( var i = 0; i < vertices.length; i++ ) { <ide> <del> meshData[ 'vertices' ][ i ] = vertices[ i ]; <add> meshData[ 'vertices' ][ i ] = vertices[ i ]; <ide> <del> } <add> } <ide> <ide> var triangleProperties = []; <ide> var triangles = []; <ide> THREE.ThreeMFLoader.prototype = { <ide> meshData[ 'triangleProperties' ] = triangleProperties; <ide> meshData[ 'triangles' ] = new Uint32Array( triangles.length ); <ide> <del> for ( var i = 0; i < triangles.length; i++ ) { <add> for ( var i = 0; i < triangles.length; i++ ) { <ide> <del> meshData[ 'triangles' ][ i ] = triangles[ i ]; <add> meshData[ 'triangles' ][ i ] = triangles[ i ]; <ide> <del> } <add> } <ide> <ide> return meshData; <ide> <ide> THREE.ThreeMFLoader.prototype = { <ide> function parseResourcesNode( resourcesNode ) { <ide> <ide> var resourcesData = {}; <del> <ide> var geometry, material; <del> <ide> var basematerialsNode = resourcesNode.querySelector( 'basematerials' ); <ide> <ide> if ( basematerialsNode ) { <ide> THREE.ThreeMFLoader.prototype = { <ide> var itemNodes = buildNode.querySelectorAll( 'item' ); <ide> <ide> for ( var i = 0; i < itemNodes.length; i++ ) { <add> <ide> var itemNode = itemNodes[ i ]; <ide> var buildItem = { <ide> objectid: itemNode.getAttribute( 'objectid' ) <ide> THREE.ThreeMFLoader.prototype = { <ide> } <ide> <ide> return group; <add> <ide> } <ide> <ide> var data3mf = loadDocument( data ); <ide> THREE.ThreeMFLoader.prototype = { <ide> <ide> }, <ide> <add> addExtension: function( extension ) { <add> <add> this.availableExtensions.push( extension ); <add> <add> } <add> <ide> };
1
Python
Python
support multiple choice in tf common model tests
20451195f02e771a256faf53e482b85dcd5187b0
<ide><path>src/transformers/modeling_tf_bert.py <ide> def call( <ide> flat_attention_mask = tf.reshape(attention_mask, (-1, seq_length)) if attention_mask is not None else None <ide> flat_token_type_ids = tf.reshape(token_type_ids, (-1, seq_length)) if token_type_ids is not None else None <ide> flat_position_ids = tf.reshape(position_ids, (-1, seq_length)) if position_ids is not None else None <add> flat_inputs_embeds = ( <add> tf.reshape(inputs_embeds, (-1, seq_length, shape_list(inputs_embeds)[3])) <add> if inputs_embeds is not None <add> else None <add> ) <ide> <ide> flat_inputs = [ <ide> flat_input_ids, <ide> flat_attention_mask, <ide> flat_token_type_ids, <ide> flat_position_ids, <ide> head_mask, <del> inputs_embeds, <add> flat_inputs_embeds, <ide> output_attentions, <ide> ] <ide> <ide><path>tests/test_modeling_tf_bert.py <ide> class TFBertModelTest(TFModelTesterMixin, unittest.TestCase): <ide> TFBertForQuestionAnswering, <ide> TFBertForSequenceClassification, <ide> TFBertForTokenClassification, <add> TFBertForMultipleChoice, <ide> ) <ide> if is_tf_available() <ide> else () <ide><path>tests/test_modeling_tf_common.py <ide> import tensorflow as tf <ide> import numpy as np <ide> <del> from transformers import tf_top_k_top_p_filtering, TFAdaptiveEmbedding, TFSharedEmbeddings <add> from transformers import ( <add> tf_top_k_top_p_filtering, <add> TFAdaptiveEmbedding, <add> TFSharedEmbeddings, <add> TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING, <add> ) <ide> <ide> if _tf_gpu_memory_limit is not None: <ide> gpus = tf.config.list_physical_devices("GPU") <ide> class TFModelTesterMixin: <ide> test_resize_embeddings = True <ide> is_encoder_decoder = False <ide> <add> def _prepare_for_class(self, inputs_dict, model_class): <add> if model_class in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.values(): <add> return { <add> k: tf.tile(tf.expand_dims(v, 1), (1, self.model_tester.num_choices, 1)) <add> if isinstance(v, tf.Tensor) and v.ndim != 0 <add> else v <add> for k, v in inputs_dict.items() <add> } <add> return inputs_dict <add> <ide> def test_initialization(self): <ide> pass <ide> # config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() <ide> def test_save_load(self): <ide> <ide> for model_class in self.all_model_classes: <ide> model = model_class(config) <del> outputs = model(inputs_dict) <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> <ide> with tempfile.TemporaryDirectory() as tmpdirname: <ide> model.save_pretrained(tmpdirname) <ide> model = model_class.from_pretrained(tmpdirname) <del> after_outputs = model(inputs_dict) <add> after_outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> <ide> self.assert_outputs_same(after_outputs, outputs) <ide> <ide> def test_pt_tf_model_equivalence(self): <ide> <ide> # Check we can load pt model in tf and vice-versa with model => model functions <ide> <del> tf_model = transformers.load_pytorch_model_in_tf2_model(tf_model, pt_model, tf_inputs=inputs_dict) <add> tf_model = transformers.load_pytorch_model_in_tf2_model( <add> tf_model, pt_model, tf_inputs=self._prepare_for_class(inputs_dict, model_class) <add> ) <ide> pt_model = transformers.load_tf2_model_in_pytorch_model(pt_model, tf_model) <ide> <ide> # Check predictions on first output (logits/hidden-states) are close enought given low-level computational differences <ide> pt_model.eval() <ide> pt_inputs_dict = dict( <del> (name, torch.from_numpy(key.numpy()).to(torch.long)) for name, key in inputs_dict.items() <add> (name, 
torch.from_numpy(key.numpy()).to(torch.long)) <add> for name, key in self._prepare_for_class(inputs_dict, model_class).items() <ide> ) <ide> # need to rename encoder-decoder "inputs" for PyTorch <ide> if "inputs" in pt_inputs_dict and self.is_encoder_decoder: <ide> pt_inputs_dict["input_ids"] = pt_inputs_dict.pop("inputs") <ide> <ide> with torch.no_grad(): <ide> pto = pt_model(**pt_inputs_dict) <del> tfo = tf_model(inputs_dict, training=False) <add> tfo = tf_model(self._prepare_for_class(inputs_dict, model_class), training=False) <ide> tf_hidden_states = tfo[0].numpy() <ide> pt_hidden_states = pto[0].numpy() <ide> <ide> def test_pt_tf_model_equivalence(self): <ide> # Check predictions on first output (logits/hidden-states) are close enought given low-level computational differences <ide> pt_model.eval() <ide> pt_inputs_dict = dict( <del> (name, torch.from_numpy(key.numpy()).to(torch.long)) for name, key in inputs_dict.items() <add> (name, torch.from_numpy(key.numpy()).to(torch.long)) <add> for name, key in self._prepare_for_class(inputs_dict, model_class).items() <ide> ) <ide> # need to rename encoder-decoder "inputs" for PyTorch <ide> if "inputs" in pt_inputs_dict and self.is_encoder_decoder: <ide> pt_inputs_dict["input_ids"] = pt_inputs_dict.pop("inputs") <ide> <ide> with torch.no_grad(): <ide> pto = pt_model(**pt_inputs_dict) <del> tfo = tf_model(inputs_dict) <add> tfo = tf_model(self._prepare_for_class(inputs_dict, model_class)) <ide> tfo = tfo[0].numpy() <ide> pto = pto[0].numpy() <ide> tf_nans = np.copy(np.isnan(tfo)) <ide> def test_pt_tf_model_equivalence(self): <ide> def test_compile_tf_model(self): <ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() <ide> <del> if self.is_encoder_decoder: <del> input_ids = { <del> "decoder_input_ids": tf.keras.Input(batch_shape=(2, 2000), name="decoder_input_ids", dtype="int32"), <del> "inputs": tf.keras.Input(batch_shape=(2, 2000), name="inputs", dtype="int32"), <del> } <del> else: <del> input_ids = tf.keras.Input(batch_shape=(2, 2000), name="input_ids", dtype="int32") <ide> optimizer = tf.keras.optimizers.Adam(learning_rate=3e-5, epsilon=1e-08, clipnorm=1.0) <ide> loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True) <ide> metric = tf.keras.metrics.SparseCategoricalAccuracy("accuracy") <ide> <ide> for model_class in self.all_model_classes: <add> if self.is_encoder_decoder: <add> input_ids = { <add> "decoder_input_ids": tf.keras.Input( <add> batch_shape=(2, 2000), name="decoder_input_ids", dtype="int32" <add> ), <add> "inputs": tf.keras.Input(batch_shape=(2, 2000), name="inputs", dtype="int32"), <add> } <add> elif model_class in TF_MODEL_FOR_MULTIPLE_CHOICE_MAPPING.values(): <add> input_ids = tf.keras.Input(batch_shape=(4, 2, 2000), name="input_ids", dtype="int32") <add> else: <add> input_ids = tf.keras.Input(batch_shape=(2, 2000), name="input_ids", dtype="int32") <add> <ide> # Prepare our model <ide> model = model_class(config) <ide> <ide> # Let's load it from the disk to be sure we can use pretrained weights <ide> with tempfile.TemporaryDirectory() as tmpdirname: <del> outputs = model(inputs_dict) # build the model <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) # build the model <ide> model.save_pretrained(tmpdirname) <ide> model = model_class.from_pretrained(tmpdirname) <ide> <ide> def test_keyword_and_dict_args(self): <ide> <ide> for model_class in self.all_model_classes: <ide> model = model_class(config) <del> outputs_dict = model(inputs_dict) <add> outputs_dict = 
model(self._prepare_for_class(inputs_dict, model_class)) <ide> <del> inputs_keywords = copy.deepcopy(inputs_dict) <add> inputs_keywords = copy.deepcopy(self._prepare_for_class(inputs_dict, model_class)) <ide> input_ids = inputs_keywords.pop("input_ids" if not self.is_encoder_decoder else "inputs", None,) <ide> outputs_keywords = model(input_ids, **inputs_keywords) <ide> output_dict = outputs_dict[0].numpy() <ide> def test_attention_outputs(self): <ide> inputs_dict["output_attentions"] = True <ide> config.output_hidden_states = False <ide> model = model_class(config) <del> outputs = model(inputs_dict) <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> attentions = [t.numpy() for t in outputs[-1]] <ide> self.assertEqual(model.config.output_hidden_states, False) <ide> self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) <ide> def test_attention_outputs(self): <ide> del inputs_dict["output_attentions"] <ide> config.output_attentions = True <ide> model = model_class(config) <del> outputs = model(inputs_dict) <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> attentions = [t.numpy() for t in outputs[-1]] <ide> self.assertEqual(model.config.output_hidden_states, False) <ide> self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) <ide> def test_attention_outputs(self): <ide> inputs_dict["output_attentions"] = True <ide> config.output_hidden_states = True <ide> model = model_class(config) <del> outputs = model(inputs_dict) <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1), len(outputs)) <ide> self.assertEqual(model.config.output_hidden_states, True) <ide> <ide> def test_hidden_states_output(self): <ide> for model_class in self.all_model_classes: <ide> config.output_hidden_states = True <ide> model = model_class(config) <del> outputs = model(inputs_dict) <add> outputs = model(self._prepare_for_class(inputs_dict, model_class)) <ide> hidden_states = [t.numpy() for t in outputs[-1]] <ide> self.assertEqual(model.config.output_hidden_states, True) <ide> self.assertEqual(len(hidden_states), self.model_tester.num_hidden_layers + 1) <ide> def test_determinism(self): <ide> for model_class in self.all_model_classes: <ide> model = model_class(config) <ide> first, second = ( <del> model(inputs_dict, training=False)[0], <del> model(inputs_dict, training=False)[0], <add> model(self._prepare_for_class(inputs_dict, model_class), training=False)[0], <add> model(self._prepare_for_class(inputs_dict, model_class), training=False)[0], <ide> ) <ide> out_1 = first.numpy() <ide> out_2 = second.numpy() <ide> def _get_embeds(self, wte, input_ids): <ide> <ide> def test_inputs_embeds(self): <ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() <del> if not self.is_encoder_decoder: <del> input_ids = inputs_dict["input_ids"] <del> del inputs_dict["input_ids"] <del> else: <del> encoder_input_ids = inputs_dict["inputs"] <del> decoder_input_ids = inputs_dict["decoder_input_ids"] <del> del inputs_dict["inputs"] <del> del inputs_dict["decoder_input_ids"] <ide> <ide> for model_class in self.all_model_classes: <ide> model = model_class(config) <ide> <add> inputs = copy.deepcopy(self._prepare_for_class(inputs_dict, model_class)) <add> if not self.is_encoder_decoder: <add> input_ids = inputs["input_ids"] <add> del inputs["input_ids"] <add> else: <add> encoder_input_ids = inputs["inputs"] <add> decoder_input_ids = 
inputs.get("decoder_input_ids", encoder_input_ids) <add> del inputs["inputs"] <add> inputs.pop("decoder_input_ids", None) <add> <ide> wte = model.get_input_embeddings() <ide> if not self.is_encoder_decoder: <del> inputs_dict["inputs_embeds"] = self._get_embeds(wte, input_ids) <add> inputs["inputs_embeds"] = self._get_embeds(wte, input_ids) <ide> else: <del> inputs_dict["inputs_embeds"] = self._get_embeds(wte, encoder_input_ids) <del> inputs_dict["decoder_inputs_embeds"] = self._get_embeds(wte, decoder_input_ids) <add> inputs["inputs_embeds"] = self._get_embeds(wte, encoder_input_ids) <add> inputs["decoder_inputs_embeds"] = self._get_embeds(wte, decoder_input_ids) <ide> <del> model(inputs_dict) <add> model(inputs) <ide> <ide> def test_lm_head_model_random_no_beam_search_generate(self): <ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
3
Javascript
Javascript
add file parameter to getshallowdependencies()
0b17524134e7139686570f957744cee62f8336b1
<ide><path>local-cli/server/util/attachHMRServer.js <ide> type DependencyOptions = {| <ide> +minify: boolean, <ide> +platform: ?string, <ide> +recursive: boolean, <add> +rootEntryFile: string, <ide> |}; <ide> <ide> /** <ide> function attachHMRServer<TModule: Moduleish>( <ide> const response = await packagerServer.getDependencies({ <ide> dev: true, <ide> entryFile: bundleEntry, <add> rootEntryFile: bundleEntry, <ide> hot: true, <ide> minify: false, <ide> platform: platform, <ide> function attachHMRServer<TModule: Moduleish>( <ide> const dependencies = await packagerServer.getShallowDependencies({ <ide> dev: true, <ide> entryFile: dep.path, <add> rootEntryFile: bundleEntry, <ide> hot: true, <ide> minify: false, <ide> platform: platform, <ide> function attachHMRServer<TModule: Moduleish>( <ide> dev: true, <ide> minify: false, <ide> entryFile: filename, <add> rootEntryFile: client.bundleEntry, <ide> hot: true, <ide> platform: client.platform, <ide> recursive: true, <ide> function attachHMRServer<TModule: Moduleish>( <ide> const response = await packagerServer.getDependencies({ <ide> dev: true, <ide> entryFile: filename, <add> rootEntryFile: client.bundleEntry, <ide> hot: true, <ide> minify: false, <ide> platform: client.platform,
1
Text
Text
update documentation for where chained modifiers
5dfaf8b5b5d6175fc6dc1992f47f6ac740bdc011
<ide><path>guides/source/active_record_querying.md <ide> This code will generate SQL like this: <ide> SELECT * FROM clients WHERE (clients.orders_count IN (1,3,5)) <ide> ``` <ide> <del>### NOT, LIKE, and NOT LIKE Conditions <add>### NOT Conditions <ide> <del>`NOT`, `LIKE`, and `NOT LIKE` SQL queries can be built by `where.not`, `where.like`, and `where.not_like` respectively. <add>`NOT` SQL queries can be built by `where.not`. <ide> <ide> ```ruby <ide> Post.where.not(author: author) <del> <del>Author.where.like(name: 'Nari%') <del> <del>Developer.where.not_like(name: 'Tenderl%') <ide> ``` <ide> <del>In other words, these sort of queries can be generated by calling `where` with no argument, then immediately chain with `not`, `like`, or `not_like` passing `where` conditions. <add>In other words, this query can be generated by calling `where` with no argument, then immediately chain with `not` passing `where` conditions. <ide> <ide> Ordering <ide> --------
1
Python
Python
fix another docstring formatting
d7f1ae929c99c71dee6a7a950937608ddbe92fce
<ide><path>numpy/testing/decorators.py <ide> def skipif(skip_condition, msg=None): <ide> ''' Make function raise SkipTest exception if skip_condition is true <ide> <ide> Parameters <del> --------- <add> ---------- <ide> skip_condition : bool or callable. <ide> Flag to determine whether to skip test. If the condition is a <ide> callable, it is used at runtime to dynamically make the decision. This
1
Go
Go
update archive/tar to match go 1.17.0
aa606307b7d10890885e2c76c1652c48a6914db4
<ide><path>vendor/archive/tar/stat_actime1.go <ide> // Use of this source code is governed by a BSD-style <ide> // license that can be found in the LICENSE file. <ide> <add>//go:build aix || linux || dragonfly || openbsd || solaris <ide> // +build aix linux dragonfly openbsd solaris <ide> <ide> package tar <ide><path>vendor/archive/tar/stat_actime2.go <ide> // Use of this source code is governed by a BSD-style <ide> // license that can be found in the LICENSE file. <ide> <add>//go:build darwin || freebsd || netbsd <ide> // +build darwin freebsd netbsd <ide> <ide> package tar <ide><path>vendor/archive/tar/stat_unix.go <ide> // Use of this source code is governed by a BSD-style <ide> // license that can be found in the LICENSE file. <ide> <add>//go:build aix || linux || darwin || dragonfly || freebsd || openbsd || netbsd || solaris <ide> // +build aix linux darwin dragonfly freebsd openbsd netbsd solaris <ide> <ide> package tar <ide><path>vendor/archive/tar/tar_test.go <ide> func TestFileInfoHeaderDir(t *testing.T) { <ide> func TestFileInfoHeaderSymlink(t *testing.T) { <ide> testenv.MustHaveSymlink(t) <ide> <del> tmpdir, err := os.MkdirTemp("", "TestFileInfoHeaderSymlink") <del> if err != nil { <del> t.Fatal(err) <del> } <del> defer os.RemoveAll(tmpdir) <add> tmpdir := t.TempDir() <ide> <ide> link := filepath.Join(tmpdir, "link") <ide> target := tmpdir <del> err = os.Symlink(target, link) <del> if err != nil { <add> if err := os.Symlink(target, link); err != nil { <ide> t.Fatal(err) <ide> } <ide> fi, err := os.Lstat(link)
4
Python
Python
fix textcat model for gpu
28c26e212dcd17773209596bedb8d928b2583d84
<ide><path>spacy/_ml.py <ide> def _zero_init_impl(self, *args, **kwargs): <ide> @layerize <ide> def _preprocess_doc(docs, drop=0.0): <ide> keys = [doc.to_array(LOWER) for doc in docs] <del> ops = Model.ops <ide> # The dtype here matches what thinc is expecting -- which differs per <ide> # platform (by int definition). This should be fixed once the problem <ide> # is fixed on Thinc's side. <del> lengths = ops.asarray([arr.shape[0] for arr in keys], dtype=numpy.int_) <del> keys = ops.xp.concatenate(keys) <del> vals = ops.allocate(keys.shape) + 1.0 <add> lengths = numpy.array([arr.shape[0] for arr in keys], dtype=numpy.int_) <add> keys = numpy.concatenate(keys) <add> vals = numpy.zeros(keys.shape, dtype='f') <ide> return (keys, vals, lengths), None <ide> <ide> <add>def with_cpu(ops, model): <add> model.to_cpu() <add> def with_cpu_forward(inputs, drop=0.): <add> cpu_outputs, backprop = model.begin_update(_to_cpu(inputs), drop=drop) <add> gpu_outputs = _to_device(ops, cpu_outputs) <add> <add> def with_cpu_backprop(d_outputs, sgd=None): <add> cpu_d_outputs = _to_cpu(d_outputs) <add> return backprop(cpu_d_outputs, sgd=sgd) <add> <add> return gpu_outputs, with_cpu_backprop <add> <add> return wrap(with_cpu_forward, model) <add> <add> <add>def _to_cpu(X): <add> if isinstance(X, numpy.ndarray): <add> return X <add> elif isinstance(X, tuple): <add> return tuple([_to_cpu(x) for x in X]) <add> elif isinstance(X, list): <add> return [_to_cpu(x) for x in X] <add> elif hasattr(X, 'get'): <add> return X.get() <add> else: <add> return X <add> <add> <add>def _to_device(ops, X): <add> if isinstance(X, tuple): <add> return tuple([_to_device(ops, x) for x in X]) <add> elif isinstance(X, list): <add> return [_to_device(ops, x) for x in X] <add> else: <add> return ops.asarray(X) <add> <add> <ide> @layerize <ide> def _preprocess_doc_bigrams(docs, drop=0.0): <ide> unigrams = [doc.to_array(LOWER) for doc in docs] <ide> def build_text_classifier(nr_class, width=64, **cfg): <ide> >> zero_init(Affine(nr_class, width, drop_factor=0.0)) <ide> ) <ide> <del> linear_model = _preprocess_doc >> LinearModel(nr_class) <add> linear_model = ( <add> _preprocess_doc <add> >> with_cpu(Model.ops, LinearModel(nr_class)) <add> ) <ide> if cfg.get('exclusive_classes'): <ide> output_layer = Softmax(nr_class, nr_class * 2) <ide> else:
1
Mixed
Text
remove contentlength middleware from the defaults
56903585a099ab67a7acfaaef0a02db8fe80c450
<ide><path>railties/CHANGELOG.md <add>* Remove ContentLength middleware from the defaults. If you want it, just <add> add it as a middleware in your config. <add> <add> *Egg McMuffin* <add> <ide> * Make it possible to customize the executable inside rerun snippets. <ide> <ide> *Yves Senn* <ide><path>railties/lib/rails/commands/server.rb <ide> def start <ide> <ide> def middleware <ide> middlewares = [] <del> middlewares << [::Rack::ContentLength] <ide> <ide> # FIXME: add Rack::Lock in the case people are using webrick. <ide> # This is to remain backwards compatible for those who are
2
Python
Python
fix some typos in random forest classifier
7ffdef2636e8368d72f50815fe8ced98294b1053
<ide><path>machine_learning/random_forest_classifier.py <ide> def main(): <ide> <ide> """ <del> Random Tree Classifier Example using sklearn function. <add> Random Forest Classifier Example using sklearn function. <ide> Iris type dataset is used to demonstrate algorithm. <ide> """ <ide> <del> # Load Iris house price dataset <add> # Load Iris dataset <ide> iris = load_iris() <ide> <ide> # Split dataset into train and test data
1
Java
Java
convert non-utf-8 json
4882eb278d3ed71a75f0183c790228847b5bc08b
<ide><path>spring-web/src/main/java/org/springframework/http/codec/json/AbstractJackson2Decoder.java <ide> public Flux<Object> decode(Publisher<DataBuffer> input, ResolvableType elementTy <ide> forceUseOfBigDecimal = true; <ide> } <ide> <del> Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(Flux.from(input), mapper.getFactory(), mapper, <add> Flux<DataBuffer> processed = processInput(input, elementType, mimeType, hints); <add> Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(processed, mapper.getFactory(), mapper, <ide> true, forceUseOfBigDecimal, getMaxInMemorySize()); <ide> <ide> ObjectReader reader = getObjectReader(elementType, hints); <ide> public Flux<Object> decode(Publisher<DataBuffer> input, ResolvableType elementTy <ide> }); <ide> } <ide> <add> /** <add> * Process the input publisher into a flux. Default implementation returns <add> * {@link Flux#from(Publisher)}, but subclasses can choose to to customize <add> * this behaviour. <add> * @param input the {@code DataBuffer} input stream to process <add> * @param elementType the expected type of elements in the output stream <add> * @param mimeType the MIME type associated with the input stream (optional) <add> * @param hints additional information about how to do encode <add> * @return the processed flux <add> * @since 5.1.14 <add> */ <add> protected Flux<DataBuffer> processInput(Publisher<DataBuffer> input, ResolvableType elementType, <add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) { <add> <add> return Flux.from(input); <add> } <add> <ide> @Override <ide> public Mono<Object> decodeToMono(Publisher<DataBuffer> input, ResolvableType elementType, <ide> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) { <ide><path>spring-web/src/main/java/org/springframework/http/codec/json/Jackson2JsonDecoder.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. 
<ide> <ide> package org.springframework.http.codec.json; <ide> <add>import java.nio.charset.Charset; <add>import java.nio.charset.StandardCharsets; <add>import java.util.Arrays; <add>import java.util.Map; <add> <ide> import com.fasterxml.jackson.databind.ObjectMapper; <add>import org.reactivestreams.Publisher; <add>import reactor.core.publisher.Flux; <ide> <add>import org.springframework.core.ResolvableType; <add>import org.springframework.core.codec.StringDecoder; <add>import org.springframework.core.io.buffer.DataBuffer; <add>import org.springframework.core.io.buffer.DataBufferFactory; <add>import org.springframework.core.io.buffer.DefaultDataBufferFactory; <ide> import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; <add>import org.springframework.lang.Nullable; <ide> import org.springframework.util.MimeType; <add>import org.springframework.util.MimeTypeUtils; <ide> <ide> /** <ide> * Decode a byte stream into JSON and convert to Object's with Jackson 2.9, <ide> */ <ide> public class Jackson2JsonDecoder extends AbstractJackson2Decoder { <ide> <add> private static final StringDecoder STRING_DECODER = StringDecoder.textPlainOnly(Arrays.asList(",", "\n"), false); <add> <add> private static final ResolvableType STRING_TYPE = ResolvableType.forClass(String.class); <add> <add> <ide> public Jackson2JsonDecoder() { <ide> super(Jackson2ObjectMapperBuilder.json().build()); <ide> } <ide> public Jackson2JsonDecoder(ObjectMapper mapper, MimeType... mimeTypes) { <ide> super(mapper, mimeTypes); <ide> } <ide> <add> @Override <add> protected Flux<DataBuffer> processInput(Publisher<DataBuffer> input, ResolvableType elementType, <add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) { <add> <add> Flux<DataBuffer> flux = Flux.from(input); <add> if (mimeType == null) { <add> return flux; <add> } <add> <add> // Jackson asynchronous parser only supports UTF-8 <add> Charset charset = mimeType.getCharset(); <add> if (charset == null || StandardCharsets.UTF_8.equals(charset) || StandardCharsets.US_ASCII.equals(charset)) { <add> return flux; <add> } <add> <add> // Potentially, the memory consumption of this conversion could be improved by using CharBuffers instead <add> // of allocating Strings, but that would require refactoring the buffer tokenization code from StringDecoder <add> <add> MimeType textMimeType = new MimeType(MimeTypeUtils.TEXT_PLAIN, charset); <add> Flux<String> decoded = STRING_DECODER.decode(input, STRING_TYPE, textMimeType, null); <add> DataBufferFactory factory = new DefaultDataBufferFactory(); <add> return decoded.map(s -> factory.wrap(s.getBytes(StandardCharsets.UTF_8))); <add> } <add> <ide> } <ide><path>spring-web/src/test/java/org/springframework/http/codec/json/Jackson2JsonDecoderTests.java <ide> <ide> import java.io.IOException; <ide> import java.math.BigDecimal; <add>import java.nio.charset.Charset; <ide> import java.nio.charset.StandardCharsets; <ide> import java.util.Collections; <ide> import java.util.List; <ide> import reactor.core.publisher.Mono; <ide> import reactor.test.StepVerifier; <ide> <add>import org.springframework.core.ParameterizedTypeReference; <ide> import org.springframework.core.ResolvableType; <ide> import org.springframework.core.codec.CodecException; <ide> import org.springframework.core.codec.DecodingException; <ide> public void bigDecimalFlux() { <ide> ); <ide> } <ide> <add> @Test <add> public void decodeNonUtf8Encoding() { <add> Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16); <add> <add> 
testDecode(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() { <add> }), <add> step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("foo", "bar")) <add> .verifyComplete(), <add> MediaType.parseMediaType("application/json; charset=utf-16"), <add> null); <add> } <add> <add> @Test <add> public void decodeMonoNonUtf8Encoding() { <add> Mono<DataBuffer> input = stringBuffer("{\"foo\":\"bar\"}", StandardCharsets.UTF_16); <add> <add> testDecodeToMono(input, ResolvableType.forType(new ParameterizedTypeReference<Map<String, String>>() { <add> }), <add> step -> step.assertNext(o -> assertThat((Map<String, String>) o).containsEntry("foo", "bar")) <add> .verifyComplete(), <add> MediaType.parseMediaType("application/json; charset=utf-16"), <add> null); <add> } <add> <ide> private Mono<DataBuffer> stringBuffer(String value) { <add> return stringBuffer(value, StandardCharsets.UTF_8); <add> } <add> <add> private Mono<DataBuffer> stringBuffer(String value, Charset charset) { <ide> return Mono.defer(() -> { <del> byte[] bytes = value.getBytes(StandardCharsets.UTF_8); <add> byte[] bytes = value.getBytes(charset); <ide> DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length); <ide> buffer.write(bytes); <ide> return Mono.just(buffer);
3
Javascript
Javascript
change var to let in lib/https.js
9a4b16f62056dcffd71f5d564684488c390a9a66
<ide><path>lib/https.js <ide> Object.setPrototypeOf(Agent, HttpAgent); <ide> Agent.prototype.createConnection = createConnection; <ide> <ide> Agent.prototype.getName = function getName(options) { <del> var name = HttpAgent.prototype.getName.call(this, options); <add> let name = HttpAgent.prototype.getName.call(this, options); <ide> <ide> name += ':'; <ide> if (options.ca)
1
Python
Python
add support for sqlalchemy 1.4
0953a4d9ecf7008d10236359864d334695cc530c
<ide><path>celery/backends/database/session.py <ide> from kombu.utils.compat import register_after_fork <ide> from sqlalchemy import create_engine <ide> from sqlalchemy.exc import DatabaseError <del>from sqlalchemy.ext.declarative import declarative_base <ide> from sqlalchemy.orm import sessionmaker <ide> from sqlalchemy.pool import NullPool <ide> <ide> from celery.utils.time import get_exponential_backoff_interval <ide> <add>try: <add> from sqlalchemy.orm import declarative_base <add>except ImportError: <add> # TODO: Remove this once we drop support for SQLAlchemy < 1.4. <add> from sqlalchemy.ext.declarative import declarative_base <add> <ide> ResultModelBase = declarative_base() <ide> <ide> __all__ = ('SessionManager',)
1
Ruby
Ruby
remove a puts message
0c143c03441cf2a66557ec7ba2f5d3d2f889fb5d
<ide><path>lib/action_cable/connection/registry.rb <ide> def register_connection <ide> <ide> pubsub.subscribe(internal_redis_channel, &callback) <ide> logger.info "[ActionCable] Registered connection (#{connection_identifier})" <del> puts "[ActionCable] Registered connection: #{connection_identifier}(#{internal_redis_channel})" <ide> end <ide> end <ide>
1
Go
Go
add implementation and test for setupiptables()
6311a967107bf4e97c3474d9d7da6e521fd80365
<ide><path>libnetwork/drivers/bridge/bridge.go <ide> type Configuration struct { <ide> FixedCIDRv6 *net.IPNet <ide> EnableIPv6 bool <ide> EnableIPTables bool <add> EnableIPMasquerade bool <add> EnableICC bool <ide> EnableIPForwarding bool <ide> } <ide> <ide><path>libnetwork/drivers/bridge/setup.go <ide> func (b *bridgeSetup) apply() error { <ide> func (b *bridgeSetup) queueStep(step setupStep) { <ide> b.steps = append(b.steps, step) <ide> } <del> <del>//---------------------------------------------------------------------------// <del> <del>func setupIPTables(i *bridgeInterface) error { <del> return nil <del>} <ide><path>libnetwork/drivers/bridge/setup_ip_tables.go <add>package bridge <add> <add>import ( <add> "fmt" <add> "net" <add> <add> "github.com/docker/docker/daemon/networkdriver" <add> "github.com/docker/docker/daemon/networkdriver/portmapper" <add> "github.com/docker/docker/pkg/iptables" <add>) <add> <add>// DockerChain: DOCKER iptable chain name <add>const ( <add> DockerChain = "DOCKER" <add>) <add> <add>func setupIPTables(i *bridgeInterface) error { <add> // Sanity check. <add> if i.Config.EnableIPTables == false { <add> return fmt.Errorf("Unexpected request to set IP tables for interface: %s", i.Config.BridgeName) <add> } <add> <add> addrv4, _, err := networkdriver.GetIfaceAddr(i.Config.BridgeName) <add> if err != nil { <add> return fmt.Errorf("Failed to setup IP tables, cannot acquire Interface address: %s", err.Error()) <add> } <add> if err = setupIPTablesInternal(i.Config.BridgeName, addrv4, i.Config.EnableICC, i.Config.EnableIPMasquerade, true); err != nil { <add> return fmt.Errorf("Failed to Setup IP tables: %s", err.Error()) <add> } <add> <add> _, err = iptables.NewChain(DockerChain, i.Config.BridgeName, iptables.Nat) <add> if err != nil { <add> return fmt.Errorf("Failed to create NAT chain: %s", err.Error()) <add> } <add> <add> chain, err := iptables.NewChain(DockerChain, i.Config.BridgeName, iptables.Filter) <add> if err != nil { <add> return fmt.Errorf("Failed to create FILTER chain: %s", err.Error()) <add> } <add> <add> portmapper.SetIptablesChain(chain) <add> <add> return nil <add>} <add> <add>func setupIPTablesInternal(bridgeIface string, addr net.Addr, icc, ipmasq, enable bool) error { <add> var ( <add> address = addr.String() <add> natRule = []string{"POSTROUTING", "-t", "nat", "-s", address, "!", "-o", bridgeIface, "-j", "MASQUERADE"} <add> outRule = []string{"FORWARD", "-i", bridgeIface, "!", "-o", bridgeIface, "-j", "ACCEPT"} <add> inRule = []string{"FORWARD", "-o", bridgeIface, "-m", "conntrack", "--ctstate", "RELATED,ESTABLISHED", "-j", "ACCEPT"} <add> ) <add> <add> // Set NAT. <add> if ipmasq { <add> if err := programChainRule(natRule, "NAT", enable); err != nil { <add> return err <add> } <add> } <add> <add> // Set Inter Container Communication. <add> if err := setIcc(bridgeIface, icc, enable); err != nil { <add> return err <add> } <add> <add> // Set Accept on all non-intercontainer outgoing packets. <add> if err := programChainRule(outRule, "ACCEPT NON_ICC OUTGOING", enable); err != nil { <add> return err <add> } <add> <add> // Set Accept on incoming packets for existing connections. 
<add> if err := programChainRule(inRule, "ACCEPT INCOMING", enable); err != nil { <add> return err <add> } <add> <add> return nil <add>} <add> <add>func programChainRule(ruleArgs []string, ruleDescr string, insert bool) error { <add> var ( <add> prefix []string <add> operation string <add> condition bool <add> ) <add> <add> if insert { <add> condition = !iptables.Exists(ruleArgs...) <add> prefix = []string{"-I"} <add> operation = "enable" <add> } else { <add> condition = iptables.Exists(ruleArgs...) <add> prefix = []string{"-D"} <add> operation = "disable" <add> } <add> <add> if condition { <add> if output, err := iptables.Raw(append(prefix, ruleArgs...)...); err != nil { <add> return fmt.Errorf("Unable to %s %s rule: %s", operation, ruleDescr, err.Error()) <add> } else if len(output) != 0 { <add> return &iptables.ChainError{Chain: ruleDescr, Output: output} <add> } <add> } <add> <add> return nil <add>} <add> <add>func setIcc(bridgeIface string, iccEnable, insert bool) error { <add> var ( <add> args = []string{"FORWARD", "-i", bridgeIface, "-o", bridgeIface, "-j"} <add> acceptArgs = append(args, "ACCEPT") <add> dropArgs = append(args, "DROP") <add> ) <add> <add> if insert { <add> if !iccEnable { <add> iptables.Raw(append([]string{"-D"}, acceptArgs...)...) <add> <add> if !iptables.Exists(dropArgs...) { <add> if output, err := iptables.Raw(append([]string{"-I"}, dropArgs...)...); err != nil { <add> return fmt.Errorf("Unable to prevent intercontainer communication: %s", err.Error()) <add> } else if len(output) != 0 { <add> return fmt.Errorf("Error disabling intercontainer communication: %s", output) <add> } <add> } <add> } else { <add> iptables.Raw(append([]string{"-D"}, dropArgs...)...) <add> <add> if !iptables.Exists(acceptArgs...) { <add> if output, err := iptables.Raw(append([]string{"-I"}, acceptArgs...)...); err != nil { <add> return fmt.Errorf("Unable to allow intercontainer communication: %s", err.Error()) <add> } else if len(output) != 0 { <add> return fmt.Errorf("Error enabling intercontainer communication: %s", output) <add> } <add> } <add> } <add> } else { <add> // Remove any ICC rule. <add> if !iccEnable { <add> if iptables.Exists(dropArgs...) { <add> iptables.Raw(append([]string{"-D"}, dropArgs...)...) <add> } <add> } else { <add> if iptables.Exists(acceptArgs...) { <add> iptables.Raw(append([]string{"-D"}, acceptArgs...)...) <add> } <add> } <add> } <add> <add> return nil <add>} <ide><path>libnetwork/drivers/bridge/setup_ip_tables_test.go <add>package bridge <add> <add>import ( <add> "net" <add> "testing" <add> <add> "github.com/docker/docker/pkg/iptables" <add> "github.com/docker/libnetwork" <add>) <add> <add>const ( <add> iptablesTestBridgeIP = "192.168.42.1" <add>) <add> <add>func TestProgramIPTable(t *testing.T) { <add> // Create a test bridge with a basic bridge configuration (name + IPv4). <add> defer libnetwork.SetupTestNetNS(t)() <add> createTestBridge(getBasicTestConfig(), t) <add> <add> // Store various iptables chain rules we care for. 
<add> rules := []struct { <add> ruleArgs []string <add> descr string <add> }{{[]string{"FORWARD", "-d", "127.1.2.3", "-i", "lo", "-o", "lo", "-j", "DROP"}, "Test Loopback"}, <add> {[]string{"POSTROUTING", "-t", "nat", "-s", iptablesTestBridgeIP, "!", "-o", DefaultBridgeName, "-j", "MASQUERADE"}, "NAT Test"}, <add> {[]string{"FORWARD", "-i", DefaultBridgeName, "!", "-o", DefaultBridgeName, "-j", "ACCEPT"}, "Test ACCEPT NON_ICC OUTGOING"}, <add> {[]string{"FORWARD", "-o", DefaultBridgeName, "-m", "conntrack", "--ctstate", "RELATED,ESTABLISHED", "-j", "ACCEPT"}, "Test ACCEPT INCOMING"}, <add> {[]string{"FORWARD", "-i", DefaultBridgeName, "-o", DefaultBridgeName, "-j", "ACCEPT"}, "Test enable ICC"}, <add> {[]string{"FORWARD", "-i", DefaultBridgeName, "-o", DefaultBridgeName, "-j", "DROP"}, "Test disable ICC"}, <add> } <add> <add> // Assert the chain rules' insertion and removal. <add> for _, c := range rules { <add> assertIPTableChainProgramming(c.ruleArgs, c.descr, t) <add> } <add>} <add> <add>func TestSetupIPTables(t *testing.T) { <add> // Create a test bridge with a basic bridge configuration (name + IPv4). <add> defer libnetwork.SetupTestNetNS(t)() <add> br := getBasicTestConfig() <add> createTestBridge(br, t) <add> <add> // Modify iptables params in base configuration and apply them. <add> br.Config.EnableIPTables = true <add> assertBridgeConfig(br, t) <add> <add> br.Config.EnableIPMasquerade = true <add> assertBridgeConfig(br, t) <add> <add> br.Config.EnableICC = true <add> assertBridgeConfig(br, t) <add> <add> br.Config.EnableIPMasquerade = false <add> assertBridgeConfig(br, t) <add>} <add> <add>func getBasicTestConfig() *bridgeInterface { <add> return &bridgeInterface{ <add> Config: &Configuration{ <add> BridgeName: DefaultBridgeName, <add> AddressIPv4: &net.IPNet{IP: net.ParseIP(iptablesTestBridgeIP), Mask: net.CIDRMask(16, 32)}, <add> }, <add> } <add>} <add> <add>func createTestBridge(br *bridgeInterface, t *testing.T) { <add> if err := setupDevice(br); err != nil { <add> t.Fatalf("Failed to create the testing Bridge: %s", err.Error()) <add> } <add> if err := setupBridgeIPv4(br); err != nil { <add> t.Fatalf("Failed to bring up the testing Bridge: %s", err.Error()) <add> } <add>} <add> <add>// Assert base function which pushes iptables chain rules on insertion and removal. <add>func assertIPTableChainProgramming(args []string, descr string, t *testing.T) { <add> // Add <add> if err := programChainRule(args, descr, true); err != nil { <add> t.Fatalf("Failed to program iptable rule %s: %s", descr, err.Error()) <add> } <add> if iptables.Exists(args...) == false { <add> t.Fatalf("Failed to effectively program iptable rule: %s", descr) <add> } <add> <add> // Remove <add> if err := programChainRule(args, descr, false); err != nil { <add> t.Fatalf("Failed to remove iptable rule %s: %s", descr, err.Error()) <add> } <add> if iptables.Exists(args...) == true { <add> t.Fatalf("Failed to effectively remove iptable rule: %s", descr) <add> } <add>} <add> <add>// Assert function which pushes chains based on bridge config parameters. <add>func assertBridgeConfig(br *bridgeInterface, t *testing.T) { <add> // Attempt programming of ip tables. <add> err := setupIPTables(br) <add> if err != nil { <add> t.Fatalf("%v", err) <add> } <add>}
4
Text
Text
fix minor typo in streams.md
c9d9ddcbc99b04489ca378250c555a57af3aa050
<ide><path>doc/api/stream.md <ide> primarily for examples and testing, but there are some use cases where <ide> Prior to Node.js 0.10, the `Readable` stream interface was simpler, but also <ide> less powerful and less useful. <ide> <del>* Rather than waiting for calls the [`stream.read()`][stream-read] method, <add>* Rather than waiting for calls to the [`stream.read()`][stream-read] method, <ide> [`'data'`][] events would begin emitting immediately. Applications that <ide> would need to perform some amount of work to decide how to handle data <ide> were required to store read data into buffers so the data would not be lost.
1
Text
Text
add note about path.basename on windows
a03e15f571dc54455896489589504df7ce45a0c8
<ide><path>doc/api/path.md <ide> path.basename('/foo/bar/baz/asdf/quux.html', '.html'); <ide> // Returns: 'quux' <ide> ``` <ide> <add>Although Windows usually treats file names, including file extensions, in a <add>case-insensitive manner, this function does not. For example, `C:\\foo.html` and <add>`C:\\foo.HTML` refer to the same file, but `basename` treats the extension as a <add>case-sensitive string: <add> <add>```js <add>path.win32.basename('C:\\foo.html', '.html'); <add>// Returns: 'foo' <add> <add>path.win32.basename('C:\\foo.HTML', '.html'); <add>// Returns: 'foo.HTML' <add>``` <add> <ide> A [`TypeError`][] is thrown if `path` is not a string or if `ext` is given <ide> and is not a string. <ide>
1
Text
Text
add shigeki as collaborator
aea9b89b5c2e3fb9fdbd96c7483eb1f60d09a39e
<ide><path>README.md <ide> information about the governance of the io.js project, see <ide> * **Nikolai Vavilov** ([@seishun](https://github.com/seishun)) &lt;[email protected]&gt; <ide> * **Nicu Micleușanu** ([@micnic](https://github.com/micnic)) &lt;[email protected]&gt; <ide> * **Aleksey Smolenchuk** ([@lxe](https://github.com/lxe)) &lt;[email protected]&gt; <add>* **Shigeki Ohtsu** ([@shigeki](https://github.com/shigeki)) &lt;[email protected]&gt; <ide> <ide> Collaborators follow the [COLLABORATOR_GUIDE.md](./COLLABORATOR_GUIDE.md) in <ide> maintaining the io.js project.
1
Javascript
Javascript
add composition events to react
578863881f50dbd1ce59c4ee453f51ce895b57b0
<ide><path>src/core/ReactEventEmitter.js <ide> var ReactEventEmitter = merge(ReactEventEmitterMixin, { <ide> mountAt <ide> ); <ide> <add> trapBubbledEvent( <add> topLevelTypes.topCompositionEnd, <add> 'compositionend', <add> mountAt <add> ); <add> trapBubbledEvent( <add> topLevelTypes.topCompositionStart, <add> 'compositionstart', <add> mountAt <add> ); <add> trapBubbledEvent( <add> topLevelTypes.topCompositionUpdate, <add> 'compositionupdate', <add> mountAt <add> ); <add> <ide> if (isEventSupported('drag')) { <ide> trapBubbledEvent(topLevelTypes.topDrag, 'drag', mountAt); <ide> trapBubbledEvent(topLevelTypes.topDragEnd, 'dragend', mountAt); <ide><path>src/event/EventConstants.js <ide> var topLevelTypes = keyMirror({ <ide> topBlur: null, <ide> topChange: null, <ide> topClick: null, <add> topCompositionEnd: null, <add> topCompositionStart: null, <add> topCompositionUpdate: null, <ide> topCopy: null, <ide> topCut: null, <ide> topDOMCharacterDataModified: null, <ide><path>src/event/synthetic/SyntheticCompositionEvent.js <add>/** <add> * Copyright 2013 Facebook, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> * <add> * @providesModule SyntheticCompositionEvent <add> * @typechecks static-only <add> */ <add> <add>"use strict"; <add> <add>var SyntheticEvent = require('SyntheticEvent'); <add> <add>/** <add> * @interface Event <add> * @see http://www.w3.org/TR/DOM-Level-3-Events/#events-compositionevents <add> */ <add>var CompositionEventInterface = { <add> data: null <add>}; <add> <add>/** <add> * @param {object} dispatchConfig Configuration used to dispatch this event. <add> * @param {string} dispatchMarker Marker identifying the event target. <add> * @param {object} nativeEvent Native browser event. 
<add> * @extends {SyntheticUIEvent} <add> */ <add>function SyntheticCompositionEvent( <add> dispatchConfig, <add> dispatchMarker, <add> nativeEvent) { <add> SyntheticEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent); <add>} <add> <add>SyntheticEvent.augmentClass( <add> SyntheticCompositionEvent, <add> CompositionEventInterface <add>); <add> <add>module.exports = SyntheticCompositionEvent; <add> <ide><path>src/eventPlugins/SimpleEventPlugin.js <ide> var EventConstants = require('EventConstants'); <ide> var EventPropagators = require('EventPropagators'); <ide> var SyntheticClipboardEvent = require('SyntheticClipboardEvent'); <add>var SyntheticCompositionEvent = require('SyntheticCompositionEvent'); <ide> var SyntheticEvent = require('SyntheticEvent'); <ide> var SyntheticFocusEvent = require('SyntheticFocusEvent'); <ide> var SyntheticKeyboardEvent = require('SyntheticKeyboardEvent'); <ide> var eventTypes = { <ide> captured: keyOf({onClickCapture: true}) <ide> } <ide> }, <add> compositionEnd: { <add> phasedRegistrationNames: { <add> bubbled: keyOf({onCompositionEnd: true}), <add> captured: keyOf({onCompositionEndCapture: true}) <add> } <add> }, <add> compositionStart: { <add> phasedRegistrationNames: { <add> bubbled: keyOf({onCompositionStart: true}), <add> captured: keyOf({onCompositionStartCapture: true}) <add> } <add> }, <add> compositionUpdate: { <add> phasedRegistrationNames: { <add> bubbled: keyOf({onCompositionUpdate: true}), <add> captured: keyOf({onCompositionUpdateCapture: true}) <add> } <add> }, <ide> copy: { <ide> phasedRegistrationNames: { <ide> bubbled: keyOf({onCopy: true}), <ide> var topLevelEventsToDispatchConfig = { <ide> topClick: eventTypes.click, <ide> topCopy: eventTypes.copy, <ide> topCut: eventTypes.cut, <add> topCompositionEnd: eventTypes.compositionEnd, <add> topCompositionStart: eventTypes.compositionStart, <add> topCompositionUpdate: eventTypes.compositionUpdate, <ide> topDoubleClick: eventTypes.doubleClick, <ide> topDOMCharacterDataModified: eventTypes.DOMCharacterDataModified, <ide> topDrag: eventTypes.drag, <ide> var SimpleEventPlugin = { <ide> case topLevelTypes.topPaste: <ide> EventConstructor = SyntheticClipboardEvent; <ide> break; <add> case topLevelTypes.topCompositionStart: <add> case topLevelTypes.topCompositionEnd: <add> case topLevelTypes.topCompositionUpdate: <add> EventConstructor = SyntheticCompositionEvent; <add> break; <ide> } <ide> invariant( <ide> EventConstructor,
4
Javascript
Javascript
remove unused definition `currentsource`
3dcf7253a831946bd1cbb5ad74bfec6bc014f92e
<ide><path>lib/_debugger.js <ide> function Client() { <ide> <ide> this.currentFrame = NO_FRAME; <ide> this.currentSourceLine = -1; <del> this.currentSource = null; <ide> this.handles = {}; <ide> this.scripts = {}; <ide> this.breakpoints = [];
1
Javascript
Javascript
add maximumdistance prop
41a78cd85c461d2f029c1863614c73a28faabade
<ide><path>packages/react-events/src/dom/Tap.js <ide> import { <ide> isMac, <ide> dispatchDiscreteEvent, <ide> dispatchUserBlockingEvent, <add> getTouchById, <add> hasModifierKey, <ide> } from './shared'; <ide> <del>type TapProps = {| <del> disabled: boolean, <del> preventDefault: boolean, <del> onTapCancel: (e: TapEvent) => void, <del> onTapChange: boolean => void, <del> onTapEnd: (e: TapEvent) => void, <del> onTapStart: (e: TapEvent) => void, <del> onTapUpdate: (e: TapEvent) => void, <del>|}; <del> <del>type TapState = { <add>type TapProps = $ReadOnly<{| <add> disabled?: boolean, <add> maximumDistance?: number, <add> preventDefault?: boolean, <add> onTapCancel?: (e: TapEvent) => void, <add> onTapChange?: boolean => void, <add> onTapEnd?: (e: TapEvent) => void, <add> onTapStart?: (e: TapEvent) => void, <add> onTapUpdate?: (e: TapEvent) => void, <add>|}>; <add> <add>type TapState = {| <ide> activePointerId: null | number, <ide> buttons: 0 | 1 | 4, <ide> gestureState: TapGestureState, <ide> ignoreEmulatedEvents: boolean, <add> initialPosition: {|x: number, y: number|}, <ide> isActive: boolean, <ide> pointerType: PointerType, <ide> responderTarget: null | Element, <ide> rootEvents: null | Array<string>, <ide> shouldPreventClick: boolean, <del>}; <add>|}; <ide> <ide> type TapEventType = <ide> | 'tap-cancel' <ide> type TapGestureState = {| <ide> y: number, <ide> |}; <ide> <del>type TapEvent = {| <add>type TapEvent = $ReadOnly<{| <ide> ...TapGestureState, <ide> type: TapEventType, <del>|}; <add>|}>; <ide> <ide> /** <ide> * Native event dependencies <ide> function createInitialState(): TapState { <ide> buttons: 0, <ide> ignoreEmulatedEvents: false, <ide> isActive: false, <add> initialPosition: {x: 0, y: 0}, <ide> pointerType: '', <ide> responderTarget: null, <ide> rootEvents: null, <ide> function removeRootEventTypes( <ide> * Managing pointers <ide> */ <ide> <del>function getTouchById( <del> nativeEvent: TouchEvent, <del> pointerId: null | number, <del>): null | Touch { <del> if (pointerId != null) { <del> const changedTouches = nativeEvent.changedTouches; <del> for (let i = 0; i < changedTouches.length; i++) { <del> const touch = changedTouches[i]; <del> if (touch.identifier === pointerId) { <del> return touch; <del> } <del> } <del> return null; <del> } <del> return null; <del>} <del> <ide> function getHitTarget( <ide> event: ReactDOMResponderEvent, <ide> context: ReactDOMResponderContext, <ide> function isActivePointer( <ide> } <ide> } <ide> <del>function isModifiedTap(event: ReactDOMResponderEvent): boolean { <del> const nativeEvent: any = event.nativeEvent; <del> const {altKey, ctrlKey, metaKey, shiftKey} = nativeEvent; <del> return ( <del> altKey === true || ctrlKey === true || metaKey === true || shiftKey === true <del> ); <del>} <del> <ide> function shouldActivate(event: ReactDOMResponderEvent): boolean { <ide> const nativeEvent: any = event.nativeEvent; <ide> const pointerType = event.pointerType; <ide> const responderImpl = { <ide> state.pointerType = event.pointerType; <ide> state.responderTarget = context.getResponderNode(); <ide> state.shouldPreventClick = props.preventDefault !== false; <del> state.gestureState = createGestureState(context, props, state, event); <add> <add> const gestureState = createGestureState(context, props, state, event); <add> state.gestureState = gestureState; <add> state.initialPosition.x = gestureState.x; <add> state.initialPosition.y = gestureState.y; <add> <ide> dispatchStart(context, props, state); <ide> dispatchChange(context, props, state); <ide> 
addRootEventTypes(rootEventTypes, context, state); <ide> const responderImpl = { <ide> <ide> if (state.isActive && isActivePointer(event, state)) { <ide> state.gestureState = createGestureState(context, props, state, event); <del> if (context.isTargetWithinResponder(hitTarget)) { <add> let shouldUpdate = true; <add> <add> if (!context.isTargetWithinResponder(hitTarget)) { <add> shouldUpdate = false; <add> } else if ( <add> props.maximumDistance != null && <add> props.maximumDistance >= 10 <add> ) { <add> const maxDistance = props.maximumDistance; <add> const initialPosition = state.initialPosition; <add> const currentPosition = state.gestureState; <add> const moveX = initialPosition.x - currentPosition.x; <add> const moveY = initialPosition.y - currentPosition.y; <add> const moveDistance = Math.sqrt(moveX * moveX + moveY * moveY); <add> if (moveDistance > maxDistance) { <add> shouldUpdate = false; <add> } <add> } <add> <add> if (shouldUpdate) { <ide> dispatchUpdate(context, props, state); <ide> } else { <ide> state.isActive = false; <ide> const responderImpl = { <ide> dispatchChange(context, props, state); <ide> if (context.isTargetWithinResponder(hitTarget)) { <ide> // Determine whether to call preventDefault on subsequent native events. <del> if (isModifiedTap(event)) { <add> if (hasModifierKey(event)) { <ide> state.shouldPreventClick = false; <ide> } <ide> dispatchEnd(context, props, state); <ide><path>packages/react-events/src/dom/__tests__/Tap-test.internal.js <ide> describeWithPointerEvent('Tap responder', hasPointerEvents => { <ide> }); <ide> }); <ide> <add> describe('maximumDistance', () => { <add> let onTapCancel, onTapUpdate, ref; <add> <add> function render(props) { <add> const Component = () => { <add> const listener = useTap(props); <add> return <div ref={ref} listeners={listener} />; <add> }; <add> ReactDOM.render(<Component />, container); <add> document.elementFromPoint = () => ref.current; <add> } <add> <add> beforeEach(() => { <add> onTapCancel = jest.fn(); <add> onTapUpdate = jest.fn(); <add> ref = React.createRef(); <add> render({ <add> maximumDistance: 20, <add> onTapCancel, <add> onTapUpdate, <add> }); <add> }); <add> <add> test('ignores values less than 10', () => { <add> render({ <add> maximumDistance: 5, <add> onTapCancel, <add> onTapUpdate, <add> }); <add> const target = createEventTarget(ref.current); <add> const pointerType = 'mouse'; <add> target.pointerdown({pointerType, x: 0, y: 0}); <add> target.pointermove({pointerType, x: 10, y: 10}); <add> expect(onTapUpdate).toHaveBeenCalledTimes(1); <add> expect(onTapCancel).toHaveBeenCalledTimes(0); <add> }); <add> <add> testWithPointerType('below threshold', pointerType => { <add> const target = createEventTarget(ref.current); <add> target.pointerdown({pointerType, x: 0, y: 0}); <add> target.pointermove({pointerType, x: 10, y: 10}); <add> expect(onTapUpdate).toHaveBeenCalledTimes(1); <add> expect(onTapCancel).toHaveBeenCalledTimes(0); <add> }); <add> <add> testWithPointerType('above threshold', pointerType => { <add> const target = createEventTarget(ref.current); <add> target.pointerdown({pointerType, x: 0, y: 0}); <add> target.pointermove({pointerType, x: 15, y: 14}); <add> expect(onTapUpdate).toHaveBeenCalledTimes(0); <add> expect(onTapCancel).toHaveBeenCalledTimes(1); <add> }); <add> }); <add> <ide> describe('onTapStart', () => { <ide> let onTapStart, ref; <ide> <ide> describeWithPointerEvent('Tap responder', hasPointerEvents => { <ide> }); <ide> <ide> describe('onTapCancel', () => { <del> let onTapCancel, 
parentRef, ref, siblingRef; <add> let onTapCancel, onTapUpdate, parentRef, ref, siblingRef; <ide> <ide> beforeEach(() => { <ide> onTapCancel = jest.fn(); <add> onTapUpdate = jest.fn(); <ide> parentRef = React.createRef(); <ide> ref = React.createRef(); <ide> siblingRef = React.createRef(); <ide> const Component = () => { <del> const listener = useTap({onTapCancel}); <add> const listener = useTap({onTapCancel, onTapUpdate}); <ide> return ( <ide> <div ref={parentRef}> <ide> <div ref={ref} listeners={listener} /> <ide> describeWithPointerEvent('Tap responder', hasPointerEvents => { <ide> y: 0, <ide> }), <ide> ); <add> target.pointermove({pointerType, x: 5, y: 5}); <add> expect(onTapUpdate).not.toBeCalled(); <ide> }); <ide> <ide> test('long press context menu', () => { <ide><path>packages/react-events/src/dom/shared/index.js <ide> export function dispatchUserBlockingEvent( <ide> ) { <ide> context.dispatchEvent(payload, callback, UserBlockingEvent); <ide> } <add> <add>export function getTouchById( <add> nativeEvent: TouchEvent, <add> pointerId: null | number, <add>): null | Touch { <add> if (pointerId != null) { <add> const changedTouches = nativeEvent.changedTouches; <add> for (let i = 0; i < changedTouches.length; i++) { <add> const touch = changedTouches[i]; <add> if (touch.identifier === pointerId) { <add> return touch; <add> } <add> } <add> return null; <add> } <add> return null; <add>} <add> <add>export function hasModifierKey(event: ReactDOMResponderEvent): boolean { <add> const nativeEvent: any = event.nativeEvent; <add> const {altKey, ctrlKey, metaKey, shiftKey} = nativeEvent; <add> return ( <add> altKey === true || ctrlKey === true || metaKey === true || shiftKey === true <add> ); <add>}
3
Python
Python
fix dense layer bias_add bug when ndim > 2.
86a683540504c6f6ecd44d03941ba4d0a97ad08c
<ide><path>keras/layers/core.py <ide> def build(self, input_shape): <ide> def call(self, inputs): <ide> output = K.dot(inputs, self.kernel) <ide> if self.use_bias: <del> output = K.bias_add(output, self.bias) <add> output = K.bias_add(output, self.bias, data_format='channels_last') <ide> if self.activation is not None: <ide> output = self.activation(output) <ide> return output
1
PHP
PHP
apply fixes from styleci
c00a8e7d1ed48b1a4c9303aad4aceea6cea87c14
<ide><path>tests/Routing/RoutingRouteTest.php <ide> public function testNullValuesCanBeInjectedIntoRoutes() <ide> return $router; <ide> }); <ide> <del> $container->bind(RoutingTestUserModel::class, function() { <del> return null; <add> $container->bind(RoutingTestUserModel::class, function () { <ide> }); <ide> <ide> $router->get('foo/{team}/{post}', [
1
Ruby
Ruby
fix ar tests for sqlite
fd87a787101e4b58249bf953ada756b992dff5f5
<ide><path>activerecord/lib/active_record/fixtures.rb <ide> def teardown_with_fixtures <ide> ActiveRecord::Base.connection.rollback_db_transaction <ide> ActiveRecord::Base.unlock_mutex <ide> end <del> ActiveRecord::Base.clear_active_connections! <add> ActiveRecord::Base.verify_active_connections! <ide> end <ide> <ide> alias_method :teardown, :teardown_with_fixtures
1
Python
Python
fix bug in sparse_categorical_accuracy
6dff721a3a8755356b2e89d02ef63ad8ab38ec95
<ide><path>keras/metrics.py <ide> def categorical_accuracy(y_true, y_pred): <ide> <ide> <ide> def sparse_categorical_accuracy(y_true, y_pred): <del> # flatten y_true in case it's in shape (num_samples, 1) instead of (num_samples,) <del> return K.cast(K.equal(K.flatten(y_true), <del> K.cast(K.argmax(y_pred, axis=-1), K.floatx())), <del> K.floatx()) <add> # reshape in case it's in shape (num_samples, 1) instead of (num_samples,) <add> if K.ndim(y_true) == K.ndim(y_pred): <add> y_true = K.squeeze(y_true, -1) <add> # convert dense predictions to labels <add> y_pred_labels = K.argmax(y_pred, axis=-1) <add> y_pred_labels = K.cast(y_pred_labels, K.floatx()) <add> return K.cast(K.equal(y_true, y_pred_labels), K.floatx()) <ide> <ide> <ide> def top_k_categorical_accuracy(y_true, y_pred, k=5): <ide><path>tests/keras/metrics_test.py <ide> def test_sparse_metrics(): <ide> assert K.eval(metric(y_a, y_b)).shape == (6,) <ide> <ide> <del>def test_sparse_categorical_accuracy_correctness(): <del> y_a = K.variable(np.random.randint(0, 7, (6,)), dtype=K.floatx()) <del> y_b = K.variable(np.random.random((6, 7)), dtype=K.floatx()) <add>@pytest.mark.parametrize('shape', [(6,), (6, 3), (6, 3, 1)]) <add>def test_sparse_categorical_accuracy_correctness(shape): <add> y_a = K.variable(np.random.randint(0, 7, shape), dtype=K.floatx()) <add> y_b_shape = shape + (7,) <add> y_b = K.variable(np.random.random(y_b_shape), dtype=K.floatx()) <ide> # use one_hot embedding to convert sparse labels to equivalent dense labels <del> y_a_dense_labels = K.cast(K.one_hot(K.cast(y_a, dtype='int32'), num_classes=7), <add> y_a_dense_labels = K.cast(K.one_hot(K.cast(y_a, dtype='int32'), 7), <ide> dtype=K.floatx()) <ide> sparse_categorical_acc = metrics.sparse_categorical_accuracy(y_a, y_b) <ide> categorical_acc = metrics.categorical_accuracy(y_a_dense_labels, y_b)
2
Text
Text
update dep0102 text
70dae240f04b91a05d1859f5440ca1305762d3da
<ide><path>doc/api/deprecations.md <ide> changes: <ide> <ide> Type: End-of-Life <ide> <del>Using the `noAssert` argument has no functionality anymore. All input is going <del>to be verified, no matter if it is set to true or not. Skipping the verification <del>could lead to hard to find errors and crashes. <add>Using the `noAssert` argument has no functionality anymore. All input is <add>verified regardless of the value of `noAssert`. Skipping the verification <add>could lead to hard-to-find errors and crashes. <ide> <ide> ### DEP0103: `process.binding('util').is[...]` typechecks <ide>
1
PHP
PHP
copy debugger test fix from 5.0
c440c727674675f0204afd2b91f0d5e7bcc392fe
<ide><path>tests/TestCase/Error/DebuggerTest.php <ide> public function testLogDepth(): void <ide> Log::setConfig('test', [ <ide> 'className' => 'Array', <ide> ]); <del> $val = [ <add> $veryRandomName = [ <ide> 'test' => ['key' => 'val'], <ide> ]; <del> Debugger::log($val, 'debug', 0); <add> Debugger::log($veryRandomName, 'debug', 0); <ide> <ide> $messages = Log::engine('test')->read(); <ide> $this->assertStringContainsString('DebuggerTest::testLogDepth', $messages[0]); <ide> $this->assertStringContainsString('test', $messages[0]); <del> $this->assertStringNotContainsString('val', $messages[0]); <add> $this->assertStringNotContainsString('veryRandomName', $messages[0]); <ide> } <ide> <ide> /**
1
Java
Java
avoid repeated charset resolution in mimetype
c04400890f54bb2d342348085666066072b7bab9
<ide><path>spring-core/src/main/java/org/springframework/util/MimeType.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public class MimeType implements Comparable<MimeType>, Serializable { <ide> <ide> private final Map<String, String> parameters; <ide> <add> @Nullable <add> private Charset resolvedCharset; <add> <ide> @Nullable <ide> private volatile String toStringValue; <ide> <ide> public MimeType(String type, String subtype) { <ide> */ <ide> public MimeType(String type, String subtype, Charset charset) { <ide> this(type, subtype, Collections.singletonMap(PARAM_CHARSET, charset.name())); <add> this.resolvedCharset = charset; <ide> } <ide> <ide> /** <ide> public MimeType(String type, String subtype, Charset charset) { <ide> */ <ide> public MimeType(MimeType other, Charset charset) { <ide> this(other.getType(), other.getSubtype(), addCharsetParameter(charset, other.getParameters())); <add> this.resolvedCharset = charset; <ide> } <ide> <ide> /** <ide> public MimeType(String type, String subtype, @Nullable Map<String, String> param <ide> * @see <a href="https://tools.ietf.org/html/rfc2616#section-2.2">HTTP 1.1, section 2.2</a> <ide> */ <ide> private void checkToken(String token) { <del> for (int i = 0; i < token.length(); i++ ) { <add> for (int i = 0; i < token.length(); i++) { <ide> char ch = token.charAt(i); <ide> if (!TOKEN.get(ch)) { <ide> throw new IllegalArgumentException("Invalid token character '" + ch + "' in token \"" + token + "\""); <ide> protected void checkParameters(String attribute, String value) { <ide> Assert.hasLength(value, "'value' must not be empty"); <ide> checkToken(attribute); <ide> if (PARAM_CHARSET.equals(attribute)) { <del> value = unquote(value); <del> Charset.forName(value); <add> if (this.resolvedCharset == null) { <add> this.resolvedCharset = Charset.forName(unquote(value)); <add> } <ide> } <ide> else if (!isQuotedString(value)) { <ide> checkToken(value); <ide> public String getSubtype() { <ide> */ <ide> @Nullable <ide> public Charset getCharset() { <del> String charset = getParameter(PARAM_CHARSET); <del> return (charset != null ? Charset.forName(unquote(charset)) : null); <add> return this.resolvedCharset; <ide> } <ide> <ide> /**
1
PHP
PHP
add listener options
a82a25f58252eab6831a0efde35a17403710abdc
<ide><path>src/Illuminate/Queue/Console/ListenCommand.php <ide> <ide> use Illuminate\Queue\Listener; <ide> use Illuminate\Console\Command; <del>use Symfony\Component\Console\Input\InputOption; <del>use Symfony\Component\Console\Input\InputArgument; <add>use Illuminate\Queue\ListenerOptions; <ide> <ide> class ListenCommand extends Command <ide> { <ide> class ListenCommand extends Command <ide> * <ide> * @var string <ide> */ <del> protected $name = 'queue:listen'; <add> protected $signature = 'queue:listen <add> {connection? : The name of connection} <add> {--queue= : The queue to listen on} <add> {--delay=0 : Amount of time to delay failed jobs} <add> {--memory=128 : The memory limit in megabytes} <add> {--sleep=3 : Number of seconds to sleep when no job is available} <add> {--timeout=60 : The number of seconds a child process can run} <add> {--tries=0 : Number of times to attempt a job before logging it failed}'; <ide> <ide> /** <ide> * The console command description. <ide> public function __construct(Listener $listener) <ide> { <ide> parent::__construct(); <ide> <del> $this->listener = $listener; <add> $this->setOutputHandler($this->listener = $listener); <ide> } <ide> <ide> /** <ide> public function __construct(Listener $listener) <ide> */ <ide> public function fire() <ide> { <del> $this->setListenerOptions(); <del> <del> $connection = $this->input->getArgument('connection'); <del> <del> // The memory limit is the amount of memory we will allow the script to occupy <del> // before killing it and letting a process manager restart it for us, which <del> // is to protect us against any memory leaks that will be in the scripts. <del> $memory = $this->input->getOption('memory'); <del> <del> $timeout = $this->input->getOption('timeout'); <del> <del> $delay = $this->input->getOption('delay'); <del> <ide> // We need to get the right queue for the connection which is set in the queue <ide> // configuration file for the application. We will pull it based on the set <ide> // connection being run for the queue operation currently being executed. <del> $queue = $this->getQueue($connection); <add> $queue = $this->getQueue( <add> $connection = $this->input->getArgument('connection') <add> ); <ide> <ide> $this->listener->listen( <del> $connection, $queue, $delay, $memory, $timeout <add> $connection, $queue, $this->gatherOptions() <ide> ); <ide> } <ide> <ide> public function fire() <ide> */ <ide> protected function getQueue($connection) <ide> { <del> if (is_null($connection)) { <del> $connection = $this->laravel['config']['queue.default']; <del> } <add> $connection = $connection ?: $this->laravel['config']['queue.default']; <ide> <del> $queue = $this->laravel['config']->get( <add> return $this->input->getOption('queue') ?: $this->laravel['config']->get( <ide> "queue.connections.{$connection}.queue", 'default' <ide> ); <del> <del> return $this->input->getOption('queue') ?: $queue; <ide> } <ide> <ide> /** <del> * Set the options on the queue listener. <add> * Get the listener options for the command. 
<ide> * <del> * @return void <add> * @return \Illuminate\Queue\ListenerOptions <ide> */ <del> protected function setListenerOptions() <add> protected function gatherOptions() <ide> { <del> $this->listener->setEnvironment($this->laravel->environment()); <del> <del> $this->listener->setSleep($this->option('sleep')); <del> <del> $this->listener->setMaxTries($this->option('tries')); <del> <del> $this->listener->setOutputHandler(function ($type, $line) { <del> $this->output->write($line); <del> }); <del> } <del> <del> /** <del> * Get the console command arguments. <del> * <del> * @return array <del> */ <del> protected function getArguments() <del> { <del> return [ <del> ['connection', InputArgument::OPTIONAL, 'The name of connection'], <del> ]; <add> return new ListenerOptions( <add> $this->option('env'), $this->option('delay'), <add> $this->option('memory'), $this->option('timeout') <add> ); <ide> } <ide> <ide> /** <del> * Get the console command options. <add> * Set the options on the queue listener. <ide> * <del> * @return array <add> * @param \Illuminate\Queue\Listener $listener <add> * @return void <ide> */ <del> protected function getOptions() <add> protected function setOutputHandler(Listener $listener) <ide> { <del> return [ <del> ['queue', null, InputOption::VALUE_OPTIONAL, 'The queue to listen on', null], <del> <del> ['delay', null, InputOption::VALUE_OPTIONAL, 'Amount of time to delay failed jobs', 0], <del> <del> ['memory', null, InputOption::VALUE_OPTIONAL, 'The memory limit in megabytes', 128], <del> <del> ['timeout', null, InputOption::VALUE_OPTIONAL, 'Seconds a job may run before timing out', 60], <del> <del> ['sleep', null, InputOption::VALUE_OPTIONAL, 'Seconds to wait before checking queue for jobs', 3], <del> <del> ['tries', null, InputOption::VALUE_OPTIONAL, 'Number of times to attempt a job before logging it failed', 0], <del> ]; <add> $listener->setOutputHandler(function ($type, $line) { <add> $this->output->write($line); <add> }); <ide> } <ide> } <ide><path>src/Illuminate/Queue/Listener.php <ide> class Listener <ide> public function __construct($commandPath) <ide> { <ide> $this->commandPath = $commandPath; <del> $this->workerCommand = $this->buildWorkerCommand(); <add> $this->workerCommand = $this->buildCommandTemplate(); <ide> } <ide> <ide> /** <ide> * Build the environment specific worker command. <ide> * <ide> * @return string <ide> */ <del> protected function buildWorkerCommand() <add> protected function buildCommandTemplate() <ide> { <del> $binary = ProcessUtils::escapeArgument((new PhpExecutableFinder)->find(false)); <add> $command = 'queue:work %s --once --queue=%s --delay=%s --memory=%s --sleep=%s --tries=%s'; <ide> <del> $artisan = defined('ARTISAN_BINARY') ? ProcessUtils::escapeArgument(ARTISAN_BINARY) : 'artisan'; <add> return "{$this->phpBinary()} {$this->artisanBinary()} {$command}"; <add> } <ide> <del> $command = 'queue:work %s --once --queue=%s --delay=%s --memory=%s --sleep=%s --tries=%s'; <add> /** <add> * Get the PHP binary. <add> * <add> * @return string <add> */ <add> protected function phpBinary() <add> { <add> return ProcessUtils::escapeArgument( <add> (new PhpExecutableFinder)->find(false) <add> ); <add> } <ide> <del> return "{$binary} {$artisan} {$command}"; <add> /** <add> * Get the Artisan binary. <add> * <add> * @return string <add> */ <add> protected function artisanBinary() <add> { <add> return defined('ARTISAN_BINARY') <add> ? 
ProcessUtils::escapeArgument(ARTISAN_BINARY) <add> : 'artisan'; <ide> } <ide> <ide> /** <ide> * Listen to the given queue connection. <ide> * <ide> * @param string $connection <ide> * @param string $queue <del> * @param string $delay <del> * @param string $memory <del> * @param int $timeout <add> * @param \Illuminate\Queue\ListenerOptions $options <ide> * @return void <ide> */ <del> public function listen($connection, $queue, $delay, $memory, $timeout = 60) <add> public function listen($connection, $queue, ListenerOptions $options) <ide> { <del> $process = $this->makeProcess($connection, $queue, $delay, $memory, $timeout); <add> $process = $this->makeProcess($connection, $queue, $options); <ide> <ide> while (true) { <del> $this->runProcess($process, $memory); <add> $this->runProcess($process, $options->memory); <ide> } <ide> } <ide> <ide> public function listen($connection, $queue, $delay, $memory, $timeout = 60) <ide> * <ide> * @param string $connection <ide> * @param string $queue <del> * @param int $delay <del> * @param int $memory <del> * @param int $timeout <add> * @param \Illuminate\Queue\ListenerOptions $options <ide> * @return \Symfony\Component\Process\Process <ide> */ <del> public function makeProcess($connection, $queue, $delay, $memory, $timeout) <add> public function makeProcess($connection, $queue, ListenerOptions $options) <ide> { <ide> $command = $this->workerCommand; <ide> <ide> // If the environment is set, we will append it to the command string so the <ide> // workers will run under the specified environment. Otherwise, they will <ide> // just run under the production environment which is not always right. <del> if (isset($this->environment)) { <del> $command = $this->addEnvironment($command); <add> if (isset($options->environment)) { <add> $command = $this->addEnvironment($command, $options); <ide> } <ide> <ide> // Next, we will just format out the worker commands with all of the various <ide> // options available for the command. This will produce the final command <ide> // line that we will pass into a Symfony process object for processing. <ide> $command = $this->formatCommand( <del> $command, $connection, $queue, $delay, $memory <add> $command, $connection, $queue, $options <ide> ); <ide> <ide> return new Process( <del> $command, $this->commandPath, null, null, $timeout <add> $command, $this->commandPath, null, null, $options->timeout <ide> ); <ide> } <ide> <ide> /** <ide> * Add the environment option to the given command. <ide> * <ide> * @param string $command <add> * @param \Illuminate\Queue\ListenerOptions $options <ide> * @return string <ide> */ <del> protected function addEnvironment($command) <add> protected function addEnvironment($command, ListenerOptions $options) <ide> { <del> return $command.' --env='.ProcessUtils::escapeArgument($this->environment); <add> return $command.' 
--env='.ProcessUtils::escapeArgument($options->environment); <ide> } <ide> <ide> /** <ide> protected function addEnvironment($command) <ide> * @param string $command <ide> * @param string $connection <ide> * @param string $queue <del> * @param int $delay <del> * @param int $memory <add> * @param \Illuminate\Queue\ListenerOptions $options <ide> * @return string <ide> */ <del> protected function formatCommand($command, $connection, $queue, $delay, $memory) <add> protected function formatCommand($command, $connection, $queue, ListenerOptions $options) <ide> { <ide> return sprintf( <ide> $command, <ide> ProcessUtils::escapeArgument($connection), <ide> ProcessUtils::escapeArgument($queue), <del> $delay, $memory, $this->sleep, $this->maxTries <add> $options->delay, $options->memory, <add> $options->sleep, $options->maxTries <ide> ); <ide> } <ide> <ide> public function setOutputHandler(Closure $outputHandler) <ide> { <ide> $this->outputHandler = $outputHandler; <ide> } <del> <del> /** <del> * Set the current environment. <del> * <del> * @param string $environment <del> * @return void <del> */ <del> public function setEnvironment($environment) <del> { <del> $this->environment = $environment; <del> } <del> <del> /** <del> * Set the amount of seconds to wait before polling the queue. <del> * <del> * @param int $sleep <del> * @return void <del> */ <del> public function setSleep($sleep) <del> { <del> $this->sleep = $sleep; <del> } <del> <del> /** <del> * Set the amount of times to try a job before logging it failed. <del> * <del> * @param int $tries <del> * @return void <del> */ <del> public function setMaxTries($tries) <del> { <del> $this->maxTries = $tries; <del> } <ide> } <ide><path>src/Illuminate/Queue/ListenerOptions.php <add><?php <add> <add>namespace Illuminate\Queue; <add> <add>class ListenerOptions extends WorkerOptions <add>{ <add> /** <add> * The environment the worker should run in. <add> * <add> * @var string <add> */ <add> public $environment; <add> <add> /** <add> * Create a new listener options instance. <add> * <add> * @param string $environment <add> * @param int $delay <add> * @param int $memory <add> * @param int $timeout <add> * @param int $sleep <add> * @param int $maxTries <add> * @param bool $force <add> */ <add> public function __construct($environment = null, $delay = 0, $memory = 128, $timeout = 60, $sleep = 3, $maxTries = 0, $force = false) <add> { <add> $this->environment = $environment; <add> <add> parent::__construct($delay, $memory, $timeout, $sleep, $maxTries, $force); <add> } <add>} <ide><path>tests/Queue/QueueListenerTest.php <ide> public function testListenerStopsWhenMemoryIsExceeded() <ide> public function testMakeProcessCorrectlyFormatsCommandLine() <ide> { <ide> $listener = new Illuminate\Queue\Listener(__DIR__); <del> $process = $listener->makeProcess('connection', 'queue', 1, 2, 3); <add> $options = new Illuminate\Queue\ListenerOptions(); <add> $options->delay = 1; <add> $options->memory = 2; <add> $options->timeout = 3; <add> $process = $listener->makeProcess('connection', 'queue', $options); <ide> $escape = '\\' === DIRECTORY_SEPARATOR ? '"' : '\''; <ide> <ide> $this->assertInstanceOf('Symfony\Component\Process\Process', $process);
4
Python
Python
fix bug in pos_tag.py script
eaccbcda0fc0705364cfc17277fe4ca3579fc2c2
<ide><path>examples/pos_tag.py <ide> def represent_word(word): <ide> # Only do this if the lower-cased form is more probable. <ide> if text.istitle() \ <ide> and is_sent_begin(word) \ <del> and word.prob < word.vocab[text.lower()].prob: <add> and word.prob < word.doc.vocab[text.lower()].prob: <ide> text = text.lower() <ide> return text + '|' + word.tag_ <ide>
1
PHP
PHP
table
818c78f44c66422a0721b206a7c95b56d8af8c78
<ide><path>src/Illuminate/Cache/Console/CacheTableCommand.php <ide> protected function createBaseMigration() <ide> { <ide> $name = 'create_cache_table'; <ide> <del> $path = $this->laravel['path'].'/database/migrations'; <add> $path = $this->laravel['path.database'].'/migrations'; <ide> <ide> return $this->laravel['migration.creator']->create($name, $path); <ide> }
1
Javascript
Javascript
fix "the the" typo in comments
e84e6e7fad68f1c4a54f19d6d4625c322286f849
<ide><path>lib/internal/streams/readable.js <ide> function maybeReadMore_(stream, state) { <ide> // conditions prevent the data from being read: <ide> // - The stream has ended (state.ended). <ide> // - There is already a pending 'read' operation (state.reading). This is a <del> // case where the the stream has called the implementation defined _read() <add> // case where the stream has called the implementation defined _read() <ide> // method, but they are processing the call asynchronously and have _not_ <ide> // called push() with new data. In this case we skip performing more <ide> // read()s. The execution ends in this method again after the _read() ends <ide><path>lib/internal/util/inspect.js <ide> function reduceToSingleString( <ide> ctx, output, base, braces, extrasType, recurseTimes, value) { <ide> if (ctx.compact !== true) { <ide> if (typeof ctx.compact === 'number' && ctx.compact >= 1) { <del> // Memorize the original output length. In case the the output is grouped, <add> // Memorize the original output length. In case the output is grouped, <ide> // prevent lining up the entries on a single line. <ide> const entries = output.length; <ide> // Group array elements together if the array contains at least six
2
Text
Text
add docs for examples
8abfb5e0e2015a0b8a80cba81ccf249df275adcb
<ide><path>Examples/Movies/README.md <add># Movies app <add> <add>The Movies app is a demonstration of basic concepts, such as fetching data, rendering a list of data including images, and navigating between different screens. <add> <add>## Running this app <add> <add>Before running the app, make sure you ran: <add> <add> cd react-native <add> npm install <add> <add>### Running on iOS <add> <add>Mac OS and Xcode are required. <add> <add>- Open `Examples/Movies/Movies.xcodeproj` in Xcode <add>- Hit the Run button <add> <add>See [Running on device](https://facebook.github.io/react-native/docs/running-on-device-ios.html) if you want to use a physical device. <add> <add>### Running on Android <add> <add>You'll need to have all the [prerequisites](https://github.com/facebook/react-native/tree/master/ReactAndroid#prerequisites) (SDK, NDK) for Building React Native installed. <add> <add>Start an Android emulator ([Genymotion](https://www.genymotion.com) is recommended). <add> <add> cd react-native <add> ./gradlew :Examples:Movies:android:app:installDebug <add> ./packager/packager.sh <add> <add>_Note: Building for the first time can take a while._ <add> <add>Open the Movies app in your emulator. <add> <add>See [Running on Device](https://facebook.github.io/react-native/docs/running-on-device-android.html) in case you want to use a physical device. <add> <add>## Built from source <add> <add>Building the app on both iOS and Android means building the React Native framework from source. This way you're running the latest native and JS code the way you see it in your clone of the github repo. <add> <add>This is different from apps created using `react-native init` which have a dependency on a specific version of React Native JS and native code, declared in a `package.json` file (and `build.gradle` for Android apps). <ide><path>Examples/UIExplorer/README.md <add># UIExplorer <add> <add>The UIExplorer is a sample app that showcases React Native views and modules. <add> <add>## Running this app <add> <add>Before running the app, make sure you ran: <add> <add> cd react-native <add> npm install <add> <add>### Running on iOS <add> <add>Mac OS and Xcode are required. <add> <add>- Open `Examples/UIExplorer/UIExplorer.xcodeproj` in Xcode <add>- Hit the Run button <add> <add>See [Running on device](https://facebook.github.io/react-native/docs/running-on-device-ios.html) if you want to use a physical device. <add> <add>### Running on Android <add> <add>You'll need to have all the [prerequisites](https://github.com/facebook/react-native/tree/master/ReactAndroid#prerequisites) (SDK, NDK) for Building React Native installed. <add> <add>Start an Android emulator ([Genymotion](https://www.genymotion.com) is recommended). <add> <add> cd react-native <add> ./gradlew :Examples:UIExplorer:android:app:installDebug <add> ./packager/packager.sh <add> <add>_Note: Building for the first time can take a while._ <add> <add>Open the UIExplorer app in your emulator. <add> <add>See [Running on Device](https://facebook.github.io/react-native/docs/running-on-device-android.html) in case you want to use a physical device. <add> <add>## Built from source <add> <add>Building the app on both iOS and Android means building the React Native framework from source. This way you're running the latest native and JS code the way you see it in your clone of the github repo. 
<add> <add>This is different from apps created using `react-native init` which have a dependency on a specific version of React Native JS and native code, declared in a `package.json` file (and `build.gradle` for Android apps).
2
PHP
PHP
add unit test for debugsecurity param
b64bcff5652b0c49f43168ad75a8a4d5c237ca09
<ide><path>src/View/Helper/FormHelper.php <ide> public function secure(array $fields = [], array $secureAttributes = []) <ide> if (empty($this->request['_Token'])) { <ide> return null; <ide> } <del> $debugSecurity = Hash::get($secureAttributes, 'debugSecurity') ?: Configure::read('debug'); <del> unset($secureAttributes['debugSecurity']); <add> $debugSecurity = Configure::read('debug'); <add> if (isset($secureAttributes['debugSecurity'])) { <add> $debugSecurity = $secureAttributes['debugSecurity']; <add> unset($secureAttributes['debugSecurity']); <add> } <ide> <ide> $tokenData = $this->_buildFieldToken( <ide> $this->_lastAction, <ide><path>tests/TestCase/View/Helper/FormHelperTest.php <ide> public function testFormSecurityInputUnlockedFields() <ide> $this->assertHtml($expected, $result); <ide> } <ide> <add> /** <add> * testFormSecurityInputUnlockedFieldsDebugSecurityTrue method <add> * <add> * Test single record form with debugSecurity param. <add> * <add> * @return void <add> */ <add> public function testFormSecurityInputUnlockedFieldsDebugSecurityTrue() <add> { <add> $this->Form->request['_Token'] = [ <add> 'unlockedFields' => ['first_name', 'address'] <add> ]; <add> $this->Form->create(); <add> $this->assertEquals($this->Form->request['_Token']['unlockedFields'], $this->Form->unlockField()); <add> <add> $this->Form->hidden('Addresses.id', ['value' => '123456']); <add> $this->Form->text('Addresses.title'); <add> $this->Form->text('Addresses.first_name'); <add> $this->Form->text('Addresses.last_name'); <add> $this->Form->text('Addresses.address'); <add> $this->Form->text('Addresses.city'); <add> $this->Form->text('Addresses.phone'); <add> <add> $result = $this->Form->fields; <add> $expected = [ <add> 'Addresses.id' => '123456', 'Addresses.title', 'Addresses.last_name', <add> 'Addresses.city', 'Addresses.phone' <add> ]; <add> $this->assertEquals($expected, $result); <add> $debug = Configure::read('debug'); <add> Configure::write('debug', false); <add> $result = $this->Form->secure($expected, ['data-foo' => 'bar', 'debugSecurity' => true]); <add> Configure::write('debug', $debug); <add> <add> $hash = 'a303becbdd99cb42ca14a1cf7e63dfd48696a3c5%3AAddresses.id'; <add> $tokenDebug = urlencode(json_encode([ <add> '/articles/add', <add> [ <add> 'Addresses.id' => '123456', <add> 'Addresses.title', <add> 'Addresses.last_name', <add> 'Addresses.city', <add> 'Addresses.phone' <add> ], <add> [ <add> 'first_name', <add> 'address' <add> ] <add> ])); <add> <add> $expected = [ <add> 'div' => ['style' => 'display:none;'], <add> ['input' => [ <add> 'type' => 'hidden', <add> 'name' => '_Token[fields]', <add> 'value' => $hash, <add> 'data-foo' => 'bar', <add> ]], <add> ['input' => [ <add> 'type' => 'hidden', <add> 'name' => '_Token[unlocked]', <add> 'value' => 'address%7Cfirst_name', <add> 'data-foo' => 'bar', <add> ]], <add> ['input' => [ <add> 'type' => 'hidden', 'name' => '_Token[debug]', <add> 'value' => $tokenDebug, <add> 'data-foo' => 'bar' <add> ]], <add> '/div' <add> ]; <add> $this->assertHtml($expected, $result); <add> <add> } <add> <add> /** <add> * testFormSecurityInputUnlockedFieldsDebugSecurityFalse method <add> * <add> * Test single record form with debugSecurity param. 
<add> * <add> * @return void <add> */ <add> public function testFormSecurityInputUnlockedFieldsDebugSecurityFalse() <add> { <add> $this->Form->request['_Token'] = [ <add> 'unlockedFields' => ['first_name', 'address'] <add> ]; <add> $this->Form->create(); <add> $this->assertEquals($this->Form->request['_Token']['unlockedFields'], $this->Form->unlockField()); <add> <add> $this->Form->hidden('Addresses.id', ['value' => '123456']); <add> $this->Form->text('Addresses.title'); <add> $this->Form->text('Addresses.first_name'); <add> $this->Form->text('Addresses.last_name'); <add> $this->Form->text('Addresses.address'); <add> $this->Form->text('Addresses.city'); <add> $this->Form->text('Addresses.phone'); <add> <add> $result = $this->Form->fields; <add> $expected = [ <add> 'Addresses.id' => '123456', 'Addresses.title', 'Addresses.last_name', <add> 'Addresses.city', 'Addresses.phone' <add> ]; <add> $this->assertEquals($expected, $result); <add> <add> $debug = Configure::read('debug'); <add> Configure::write('debug', true); <add> $result = $this->Form->secure($expected, ['data-foo' => 'bar', 'debugSecurity' => false]); <add> Configure::write('debug', $debug); <add> <add> $hash = 'a303becbdd99cb42ca14a1cf7e63dfd48696a3c5%3AAddresses.id'; <add> <add> $expected = [ <add> 'div' => ['style' => 'display:none;'], <add> ['input' => [ <add> 'type' => 'hidden', <add> 'name' => '_Token[fields]', <add> 'value' => $hash, <add> 'data-foo' => 'bar', <add> ]], <add> ['input' => [ <add> 'type' => 'hidden', <add> 'name' => '_Token[unlocked]', <add> 'value' => 'address%7Cfirst_name', <add> 'data-foo' => 'bar', <add> ]], <add> '/div' <add> ]; <add> <add> $this->assertHtml($expected, $result); <add> } <add> <ide> /** <ide> * test securing inputs with custom name attributes. <ide> *
2
Text
Text
fix punctuation in line 46
ed2d5f09ab1686d33475b8d7e286cea873aefb82
<ide><path>guide/english/css/margins/index.md <ide> An element can be aligned to the center by making it a block element and changin <ide> - `%` implies relative to the parent element. <ide> <ide> ## Browser Support <del>It is effectively supported in all browsers (since IE6+, Firefox 2+, Chrome 1+ etc) <add>It is effectively supported in all browsers (since IE6+, Firefox 2+, Chrome 1+, etc). <ide> <ide> ### More Information <del>- <a href='https://www.w3.org/TR/css3-box/#the-margin' target='_blank' rel='nofollow'>W3C Working Draft</a> <add>- <a href='https://www.w3.org/TR/css-box-3/#margins' target='_blank' rel='nofollow'>W3C Working Draft</a> <ide> - <a href='https://www.w3.org/TR/CSS2/box.html#propdef-margin' target='_blank' rel='nofollow'>W3C CSS Level 2</a> <ide> - <a href='https://www.w3.org/TR/CSS1/#margin' target='_blank' rel='nofollow'>W3C CSS Level 1</a> <ide> - <a href='https://developer.mozilla.org/en-US/docs/Web/CSS/margin' target='_blank' rel='nofollow'>MDN Web Docs</a>
1
PHP
PHP
simplify "app" function.
d72ecc447b40e77629891b9fdeb6b64db7411ff4
<ide><path>src/Illuminate/Foundation/helpers.php <ide> function app($abstract = null, array $parameters = []) <ide> return Container::getInstance(); <ide> } <ide> <del> return empty($parameters) <del> ? Container::getInstance()->make($abstract) <del> : Container::getInstance()->makeWith($abstract, $parameters); <add> return Container::getInstance()->make($abstract, $parameters); <ide> } <ide> } <ide>
1
Javascript
Javascript
update donators copy
0975b87a633f0cf89a9943a4f01d22e7942b3346
<ide><path>client/src/components/Supporters.js <ide> function Supporters({ isDonating, activeDonations }) { <ide> freeCodeCamp.org is a tiny non-profit that's helping millions of <ide> people learn to code for free. <br /> <ide> {isDonating <del> ? 'Thank you for supporting freeCodeCamp.org' <add> ? "Thanks for being a supporter! Do you know anyone who's " + <add> 'interested in technology? Encourage them to join the ' + <add> 'community as well.' <ide> : 'Join 4,000 supporters. Your $5 / month donation will help ' + <ide> 'keep tech education free and open.'} <ide> </p>
1
Javascript
Javascript
add support for custom tracking of items
61f2767ce65562257599649d9eaf9da08f321655
<ide><path>src/apis.js <ide> HashMap.prototype = { <ide> return value; <ide> } <ide> }; <del> <del>/** <del> * A map where multiple values can be added to the same key such that they form a queue. <del> * @returns {HashQueueMap} <del> */ <del>function HashQueueMap() {} <del>HashQueueMap.prototype = { <del> /** <del> * Same as array push, but using an array as the value for the hash <del> */ <del> push: function(key, value) { <del> var array = this[key = hashKey(key)]; <del> if (!array) { <del> this[key] = [value]; <del> } else { <del> array.push(value); <del> } <del> }, <del> <del> /** <del> * Same as array shift, but using an array as the value for the hash <del> */ <del> shift: function(key) { <del> var array = this[key = hashKey(key)]; <del> if (array) { <del> if (array.length == 1) { <del> delete this[key]; <del> return array[0]; <del> } else { <del> return array.shift(); <del> } <del> } <del> }, <del> <del> /** <del> * return the first item without deleting it <del> */ <del> peek: function(key) { <del> var array = this[hashKey(key)]; <del> if (array) { <del> return array[0]; <del> } <del> } <del>}; <ide><path>src/ng/directive/ngRepeat.js <ide> * @element ANY <ide> * @scope <ide> * @priority 1000 <del> * @param {repeat_expression} ngRepeat The expression indicating how to enumerate a collection. Two <add> * @param {repeat_expression} ngRepeat The expression indicating how to enumerate a collection. These <ide> * formats are currently supported: <ide> * <ide> * * `variable in expression` – where variable is the user defined loop variable and `expression` <ide> * <ide> * For example: `(name, age) in {'adam':10, 'amalie':12}`. <ide> * <add> * * `variable in expression track by tracking_expression` – You can also provide an optional tracking function <add> * which can be used to associate the objects in the collection with the DOM elements. If no tractking function <add> * is specified the ng-repeat associates elements by identity in the collection. It is an error to have <add> * more then one tractking function to resolve to the same key. (This would mean that two distinct objects are <add> * mapped to the same DOM element, which is not possible.) <add> * <add> * For example: `item in items` is equivalent to `item in items track by $id(item)'. This implies that the DOM elements <add> * will be associated by item identity in the array. <add> * <add> * For example: `item in items track by $id(item)`. A built in `$id()` function can be used to assign a unique <add> * `$$hashKey` property to each item in the array. This property is then used as a key to associated DOM elements <add> * with the corresponding item in the array by identity. Moving the same object in array would move the DOM <add> * element in the same way ian the DOM. <add> * <add> * For example: `item in items track by item.id` Is a typical pattern when the items come from the database. In this <add> * case the object identity does not matter. Two objects are considered equivalent as long as their `id` <add> * property is same. 
<add> * <ide> * @example <ide> * This example initializes the scope to a list of names and <ide> * then uses `ngRepeat` to display every person: <ide> </doc:scenario> <ide> </doc:example> <ide> */ <del>var ngRepeatDirective = ngDirective({ <del> transclude: 'element', <del> priority: 1000, <del> terminal: true, <del> compile: function(element, attr, linker) { <del> return function(scope, iterStartElement, attr){ <del> var expression = attr.ngRepeat; <del> var match = expression.match(/^\s*(.+)\s+in\s+(.*)\s*$/), <del> lhs, rhs, valueIdent, keyIdent; <del> if (! match) { <del> throw Error("Expected ngRepeat in form of '_item_ in _collection_' but got '" + <del> expression + "'."); <del> } <del> lhs = match[1]; <del> rhs = match[2]; <del> match = lhs.match(/^(?:([\$\w]+)|\(([\$\w]+)\s*,\s*([\$\w]+)\))$/); <del> if (!match) { <del> throw Error("'item' in 'item in collection' should be identifier or (key, value) but got '" + <del> lhs + "'."); <del> } <del> valueIdent = match[3] || match[1]; <del> keyIdent = match[2]; <del> <del> // Store a list of elements from previous run. This is a hash where key is the item from the <del> // iterator, and the value is an array of objects with following properties. <del> // - scope: bound scope <del> // - element: previous element. <del> // - index: position <del> // We need an array of these objects since the same object can be returned from the iterator. <del> // We expect this to be a rare case. <del> var lastOrder = new HashQueueMap(); <del> <del> scope.$watch(function ngRepeatWatch(scope){ <del> var index, length, <del> collection = scope.$eval(rhs), <del> cursor = iterStartElement, // current position of the node <del> // Same as lastOrder but it has the current state. It will become the <del> // lastOrder on the next iteration. <del> nextOrder = new HashQueueMap(), <del> arrayBound, <del> childScope, <del> key, value, // key/value of iteration <del> array, <del> last; // last object information {scope, element, index} <del> <del> <del> <del> if (!isArray(collection)) { <del> // if object, extract keys, sort them and use to determine order of iteration over obj props <del> array = []; <del> for(key in collection) { <del> if (collection.hasOwnProperty(key) && key.charAt(0) != '$') { <del> array.push(key); <del> } <del> } <del> array.sort(); <del> } else { <del> array = collection || []; <add>var ngRepeatDirective = ['$parse', function($parse) { <add> return { <add> transclude: 'element', <add> priority: 1000, <add> terminal: true, <add> compile: function(element, attr, linker) { <add> return function($scope, $element, $attr){ <add> var expression = $attr.ngRepeat; <add> var match = expression.match(/^\s*(.+)\s+in\s+(.*?)\s*(\s+track\s+by\s+(.+)\s*)?$/), <add> trackByExp, hashExpFn, trackByIdFn, lhs, rhs, valueIdentifier, keyIdentifier, <add> hashFnLocals = {$id: hashKey}; <add> <add> if (!match) { <add> throw Error("Expected ngRepeat in form of '_item_ in _collection_[ track by _id_]' but got '" + <add> expression + "'."); <ide> } <ide> <del> arrayBound = array.length-1; <del> <del> // we are not using forEach for perf reasons (trying to avoid #call) <del> for (index = 0, length = array.length; index < length; index++) { <del> key = (collection === array) ? 
index : array[index]; <del> value = collection[key]; <del> <del> last = lastOrder.shift(value); <del> <del> if (last) { <del> // if we have already seen this object, then we need to reuse the <del> // associated scope/element <del> childScope = last.scope; <del> nextOrder.push(value, last); <del> <del> if (index === last.index) { <del> // do nothing <del> cursor = last.element; <del> } else { <del> // existing item which got moved <del> last.index = index; <del> // This may be a noop, if the element is next, but I don't know of a good way to <del> // figure this out, since it would require extra DOM access, so let's just hope that <del> // the browsers realizes that it is noop, and treats it as such. <del> cursor.after(last.element); <del> cursor = last.element; <del> } <add> lhs = match[1]; <add> rhs = match[2]; <add> trackByExp = match[4]; <add> <add> if (trackByExp) { <add> hashExpFn = $parse(trackByExp); <add> trackByIdFn = function(key, value, index) { <add> // assign key, value, and $index to the locals so that they can be used in hash functions <add> if (keyIdentifier) hashFnLocals[keyIdentifier] = key; <add> hashFnLocals[valueIdentifier] = value; <add> hashFnLocals.$index = index; <add> return hashExpFn($scope, hashFnLocals); <add> }; <add> } else { <add> trackByIdFn = function(key, value) { <add> return hashKey(value); <add> } <add> } <add> <add> match = lhs.match(/^(?:([\$\w]+)|\(([\$\w]+)\s*,\s*([\$\w]+)\))$/); <add> if (!match) { <add> throw Error("'item' in 'item in collection' should be identifier or (key, value) but got '" + <add> lhs + "'."); <add> } <add> valueIdentifier = match[3] || match[1]; <add> keyIdentifier = match[2]; <add> <add> // Store a list of elements from previous run. This is a hash where key is the item from the <add> // iterator, and the value is objects with following properties. <add> // - scope: bound scope <add> // - element: previous element. <add> // - index: position <add> var lastBlockMap = {}; <add> <add> //watch props <add> $scope.$watchCollection(rhs, function ngRepeatAction(collection){ <add> var index, length, <add> cursor = $element, // current position of the node <add> // Same as lastBlockMap but it has the current state. It will become the <add> // lastBlockMap on the next iteration. 
<add> nextBlockMap = {}, <add> arrayLength, <add> childScope, <add> key, value, // key/value of iteration <add> trackById, <add> collectionKeys, <add> block, // last object information {scope, element, id} <add> nextBlockOrder = []; <add> <add> <add> if (isArray(collection)) { <add> collectionKeys = collection; <ide> } else { <del> // new item which we don't know about <del> childScope = scope.$new(); <add> // if object, extract keys, sort them and use to determine order of iteration over obj props <add> collectionKeys = []; <add> for (key in collection) { <add> if (collection.hasOwnProperty(key) && key.charAt(0) != '$') { <add> collectionKeys.push(key); <add> } <add> } <add> collectionKeys.sort(); <ide> } <ide> <del> childScope[valueIdent] = value; <del> if (keyIdent) childScope[keyIdent] = key; <del> childScope.$index = index; <del> <del> childScope.$first = (index === 0); <del> childScope.$last = (index === arrayBound); <del> childScope.$middle = !(childScope.$first || childScope.$last); <del> <del> if (!last) { <del> linker(childScope, function(clone){ <del> cursor.after(clone); <del> last = { <del> scope: childScope, <del> element: (cursor = clone), <del> index: index <del> }; <del> nextOrder.push(value, last); <del> }); <add> arrayLength = collectionKeys.length; <add> <add> // locate existing items <add> length = nextBlockOrder.length = collectionKeys.length; <add> for(index = 0; index < length; index++) { <add> key = (collection === collectionKeys) ? index : collectionKeys[index]; <add> value = collection[key]; <add> trackById = trackByIdFn(key, value, index); <add> if((block = lastBlockMap[trackById])) { <add> delete lastBlockMap[trackById]; <add> nextBlockMap[trackById] = block; <add> nextBlockOrder[index] = block; <add> } else if (nextBlockMap.hasOwnProperty(trackById)) { <add> // restore lastBlockMap <add> forEach(nextBlockOrder, function(block) { <add> if (block && block.element) lastBlockMap[block.id] = block; <add> }); <add> // This is a duplicate and we need to throw an error <add> throw new Error('Duplicates in a repeater are not allowed. Repeater: ' + expression); <add> } else { <add> // new never before seen block <add> nextBlockOrder[index] = { id: trackById }; <add> } <add> } <add> <add> // remove existing items <add> for (key in lastBlockMap) { <add> if (lastBlockMap.hasOwnProperty(key)) { <add> block = lastBlockMap[key]; <add> block.element.remove(); <add> block.scope.$destroy(); <add> } <ide> } <del> } <ide> <del> //shrink children <del> for (key in lastOrder) { <del> if (lastOrder.hasOwnProperty(key)) { <del> array = lastOrder[key]; <del> while(array.length) { <del> value = array.pop(); <del> value.element.remove(); <del> value.scope.$destroy(); <add> // we are not using forEach for perf reasons (trying to avoid #call) <add> for (index = 0, length = collectionKeys.length; index < length; index++) { <add> key = (collection === collectionKeys) ? 
index : collectionKeys[index]; <add> value = collection[key]; <add> block = nextBlockOrder[index]; <add> <add> if (block.element) { <add> // if we have already seen this object, then we need to reuse the <add> // associated scope/element <add> childScope = block.scope; <add> <add> if (block.element == cursor) { <add> // do nothing <add> cursor = block.element; <add> } else { <add> // existing item which got moved <add> cursor.after(block.element); <add> cursor = block.element; <add> } <add> } else { <add> // new item which we don't know about <add> childScope = $scope.$new(); <ide> } <del> } <del> } <ide> <del> lastOrder = nextOrder; <del> }); <del> }; <del> } <del>}); <add> childScope[valueIdentifier] = value; <add> if (keyIdentifier) childScope[keyIdentifier] = key; <add> childScope.$index = index; <add> childScope.$first = (index === 0); <add> childScope.$last = (index === (arrayLength - 1)); <add> childScope.$middle = !(childScope.$first || childScope.$last); <add> <add> if (!block.element) { <add> linker(childScope, function(clone){ <add> cursor.after(clone); <add> cursor = clone; <add> block.scope = childScope; <add> block.element = clone; <add> nextBlockMap[block.id] = block; <add> }); <add> } <add> } <add> lastBlockMap = nextBlockMap; <add> }); <add> }; <add> } <add> }; <add>}]; <ide><path>test/ApiSpecs.js <ide> describe('api', function() { <ide> expect(map.get('c')).toBe(undefined); <ide> }); <ide> }); <del> <del> <del> describe('HashQueueMap', function() { <del> it('should do basic crud with collections', function() { <del> var map = new HashQueueMap(); <del> map.push('key', 'a'); <del> map.push('key', 'b'); <del> expect(map[hashKey('key')]).toEqual(['a', 'b']); <del> expect(map.peek('key')).toEqual('a'); <del> expect(map[hashKey('key')]).toEqual(['a', 'b']); <del> expect(map.shift('key')).toEqual('a'); <del> expect(map.peek('key')).toEqual('b'); <del> expect(map[hashKey('key')]).toEqual(['b']); <del> expect(map.shift('key')).toEqual('b'); <del> expect(map.shift('key')).toEqual(undefined); <del> expect(map[hashKey('key')]).toEqual(undefined); <del> }); <del> <del> it('should support primitive and object keys', function() { <del> var obj1 = {}, <del> obj2 = {}; <del> <del> var map = new HashQueueMap(); <del> map.push(obj1, 'a1'); <del> map.push(obj1, 'a2'); <del> map.push(obj2, 'b'); <del> map.push(1, 'c'); <del> map.push(undefined, 'd'); <del> map.push(null, 'e'); <del> <del> expect(map[hashKey(obj1)]).toEqual(['a1', 'a2']); <del> expect(map[hashKey(obj2)]).toEqual(['b']); <del> expect(map[hashKey(1)]).toEqual(['c']); <del> expect(map[hashKey(undefined)]).toEqual(['d']); <del> expect(map[hashKey(null)]).toEqual(['e']); <del> }); <del> }); <ide> }); <ide> <ide><path>test/ng/directive/ngClassSpec.js <ide> describe('ngClass', function() { <ide> <ide> it('should update ngClassOdd/Even when model is changed by filtering', inject(function($rootScope, $compile) { <ide> element = $compile('<ul>' + <del> '<li ng-repeat="i in items" ' + <add> '<li ng-repeat="i in items track by $index" ' + <ide> 'ng-class-odd="\'odd\'" ng-class-even="\'even\'"></li>' + <ide> '<ul>')($rootScope); <ide> $rootScope.items = ['a','b','a']; <ide><path>test/ng/directive/ngRepeatSpec.js <ide> 'use strict'; <ide> <ide> describe('ngRepeat', function() { <del> var element, $compile, scope; <add> var element, $compile, scope, $exceptionHandler; <ide> <ide> <del> beforeEach(inject(function(_$compile_, $rootScope) { <add> beforeEach(module(function($exceptionHandlerProvider) { <add> 
$exceptionHandlerProvider.mode('log'); <add> })); <add> <add> beforeEach(inject(function(_$compile_, $rootScope, _$exceptionHandler_) { <ide> $compile = _$compile_; <add> $exceptionHandler = _$exceptionHandler_; <ide> scope = $rootScope.$new(); <ide> })); <ide> <ide> <del> afterEach(function(){ <add> afterEach(function() { <add> if ($exceptionHandler.errors.length) { <add> dump(jasmine.getEnv().currentSpec.getFullName()); <add> dump('$exceptionHandler has errors'); <add> dump($exceptionHandler.errors); <add> expect($exceptionHandler.errors).toBe([]); <add> } <ide> dealoc(element); <ide> }); <ide> <ide> describe('ngRepeat', function() { <ide> }); <ide> <ide> <del> it('should iterate over an array of primitives', function() { <add> it('should iterate over on object/map', function() { <ide> element = $compile( <ide> '<ul>' + <del> '<li ng-repeat="item in items">{{item}};</li>' + <add> '<li ng-repeat="(key, value) in items">{{key}}:{{value}}|</li>' + <ide> '</ul>')(scope); <del> <del> Array.prototype.extraProperty = "should be ignored"; <del> // INIT <del> scope.items = [true, true, true]; <add> scope.items = {misko:'swe', shyam:'set'}; <ide> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('true;true;true;'); <del> delete Array.prototype.extraProperty; <add> expect(element.text()).toEqual('misko:swe|shyam:set|'); <add> }); <ide> <del> scope.items = [false, true, true]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('false;true;true;'); <ide> <del> scope.items = [false, true, false]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('false;true;false;'); <add> describe('track by', function() { <add> it('should track using expression function', function() { <add> element = $compile( <add> '<ul>' + <add> '<li ng-repeat="item in items track by item.id">{{item.name}};</li>' + <add> '</ul>')(scope); <add> scope.items = [{id: 'misko'}, {id: 'igor'}]; <add> scope.$digest(); <add> var li0 = element.find('li')[0]; <add> var li1 = element.find('li')[1]; <ide> <del> scope.items = [true]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(1); <del> expect(element.text()).toEqual('true;'); <add> scope.items.push(scope.items.shift()); <add> scope.$digest(); <add> expect(element.find('li')[0]).toBe(li1); <add> expect(element.find('li')[1]).toBe(li0); <add> }); <ide> <del> scope.items = [true, true, false]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('true;true;false;'); <ide> <del> scope.items = [true, false, false]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('true;false;false;'); <add> it('should track using build in $id function', function() { <add> element = $compile( <add> '<ul>' + <add> '<li ng-repeat="item in items track by $id(item)">{{item.name}};</li>' + <add> '</ul>')(scope); <add> scope.items = [{name: 'misko'}, {name: 'igor'}]; <add> scope.$digest(); <add> var li0 = element.find('li')[0]; <add> var li1 = element.find('li')[1]; <ide> <del> // string <del> scope.items = ['a', 'a', 'a']; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('a;a;a;'); <add> scope.items.push(scope.items.shift()); <add> scope.$digest(); <add> expect(element.find('li')[0]).toBe(li1); <add> 
expect(element.find('li')[1]).toBe(li0); <add> }); <ide> <del> scope.items = ['ab', 'a', 'a']; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('ab;a;a;'); <ide> <del> scope.items = ['test']; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(1); <del> expect(element.text()).toEqual('test;'); <add> it('should iterate over an array of primitives', function() { <add> element = $compile( <add> '<ul>' + <add> '<li ng-repeat="item in items track by $index">{{item}};</li>' + <add> '</ul>')(scope); <ide> <del> scope.items = ['same', 'value']; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(2); <del> expect(element.text()).toEqual('same;value;'); <add> Array.prototype.extraProperty = "should be ignored"; <add> // INIT <add> scope.items = [true, true, true]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('true;true;true;'); <add> delete Array.prototype.extraProperty; <ide> <del> // number <del> scope.items = [12, 12, 12]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('12;12;12;'); <add> scope.items = [false, true, true]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('false;true;true;'); <ide> <del> scope.items = [53, 12, 27]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('53;12;27;'); <add> scope.items = [false, true, false]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('false;true;false;'); <ide> <del> scope.items = [89]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(1); <del> expect(element.text()).toEqual('89;'); <add> scope.items = [true]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(1); <add> expect(element.text()).toEqual('true;'); <ide> <del> scope.items = [89, 23]; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(2); <del> expect(element.text()).toEqual('89;23;'); <del> }); <add> scope.items = [true, true, false]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('true;true;false;'); <ide> <add> scope.items = [true, false, false]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('true;false;false;'); <ide> <del> it('should iterate over on object/map', function() { <del> element = $compile( <del> '<ul>' + <del> '<li ng-repeat="(key, value) in items">{{key}}:{{value}}|</li>' + <del> '</ul>')(scope); <del> scope.items = {misko:'swe', shyam:'set'}; <del> scope.$digest(); <del> expect(element.text()).toEqual('misko:swe|shyam:set|'); <del> }); <add> // string <add> scope.items = ['a', 'a', 'a']; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('a;a;a;'); <ide> <add> scope.items = ['ab', 'a', 'a']; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('ab;a;a;'); <ide> <del> it('should iterate over object with changing primitive property values', function() { <del> // test for issue #933 <add> scope.items = ['test']; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(1); <add> expect(element.text()).toEqual('test;'); <ide> <del> element 
= $compile( <del> '<ul>' + <del> '<li ng-repeat="(key, value) in items">' + <del> '{{key}}:{{value}};' + <del> '<input type="checkbox" ng-model="items[key]">' + <del> '</li>' + <del> '</ul>')(scope); <add> scope.items = ['same', 'value']; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(2); <add> expect(element.text()).toEqual('same;value;'); <ide> <del> scope.items = {misko: true, shyam: true, zhenbo:true}; <del> scope.$digest(); <del> expect(element.find('li').length).toEqual(3); <del> expect(element.text()).toEqual('misko:true;shyam:true;zhenbo:true;'); <add> // number <add> scope.items = [12, 12, 12]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('12;12;12;'); <ide> <del> browserTrigger(element.find('input').eq(0), 'click'); <add> scope.items = [53, 12, 27]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('53;12;27;'); <ide> <del> expect(element.text()).toEqual('misko:false;shyam:true;zhenbo:true;'); <del> expect(element.find('input')[0].checked).toBe(false); <del> expect(element.find('input')[1].checked).toBe(true); <del> expect(element.find('input')[2].checked).toBe(true); <add> scope.items = [89]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(1); <add> expect(element.text()).toEqual('89;'); <add> <add> scope.items = [89, 23]; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(2); <add> expect(element.text()).toEqual('89;23;'); <add> }); <ide> <del> browserTrigger(element.find('input').eq(0), 'click'); <del> expect(element.text()).toEqual('misko:true;shyam:true;zhenbo:true;'); <del> expect(element.find('input')[0].checked).toBe(true); <del> expect(element.find('input')[1].checked).toBe(true); <del> expect(element.find('input')[2].checked).toBe(true); <ide> <del> browserTrigger(element.find('input').eq(1), 'click'); <del> expect(element.text()).toEqual('misko:true;shyam:false;zhenbo:true;'); <del> expect(element.find('input')[0].checked).toBe(true); <del> expect(element.find('input')[1].checked).toBe(false); <del> expect(element.find('input')[2].checked).toBe(true); <add> it('should iterate over object with changing primitive property values', function() { <add> // test for issue #933 <ide> <del> scope.items = {misko: false, shyam: true, zhenbo: true}; <del> scope.$digest(); <del> expect(element.text()).toEqual('misko:false;shyam:true;zhenbo:true;'); <del> expect(element.find('input')[0].checked).toBe(false); <del> expect(element.find('input')[1].checked).toBe(true); <del> expect(element.find('input')[2].checked).toBe(true); <add> element = $compile( <add> '<ul>' + <add> '<li ng-repeat="(key, value) in items track by $index">' + <add> '{{key}}:{{value}};' + <add> '<input type="checkbox" ng-model="items[key]">' + <add> '</li>' + <add> '</ul>')(scope); <add> <add> scope.items = {misko: true, shyam: true, zhenbo:true}; <add> scope.$digest(); <add> expect(element.find('li').length).toEqual(3); <add> expect(element.text()).toEqual('misko:true;shyam:true;zhenbo:true;'); <add> <add> browserTrigger(element.find('input').eq(0), 'click'); <add> <add> expect(element.text()).toEqual('misko:false;shyam:true;zhenbo:true;'); <add> expect(element.find('input')[0].checked).toBe(false); <add> expect(element.find('input')[1].checked).toBe(true); <add> expect(element.find('input')[2].checked).toBe(true); <add> <add> browserTrigger(element.find('input').eq(0), 'click'); <add> 
expect(element.text()).toEqual('misko:true;shyam:true;zhenbo:true;'); <add> expect(element.find('input')[0].checked).toBe(true); <add> expect(element.find('input')[1].checked).toBe(true); <add> expect(element.find('input')[2].checked).toBe(true); <add> <add> browserTrigger(element.find('input').eq(1), 'click'); <add> expect(element.text()).toEqual('misko:true;shyam:false;zhenbo:true;'); <add> expect(element.find('input')[0].checked).toBe(true); <add> expect(element.find('input')[1].checked).toBe(false); <add> expect(element.find('input')[2].checked).toBe(true); <add> <add> scope.items = {misko: false, shyam: true, zhenbo: true}; <add> scope.$digest(); <add> expect(element.text()).toEqual('misko:false;shyam:true;zhenbo:true;'); <add> expect(element.find('input')[0].checked).toBe(false); <add> expect(element.find('input')[1].checked).toBe(true); <add> expect(element.find('input')[2].checked).toBe(true); <add> }); <ide> }); <ide> <ide> <ide> describe('ngRepeat', function() { <ide> <ide> <ide> it('should error on wrong parsing of ngRepeat', function() { <del> expect(function() { <del> element = jqLite('<ul><li ng-repeat="i dont parse"></li></ul>'); <del> $compile(element)(scope); <del> }).toThrow("Expected ngRepeat in form of '_item_ in _collection_' but got 'i dont parse'."); <add> element = jqLite('<ul><li ng-repeat="i dont parse"></li></ul>'); <add> $compile(element)(scope); <add> expect($exceptionHandler.errors.shift()[0].message). <add> toBe("Expected ngRepeat in form of '_item_ in _collection_[ track by _id_]' but got 'i dont parse'."); <ide> }); <ide> <ide> <ide> it("should throw error when left-hand-side of ngRepeat can't be parsed", function() { <del> expect(function() { <ide> element = jqLite('<ul><li ng-repeat="i dont parse in foo"></li></ul>'); <ide> $compile(element)(scope); <del> }).toThrow("'item' in 'item in collection' should be identifier or (key, value) but got " + <del> "'i dont parse'."); <add> expect($exceptionHandler.errors.shift()[0].message). <add> toBe("'item' in 'item in collection' should be identifier or (key, value) but got 'i dont parse'."); <ide> }); <ide> <ide> <ide> describe('ngRepeat', function() { <ide> it('should ignore $ and $$ properties', function() { <ide> element = $compile('<ul><li ng-repeat="i in items">{{i}}|</li></ul>')(scope); <ide> scope.items = ['a', 'b', 'c']; <del> scope.items.$$hashkey = 'xxx'; <add> scope.items.$$hashKey = 'xxx'; <ide> scope.items.$root = 'yyy'; <ide> scope.$digest(); <ide> <ide> describe('ngRepeat', function() { <ide> }); <ide> <ide> <del> it('should support duplicates', function() { <del> scope.items = [a, a, b, c]; <del> scope.$digest(); <del> var newElements = element.find('li'); <del> expect(newElements[0]).toEqual(lis[0]); <del> expect(newElements[1]).not.toEqual(lis[0]); <del> expect(newElements[2]).toEqual(lis[1]); <del> expect(newElements[3]).toEqual(lis[2]); <del> <del> lis = newElements; <add> it('should throw error on duplicates and recover', function() { <add> scope.items = [a, a, a]; <ide> scope.$digest(); <del> newElements = element.find('li'); <del> expect(newElements[0]).toEqual(lis[0]); <del> expect(newElements[1]).toEqual(lis[1]); <del> expect(newElements[2]).toEqual(lis[2]); <del> expect(newElements[3]).toEqual(lis[3]); <add> expect($exceptionHandler.errors.shift().message). <add> toEqual('Duplicates in a repeater are not allowed. 
Repeater: item in items'); <ide> <add> // recover <add> scope.items = [a]; <ide> scope.$digest(); <del> newElements = element.find('li'); <add> var newElements = element.find('li'); <add> expect(newElements.length).toEqual(1); <ide> expect(newElements[0]).toEqual(lis[0]); <del> expect(newElements[1]).toEqual(lis[1]); <del> expect(newElements[2]).toEqual(lis[2]); <del> expect(newElements[3]).toEqual(lis[3]); <del> }); <del> <ide> <del> it('should remove last item when one duplicate instance is removed', function() { <del> scope.items = [a, a, a]; <del> scope.$digest(); <del> lis = element.find('li'); <del> <del> scope.items = [a, a]; <add> scope.items = []; <ide> scope.$digest(); <ide> var newElements = element.find('li'); <del> expect(newElements.length).toEqual(2); <del> expect(newElements[0]).toEqual(lis[0]); <del> expect(newElements[1]).toEqual(lis[1]); <add> expect(newElements.length).toEqual(0); <ide> }); <ide> <ide>
5
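The documentation added in the patch above explains the new `track by` syntax for associating collection items with DOM elements. The following is a minimal AngularJS 1.x sketch of that usage; the module name, controller name, and the `id`/`name` properties are illustrative assumptions, not taken from the commit:

```js
// Template (illustrative):
//   <ul ng-controller="ListCtrl">
//     <li ng-repeat="item in items track by item.id">{{item.name}}</li>
//   </ul>
angular.module('app', []).controller('ListCtrl', function($scope) {
  $scope.items = [
    { id: 1, name: 'first' },
    { id: 2, name: 'second' }
  ];

  // Replacing the array with new-but-equivalent objects keeps the existing
  // <li> elements, because rows are matched on item.id rather than on
  // object identity ($$hashKey).
  $scope.reload = function() {
    $scope.items = [
      { id: 1, name: 'first' },
      { id: 2, name: 'second (reloaded)' }
    ];
  };
});
```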
Text
Text
add explanation of cancel token
3958e9fbc8ebc0f72a8ce568cbf86f391d6869f3
<ide><path>README.md
<ide> cancel();
<ide> ```
<ide> 
<ide> > Note: you can cancel several requests with the same cancel token.
<add>> If a cancellation token is already cancelled at the moment of starting an Axios request, then the request is cancelled immediately, without any attempts to make a real request.
<ide> 
<ide> ## Using application/x-www-form-urlencoded format
<ide> 
1
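The README note added above states that a token which is already cancelled aborts the request before any network activity. A small sketch of that behaviour using axios' documented `CancelToken` API (the URL is a placeholder):

```js
const axios = require('axios');

// Cancel the token before the request is ever dispatched.
const source = axios.CancelToken.source();
source.cancel('Cancelled before dispatch.');

axios.get('https://example.com/api/items', { cancelToken: source.token })
  .catch(function (err) {
    if (axios.isCancel(err)) {
      // Reached immediately, without a network attempt, per the note above.
      console.log('Request cancelled:', err.message);
    }
  });
```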
Python
Python
fix raise_warnings in np.test in py3
ede15881846af7aa6058c89359b9f9190201bfb5
<ide><path>numpy/testing/nosetester.py <ide> def test(self, label='fast', verbose=1, extra_argv=None, <ide> <ide> _warn_opts = dict(develop=(DeprecationWarning, RuntimeWarning), <ide> release=()) <del> if raise_warnings in _warn_opts.keys(): <add> if isinstance(raise_warnings, basestring): <ide> raise_warnings = _warn_opts[raise_warnings] <ide> <ide> with warnings.catch_warnings():
1
Text
Text
add react 18 changelog
509d2d906591b0fd364b2853433a4fe5b111bf02
<ide><path>CHANGELOG.md <add>## 18.0.0 (March 29, 2022) <add> <add>Below is a list of all new features, APIs, deprecations, and breaking changes. <add>Read [React 18 release post](https://reactjs.org/blog/2022/03/29/react-v18.html) and [React 18 upgrade guide](https://reactjs.org/blog/2022/03/08/react-18-upgrade-guide.html) for more information. <add> <add>### New Features <add> <add>### React <add> <add>* `useId` is a new hook for generating unique IDs on both the client and server, while avoiding hydration mismatches. It is primarily useful for component libraries integrating with accessibility APIs that require unique IDs. This solves an issue that already exists in React 17 and below, but it’s even more important in React 18 because of how the new streaming server renderer delivers HTML out-of-order. <add>* `startTransition` and `useTransition` let you mark some state updates as not urgent. Other state updates are considered urgent by default. React will allow urgent state updates (for example, updating a text input) to interrupt non-urgent state updates (for example, rendering a list of search results). <add>* `useDeferredValue` lets you defer re-rendering a non-urgent part of the tree. It is similar to debouncing, but has a few advantages compared to it. There is no fixed time delay, so React will attempt the deferred render right after the first render is reflected on the screen. The deferred render is interruptible and doesn't block user input. <add>* `useSyncExternalStore` is a new hook that allows external stores to support concurrent reads by forcing updates to the store to be synchronous. It removes the need for `useEffect` when implementing subscriptions to external data sources, and is recommended for any library that integrates with state external to React. <add>* `useInsertionEffect` is a new hook that allows CSS-in-JS libraries to address performance issues of injecting styles in render. Unless you’ve already built a CSS-in-JS library we don’t expect you to ever use this. This hook will run after the DOM is mutated, but before layout effects read the new layout. This solves an issue that already exists in React 17 and below, but is even more important in React 18 because React yields to the browser during concurrent rendering, giving it a chance to recalculate layout. <add> <add>### React DOM Client <add> <add>These new APIs are now exported from `react-dom/client`: <add> <add>* `createRoot`: New method to create a root to `render` or `unmount`. Use it instead of `ReactDOM.render`. New features in React 18 don't work without it. <add>* `hydrateRoot`: New method to hydrate a server rendered application. Use it instead of `ReactDOM.hydrate` in conjunction with the new React DOM Server APIs. New features in React 18 don't work without it. <add> <add>Both `createRoot` and `hydrateRoot` accept a new option called `onRecoverableError` in case you want to be notified when React recovers from errors during rendering or hydration for logging. By default, React will use [`reportError`](https://developer.mozilla.org/en-US/docs/Web/API/reportError), or `console.error` in the older browsers. <add> <add>### React DOM Server <add> <add>These new APIs are now exported from `react-dom/server` and have full support for streaming Suspense on the server: <add> <add>* `renderToPipeableStream`: for streaming in Node environments. <add>* `renderToReadableStream`: for modern edge runtime environments, such as Deno and Cloudflare workers. 
<add> <add>The existing `renderToString` method keeps working but is discouraged. <add> <add>## Deprecations <add> <add>* `react-dom`: `ReactDOM.render` has been deprecated. Using it will warn and run your app in React 17 mode. <add>* `react-dom`: `ReactDOM.hydrate` has been deprecated. Using it will warn and run your app in React 17 mode. <add>* `react-dom`: `ReactDOM.unmountComponentAtNode` has been deprecated. <add>* `react-dom`: `ReactDOM.renderSubtreeIntoContainer` has been deprecated. <add>* `react-dom/server`: `ReactDOMServer.renderToNodeStream` has been deprecated. <add> <add>## Breaking Changes <add> <add>### React <add> <add>* **Automatic batching:** This release introduces a performance improvement that changes to the way React batches updates to do more batching automatically. See [Automatic batching for fewer renders in React 18](https://github.com/reactwg/react-18/discussions/21) for more info. In the rare case that you need to opt out, wrap the state update in `flushSync`. <add>* **Stricter Strict Mode**: In the future, React will provide a feature that lets components preserve state between unmounts. To prepare for it, React 18 introduces a new development-only check to Strict Mode. React will automatically unmount and remount every component, whenever a component mounts for the first time, restoring the previous state on the second mount. If this breaks your app, consider removing Strict Mode until you can fix the components to be resilient to remounting with existing state. <add>* **Consistent useEffect timing**: React now always synchronously flushes effect functions if the update was triggered during a discrete user input event such as a click or a keydown event. Previously, the behavior wasn't always predictable or consistent. <add>* **Stricter hydration errors**: Hydration mismatches due to missing or extra text content are now treated like errors instead of warnings. React will no longer attempt to "patch up" individual nodes by inserting or deleting a node on the client in an attempt to match the server markup, and will revert to client rendering up to the closest `<Suspense>` boundary in the tree. This ensures the hydrated tree is consistent and avoids potential privacy and security holes that can be caused by hydration mismatches. <add>* **Layout Effects with Suspense**: When a tree re-suspends and reverts to a fallback, React will now clean up layout effects, and then re-create them when the content inside the boundary is shown again. This fixes an issue which prevented component libraries from correctly measuring layout when used with Suspense. <add>* **New JS Environment Requirements**: React now depends on modern browsers features including `Promise`, `Symbol`, and `Object.assign`. If you support older browsers and devices such as Internet Explorer which do not provide modern browser features natively or have non-compliant implementations, consider including a global polyfill in your bundled application. <add> <add>## Notable Changes <add> <add>### React <add> <add>* **Components can now render `undefined`:** React no longer throws if you return `undefined` from a component. This makes the allowed component return values consistent with values that are allowed in the middle of a component tree. We suggest to use a linter to prevent mistakes like forgetting a `return` statement before JSX. <add>* **In tests, `act` warnings are now opt-in:** If you're running end-to-end tests, the `act` warnings are unnecessary. 
We've introduced an [opt-in](https://github.com/reactwg/react-18/discussions/102) mechanism so you can enable them only for unit tests where they are useful and beneficial. <add>* **No warning about `setState` on unmounted components:** Previously, React warned about memory leaks when you call `setState` on an unmounted component. This warning was added for subscriptions, but people primarily run into it in scenarios where setting state is fine, and workarounds make the code worse. We've [removed](https://github.com/facebook/react/pull/22114) this warning. <add>* **No suppression of console logs:** When you use Strict Mode, React renders each component twice to help you find unexpected side effects. In React 17, we've suppressed console logs for one of the two renders to make the logs easier to read. In response to [community feedback](https://github.com/facebook/react/issues/21783) about this being confusing, we've removed the suppression. Instead, if you have React DevTools installed, the second log's renders will be displayed in grey, and there will be an option (off by default) to suppress them completely. <add>* **Improved memory usage:** React now cleans up more internal fields on unmount, making the impact from unfixed memory leaks that may exist in your application code less severe. <add> <add>### React DOM Server <add> <add>* **`renderToString`:** Will no longer error when suspending on the server. Instead, it will emit the fallback HTML for the closest `<Suspense>` boundary and then retry rendering the same content on the client. It is still recommended that you switch to a streaming API like `renderToPipeableStream` or `renderToReadableStream` instead. <add>* **`renderToStaticMarkup`:** Will no longer error when suspending on the server. Instead, it will emit the fallback HTML for the closest `<Suspense>` boundary and retry rendering on the client. <add> <add>## All Changes <add> <add>## React <add> <add>* Add `useTransition` and `useDeferredValue` to separate urgent updates from transitions. ([#10426](https://github.com/facebook/react/pull/10426), [#10715](https://github.com/facebook/react/pull/10715), [#15593](https://github.com/facebook/react/pull/15593), [#15272](https://github.com/facebook/react/pull/15272), [#15578](https://github.com/facebook/react/pull/15578), [#15769](https://github.com/facebook/react/pull/15769), [#17058](https://github.com/facebook/react/pull/17058), [#18796](https://github.com/facebook/react/pull/18796), [#19121](https://github.com/facebook/react/pull/19121), [#19703](https://github.com/facebook/react/pull/19703), [#19719](https://github.com/facebook/react/pull/19719), [#19724](https://github.com/facebook/react/pull/19724), [#20672](https://github.com/facebook/react/pull/20672), [#20976](https://github.com/facebook/react/pull/20976) by [@acdlite](https://github.com/acdlite), [@lunaruan](https://github.com/lunaruan), [@rickhanlonii](https://github.com/rickhanlonii), and [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Add `useId` for generating unique IDs. 
([#17322](https://github.com/facebook/react/pull/17322), [#18576](https://github.com/facebook/react/pull/18576), [#22644](https://github.com/facebook/react/pull/22644), [#22672](https://github.com/facebook/react/pull/22672), [#21260](https://github.com/facebook/react/pull/21260) by [@acdlite](https://github.com/acdlite), [@lunaruan](https://github.com/lunaruan), and [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Add `useSyncExternalStore` to help external store libraries integrate with React. ([#15022](https://github.com/facebook/react/pull/15022), [#18000](https://github.com/facebook/react/pull/18000), [#18771](https://github.com/facebook/react/pull/18771), [#22211](https://github.com/facebook/react/pull/22211), [#22292](https://github.com/facebook/react/pull/22292), [#22239](https://github.com/facebook/react/pull/22239), [#22347](https://github.com/facebook/react/pull/22347), [#23150](https://github.com/facebook/react/pull/23150) by [@acdlite](https://github.com/acdlite), [@bvaughn](https://github.com/bvaughn), and [@drarmstr](https://github.com/drarmstr)) <add>* Add `startTransition` as a version of `useTransition` without pending feedback. ([#19696](https://github.com/facebook/react/pull/19696) by [@rickhanlonii](https://github.com/rickhanlonii)) <add>* Add `useInsertionEffect` for CSS-in-JS libraries. ([#21913](https://github.com/facebook/react/pull/21913) by [@rickhanlonii](https://github.com/rickhanlonii)) <add>* Make Suspense remount layout effects when content reappears. ([#19322](https://github.com/facebook/react/pull/19322), [#19374](https://github.com/facebook/react/pull/19374), [#19523](https://github.com/facebook/react/pull/19523), [#20625](https://github.com/facebook/react/pull/20625), [#21079](https://github.com/facebook/react/pull/21079) by [@acdlite](https://github.com/acdlite), [@bvaughn](https://github.com/bvaughn), and [@lunaruan](https://github.com/lunaruan)) <add>* Make `<StrictMode>` re-run effects to check for restorable state. ([#19523](https://github.com/facebook/react/pull/19523) , [#21418](https://github.com/facebook/react/pull/21418) by [@bvaughn](https://github.com/bvaughn) and [@lunaruan](https://github.com/lunaruan)) <add>* Assume Symbols are always available. ([#23348](https://github.com/facebook/react/pull/23348) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Remove `object-assign` polyfill. ([#23351](https://github.com/facebook/react/pull/23351) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Remove unsupported `unstable_changedBits` API. ([#20953](https://github.com/facebook/react/pull/20953) by [@acdlite](https://github.com/acdlite)) <add>* Allow components to render undefined. ([#21869](https://github.com/facebook/react/pull/21869) by [@rickhanlonii](https://github.com/rickhanlonii)) <add>* Flush `useEffect` resulting from discrete events like clicks synchronously. ([#21150](https://github.com/facebook/react/pull/21150) by [@acdlite](https://github.com/acdlite)) <add>* Suspense `fallback={undefined}` now behaves the same as `null` and isn't ignored. ([#21854](https://github.com/facebook/react/pull/21854) by [@rickhanlonii](https://github.com/rickhanlonii)) <add>* Consider all `lazy()` resolving to the same component equivalent. ([#20357](https://github.com/facebook/react/pull/20357) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Don't patch console during first render. ([#22308](https://github.com/facebook/react/pull/22308) by [@lunaruan](https://github.com/lunaruan)) <add>* Improve memory usage. 
([#21039](https://github.com/facebook/react/pull/21039) by [@bgirard](https://github.com/bgirard)) <add>* Improve messages if string coercion throws (Temporal.*, Symbol, etc.) ([#22064](https://github.com/facebook/react/pull/22064) by [@justingrant](https://github.com/justingrant)) <add>* Use `setImmediate` when available over `MessageChannel`. ([#20834](https://github.com/facebook/react/pull/20834) by [@gaearon](https://github.com/gaearon)) <add>* Fix context failing to propagate inside suspended trees. ([#23095](https://github.com/facebook/react/pull/23095) by [@gaearon](https://github.com/gaearon)) <add>* Fix `useReducer` observing incorrect props by removing the eager bailout mechanism. ([#22445](https://github.com/facebook/react/pull/22445) by [@josephsavona](https://github.com/josephsavona)) <add>* Fix `setState` being ignored in Safari when appending iframes. ([#23111](https://github.com/facebook/react/pull/23111) by [@gaearon](https://github.com/gaearon)) <add>* Fix a crash when rendering `ZonedDateTime` in the tree. ([#20617](https://github.com/facebook/react/pull/20617) by [@dimaqq](https://github.com/dimaqq)) <add>* Fix a crash when document is set to `null` in tests. ([#22695](https://github.com/facebook/react/pull/22695) by [@SimenB](https://github.com/SimenB)) <add>* Fix `onLoad` not triggering when concurrent features are on. ([#23316](https://github.com/facebook/react/pull/23316) by [@gnoff](https://github.com/gnoff)) <add>* Fix a warning when a selector returns `NaN`. ([#23333](https://github.com/facebook/react/pull/23333) by [@hachibeeDI](https://github.com/hachibeeDI)) <add>* Fix a crash when document is set to `null` in tests. ([#22695](https://github.com/facebook/react/pull/22695) by [@SimenB](https://github.com/SimenB)) <add>* Fix the generated license header. ([#23004](https://github.com/facebook/react/pull/23004) by [@vitaliemiron](https://github.com/vitaliemiron)) <add>* Add `package.json` as one of the entry points. ([#22954](https://github.com/facebook/react/pull/22954) by [@Jack](https://github.com/Jack-Works)) <add>* Allow suspending outside a Suspense boundary. ([#23267](https://github.com/facebook/react/pull/23267) by [@acdlite](https://github.com/acdlite)) <add>* Log a recoverable error whenever hydration fails. ([#23319](https://github.com/facebook/react/pull/23319) by [@acdlite](https://github.com/acdlite)) <add> <add>### React DOM <add> <add>* Add `createRoot` and `hydrateRoot`. 
([#10239](https://github.com/facebook/react/pull/10239), [#11225](https://github.com/facebook/react/pull/11225), [#12117](https://github.com/facebook/react/pull/12117), [#13732](https://github.com/facebook/react/pull/13732), [#15502](https://github.com/facebook/react/pull/15502), [#15532](https://github.com/facebook/react/pull/15532), [#17035](https://github.com/facebook/react/pull/17035), [#17165](https://github.com/facebook/react/pull/17165), [#20669](https://github.com/facebook/react/pull/20669), [#20748](https://github.com/facebook/react/pull/20748), [#20888](https://github.com/facebook/react/pull/20888), [#21072](https://github.com/facebook/react/pull/21072), [#21417](https://github.com/facebook/react/pull/21417), [#21652](https://github.com/facebook/react/pull/21652), [#21687](https://github.com/facebook/react/pull/21687), [#23207](https://github.com/facebook/react/pull/23207), [#23385](https://github.com/facebook/react/pull/23385) by [@acdlite](https://github.com/acdlite), [@bvaughn](https://github.com/bvaughn), [@gaearon](https://github.com/gaearon), [@lunaruan](https://github.com/lunaruan), [@rickhanlonii](https://github.com/rickhanlonii), [@trueadm](https://github.com/trueadm), and [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Add selective hydration. ([#14717](https://github.com/facebook/react/pull/14717), [#14884](https://github.com/facebook/react/pull/14884), [#16725](https://github.com/facebook/react/pull/16725), [#16880](https://github.com/facebook/react/pull/16880), [#17004](https://github.com/facebook/react/pull/17004), [#22416](https://github.com/facebook/react/pull/22416), [#22629](https://github.com/facebook/react/pull/22629), [#22448](https://github.com/facebook/react/pull/22448), [#22856](https://github.com/facebook/react/pull/22856), [#23176](https://github.com/facebook/react/pull/23176) by [@acdlite](https://github.com/acdlite), [@gaearon](https://github.com/gaearon), [@salazarm](https://github.com/salazarm), and [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Add `aria-description` to the list of known ARIA attributes. ([#22142](https://github.com/facebook/react/pull/22142) by [@mahyareb](https://github.com/mahyareb)) <add>* Add `onResize` event to video elements. ([#21973](https://github.com/facebook/react/pull/21973) by [@rileyjshaw](https://github.com/rileyjshaw)) <add>* Add `imageSizes` and `imageSrcSet` to known props. ([#22550](https://github.com/facebook/react/pull/22550) by [@eps1lon](https://github.com/eps1lon)) <add>* Allow non-string `<option>` children if `value` is provided. ([#21431](https://github.com/facebook/react/pull/21431) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Fix `aspectRatio` style not being applied. ([#21100](https://github.com/facebook/react/pull/21100) by [@gaearon](https://github.com/gaearon)) <add>* Warn if `renderSubtreeIntoContainer` is called. ([#23355](https://github.com/facebook/react/pull/23355) by [@acdlite](https://github.com/acdlite)) <add> <add>### React DOM Server <add> <add>* Add the new streaming renderer. 
([#14144](https://github.com/facebook/react/pull/14144), [#20970](https://github.com/facebook/react/pull/20970), [#21056](https://github.com/facebook/react/pull/21056), [#21255](https://github.com/facebook/react/pull/21255), [#21200](https://github.com/facebook/react/pull/21200), [#21257](https://github.com/facebook/react/pull/21257), [#21276](https://github.com/facebook/react/pull/21276), [#22443](https://github.com/facebook/react/pull/22443), [#22450](https://github.com/facebook/react/pull/22450), [#23247](https://github.com/facebook/react/pull/23247), [#24025](https://github.com/facebook/react/pull/24025), [#24030](https://github.com/facebook/react/pull/24030) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Fix context providers in SSR when handling multiple requests. ([#23171](https://github.com/facebook/react/pull/23171) by [@frandiox](https://github.com/frandiox)) <add>* Revert to client render on text mismatch. ([#23354](https://github.com/facebook/react/pull/23354) by [@acdlite](https://github.com/acdlite)) <add>* Deprecate `renderToNodeStream`. ([#23359](https://github.com/facebook/react/pull/23359) by [@sebmarkbage](https://github.com/sebmarkbage)) <add>* Fix a spurious error log in the new server renderer. ([#24043](https://github.com/facebook/react/pull/24043) by [@eps1lon](https://github.com/eps1lon)) <add>* Fix a bug in the new server renderer. ([#22617](https://github.com/facebook/react/pull/22617) by [@shuding](https://github.com/shuding)) <add>* Ignore function and symbol values inside custom elements on the server. ([#21157](https://github.com/facebook/react/pull/21157) by [@sebmarkbage](https://github.com/sebmarkbage)) <add> <add>### React DOM Test Utils <add> <add>* Throw when `act` is used in production. ([#21686](https://github.com/facebook/react/pull/21686) by [@acdlite](https://github.com/acdlite)) <add>* Support disabling spurious act warnings with `global.IS_REACT_ACT_ENVIRONMENT`. ([#22561](https://github.com/facebook/react/pull/22561) by [@acdlite](https://github.com/acdlite)) <add>* Expand act warning to cover all APIs that might schedule React work. ([#22607](https://github.com/facebook/react/pull/22607) by [@acdlite](https://github.com/acdlite)) <add>* Make `act` batch updates. ([#21797](https://github.com/facebook/react/pull/21797) by [@acdlite](https://github.com/acdlite)) <add>* Remove warning for dangling passive effects. ([#22609](https://github.com/facebook/react/pull/22609) by [@acdlite](https://github.com/acdlite)) <add> <add>### React Refresh <add> <add>* Track late-mounted roots in Fast Refresh. ([#22740](https://github.com/facebook/react/pull/22740) by [@anc95](https://github.com/anc95)) <add>* Add `exports` field to `package.json`. ([#23087](https://github.com/facebook/react/pull/23087) by [@otakustay](https://github.com/otakustay)) <add> <add>### Server Components (Experimental) <add> <add>* Add Server Context support. ([#23244](https://github.com/facebook/react/pull/22739) by [@salazarm](https://github.com/salazarm)) <add>* Add `lazy` support. ([#24068](https://github.com/facebook/react/pull/24068) by [@gnoff](https://github.com/gnoff)) <add>* Update webpack plugin for webpack 5 ([#22739](https://github.com/facebook/react/pull/22739) by [@michenly](https://github.com/michenly)) <add>* Fix a mistake in the Node loader. ([#22537](https://github.com/facebook/react/pull/22537) by [@btea](https://github.com/btea)) <add>* Use `globalThis` instead of `window` for edge environments. 
([#22777](https://github.com/facebook/react/pull/22777) by [@huozhi](https://github.com/huozhi)) <add> <ide> ## 17.0.2 (March 22, 2021) <ide> <ide> ### React DOM
1
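The changelog entries above lean heavily on the new root API. As a point of reference, here is a minimal TypeScript sketch of how `createRoot` and `hydrateRoot` from `react-dom/client` are called; the `App` component, the `mount` helper, and the `"root"` container id are illustrative placeholders rather than anything from the commit.

```typescript
// Minimal sketch of the React 18 root API named in the changelog above.
// `App`, `mount`, and the "root" element id are placeholders for illustration.
import * as React from "react";
import { createRoot, hydrateRoot } from "react-dom/client";

function App(): React.ReactElement {
  return React.createElement("h1", null, "Hello from React 18");
}

function mount(container: Element, serverRendered: boolean): void {
  const element = React.createElement(App);
  if (serverRendered) {
    // Adopt markup produced by the server renderer in place.
    hydrateRoot(container, element);
  } else {
    // Client-only rendering; replaces the legacy ReactDOM.render(element, container).
    createRoot(container).render(element);
  }
}

const container = document.getElementById("root");
if (container !== null) {
  mount(container, false);
}
```

Both calls return a root object, and `root.unmount()` takes over the role of the legacy `ReactDOM.unmountComponentAtNode`.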
Javascript
Javascript
split large image groups into smaller chunks
cae62341ac9bd94b5f4a03b139dc57141ca9cc8f
<ide><path>src/canvas.js <ide> var CanvasGraphics = (function CanvasGraphicsClosure() { <ide> 'setFillCMYKColor': true, <ide> 'paintJpegXObject': true, <ide> 'paintImageXObject': true, <add> 'paintInlineImageXObject': true, <add> 'paintInlineImageXObjectGroup': true, <ide> 'paintImageMaskXObject': true, <add> 'paintImageMaskXObjectGroup': true, <ide> 'shadingFill': true <ide> }, <ide> <ide><path>src/core.js <ide> var Page = (function PageClosure() { <ide> xref, handler, this.pageIndex, <ide> 'p' + this.pageIndex + '_'); <ide> <del> return pe.getOperatorList(contentStream, resources, dependency); <add> var list = pe.getOperatorList(contentStream, resources, dependency); <add> pe.optimizeQueue(list); <add> return list; <ide> }, <ide> extractTextContent: function Page_extractTextContent() { <ide> var handler = { <ide><path>src/evaluator.js <ide> var PartialEvaluator = (function PartialEvaluatorClosure() { <ide> }, handler, xref, resources, image, inline); <ide> } <ide> <del> function optimizeQueue() { <del> // grouping paintInlineImageXObject's into paintInlineImageXObjectGroup <del> // searching for (save, transform, paintInlineImageXObject, restore)+ <del> var MIN_IMAGES_COUNT = 10; <del> var MAX_WIDTH = 1000; <del> var IMAGE_PADDING = 1; <del> for (var i = 0, ii = fnArray.length; i < ii; i++) { <del> if (fnArray[i] === 'paintInlineImageXObject' && <del> fnArray[i - 2] === 'save' && fnArray[i - 1] === 'transform' && <del> fnArray[i + 1] === 'restore') { <del> var j = i - 2; <del> for (i += 2; i < ii && fnArray[i - 4] === fnArray[i]; i++) { <del> } <del> var count = (i - j) >> 2; <del> if (count < MIN_IMAGES_COUNT) { <del> continue; <del> } <del> // assuming that heights of those image is too small (~1 pixel) <del> // packing as much as possible by lines <del> var maxX = 0; <del> var map = [], maxLineHeight = 0; <del> var currentX = IMAGE_PADDING, currentY = IMAGE_PADDING; <del> for (var q = 0; q < count; q++) { <del> var transform = argsArray[j + (q << 2) + 1]; <del> var img = argsArray[j + (q << 2) + 2][0]; <del> if (currentX + img.width > MAX_WIDTH) { <del> // starting new line <del> maxX = Math.max(maxX, currentX); <del> currentY += maxLineHeight + 2 * IMAGE_PADDING; <del> currentX = 0; <del> maxLineHeight = 0; <del> } <del> map.push({ <del> transform: transform, <del> x: currentX, y: currentY, <del> w: img.width, h: img.height <del> }); <del> currentX += img.width + 2 * IMAGE_PADDING; <del> maxLineHeight = Math.max(maxLineHeight, img.height); <del> } <del> var imgWidth = Math.max(maxX, currentX) + IMAGE_PADDING; <del> var imgHeight = currentY + maxLineHeight + IMAGE_PADDING; <del> var imgData = new Uint8Array(imgWidth * imgHeight * 4); <del> var imgRowSize = imgWidth << 2; <del> for (var q = 0; q < count; q++) { <del> var data = argsArray[j + (q << 2) + 2][0].data; <del> // copy image by lines and extends pixels into padding <del> var rowSize = map[q].w << 2; <del> var dataOffset = 0; <del> var offset = (map[q].x + map[q].y * imgWidth) << 2; <del> imgData.set( <del> data.subarray(0, rowSize), offset - imgRowSize); <del> for (var k = 0, kk = map[q].h; k < kk; k++) { <del> imgData.set( <del> data.subarray(dataOffset, dataOffset + rowSize), offset); <del> dataOffset += rowSize; <del> offset += imgRowSize; <del> } <del> imgData.set( <del> data.subarray(dataOffset - rowSize, dataOffset), offset); <del> while (offset >= 0) { <del> data[offset - 4] = data[offset]; <del> data[offset - 3] = data[offset + 1]; <del> data[offset - 2] = data[offset + 2]; <del> data[offset - 1] = data[offset + 3]; 
<del> data[offset + rowSize] = data[offset + rowSize - 4]; <del> data[offset + rowSize + 1] = data[offset + rowSize - 3]; <del> data[offset + rowSize + 2] = data[offset + rowSize - 2]; <del> data[offset + rowSize + 3] = data[offset + rowSize - 1]; <del> offset -= imgRowSize; <del> } <del> } <del> // replacing queue items <del> fnArray.splice(j, count * 4, ['paintInlineImageXObjectGroup']); <del> argsArray.splice(j, count * 4, <del> [{width: imgWidth, height: imgHeight, data: imgData}, map]); <del> i = j; <del> ii = fnArray.length; <del> } <del> } <del> // grouping paintImageMaskXObject's into paintImageMaskXObjectGroup <del> // searching for (save, transform, paintImageMaskXObject, restore)+ <del> for (var i = 0, ii = fnArray.length; i < ii; i++) { <del> if (fnArray[i] === 'paintImageMaskXObject' && <del> fnArray[i - 2] === 'save' && fnArray[i - 1] === 'transform' && <del> fnArray[i + 1] === 'restore') { <del> var j = i - 2; <del> for (i += 2; i < ii && fnArray[i - 4] === fnArray[i]; i++) { <del> } <del> var count = (i - j) >> 2; <del> if (count < MIN_IMAGES_COUNT) { <del> continue; <del> } <del> var images = []; <del> for (var q = 0; q < count; q++) { <del> var transform = argsArray[j + (q << 2) + 1]; <del> var maskParams = argsArray[j + (q << 2) + 2]; <del> images.push({data: maskParams[0], width: maskParams[2], <del> height: maskParams[3], transform: transform, <del> inverseDecode: maskParams[1]}); <del> } <del> // replacing queue items <del> fnArray.splice(j, count * 4, ['paintImageMaskXObjectGroup']); <del> argsArray.splice(j, count * 4, [images]); <del> i = j; <del> ii = fnArray.length; <del> } <del> } <del> } <del> <ide> if (!queue) <ide> queue = {}; <ide> <ide> var PartialEvaluator = (function PartialEvaluatorClosure() { <ide> } <ide> } <ide> <del> optimizeQueue(); <del> <ide> return queue; <ide> }, <ide> <add> optimizeQueue: function PartialEvaluator_optimizeQueue(queue) { <add> var fnArray = queue.fnArray, argsArray = queue.argsArray; <add> // grouping paintInlineImageXObject's into paintInlineImageXObjectGroup <add> // searching for (save, transform, paintInlineImageXObject, restore)+ <add> var MIN_IMAGES_IN_INLINE_IMAGES_BLOCK = 10; <add> var MAX_IMAGES_IN_INLINE_IMAGES_BLOCK = 200; <add> var MAX_WIDTH = 1000; <add> var IMAGE_PADDING = 1; <add> for (var i = 0, ii = fnArray.length; i < ii; i++) { <add> if (fnArray[i] === 'paintInlineImageXObject' && <add> fnArray[i - 2] === 'save' && fnArray[i - 1] === 'transform' && <add> fnArray[i + 1] === 'restore') { <add> var j = i - 2; <add> for (i += 2; i < ii && fnArray[i - 4] === fnArray[i]; i++) { <add> } <add> var count = Math.min((i - j) >> 2, <add> MAX_IMAGES_IN_INLINE_IMAGES_BLOCK); <add> if (count < MIN_IMAGES_IN_INLINE_IMAGES_BLOCK) { <add> continue; <add> } <add> // assuming that heights of those image is too small (~1 pixel) <add> // packing as much as possible by lines <add> var maxX = 0; <add> var map = [], maxLineHeight = 0; <add> var currentX = IMAGE_PADDING, currentY = IMAGE_PADDING; <add> for (var q = 0; q < count; q++) { <add> var transform = argsArray[j + (q << 2) + 1]; <add> var img = argsArray[j + (q << 2) + 2][0]; <add> if (currentX + img.width > MAX_WIDTH) { <add> // starting new line <add> maxX = Math.max(maxX, currentX); <add> currentY += maxLineHeight + 2 * IMAGE_PADDING; <add> currentX = 0; <add> maxLineHeight = 0; <add> } <add> map.push({ <add> transform: transform, <add> x: currentX, y: currentY, <add> w: img.width, h: img.height <add> }); <add> currentX += img.width + 2 * IMAGE_PADDING; <add> maxLineHeight = 
Math.max(maxLineHeight, img.height); <add> } <add> var imgWidth = Math.max(maxX, currentX) + IMAGE_PADDING; <add> var imgHeight = currentY + maxLineHeight + IMAGE_PADDING; <add> var imgData = new Uint8Array(imgWidth * imgHeight * 4); <add> var imgRowSize = imgWidth << 2; <add> for (var q = 0; q < count; q++) { <add> var data = argsArray[j + (q << 2) + 2][0].data; <add> // copy image by lines and extends pixels into padding <add> var rowSize = map[q].w << 2; <add> var dataOffset = 0; <add> var offset = (map[q].x + map[q].y * imgWidth) << 2; <add> imgData.set( <add> data.subarray(0, rowSize), offset - imgRowSize); <add> for (var k = 0, kk = map[q].h; k < kk; k++) { <add> imgData.set( <add> data.subarray(dataOffset, dataOffset + rowSize), offset); <add> dataOffset += rowSize; <add> offset += imgRowSize; <add> } <add> imgData.set( <add> data.subarray(dataOffset - rowSize, dataOffset), offset); <add> while (offset >= 0) { <add> data[offset - 4] = data[offset]; <add> data[offset - 3] = data[offset + 1]; <add> data[offset - 2] = data[offset + 2]; <add> data[offset - 1] = data[offset + 3]; <add> data[offset + rowSize] = data[offset + rowSize - 4]; <add> data[offset + rowSize + 1] = data[offset + rowSize - 3]; <add> data[offset + rowSize + 2] = data[offset + rowSize - 2]; <add> data[offset + rowSize + 3] = data[offset + rowSize - 1]; <add> offset -= imgRowSize; <add> } <add> } <add> // replacing queue items <add> fnArray.splice(j, count * 4, ['paintInlineImageXObjectGroup']); <add> argsArray.splice(j, count * 4, <add> [{width: imgWidth, height: imgHeight, data: imgData}, map]); <add> i = j; <add> ii = fnArray.length; <add> } <add> } <add> // grouping paintImageMaskXObject's into paintImageMaskXObjectGroup <add> // searching for (save, transform, paintImageMaskXObject, restore)+ <add> var MIN_IMAGES_IN_MASKS_BLOCK = 10; <add> var MAX_IMAGES_IN_MASKS_BLOCK = 100; <add> for (var i = 0, ii = fnArray.length; i < ii; i++) { <add> if (fnArray[i] === 'paintImageMaskXObject' && <add> fnArray[i - 2] === 'save' && fnArray[i - 1] === 'transform' && <add> fnArray[i + 1] === 'restore') { <add> var j = i - 2; <add> for (i += 2; i < ii && fnArray[i - 4] === fnArray[i]; i++) { <add> } <add> var count = Math.min((i - j) >> 2, <add> MAX_IMAGES_IN_MASKS_BLOCK); <add> if (count < MIN_IMAGES_IN_MASKS_BLOCK) { <add> continue; <add> } <add> var images = []; <add> for (var q = 0; q < count; q++) { <add> var transform = argsArray[j + (q << 2) + 1]; <add> var maskParams = argsArray[j + (q << 2) + 2]; <add> images.push({data: maskParams[0], width: maskParams[2], <add> height: maskParams[3], transform: transform, <add> inverseDecode: maskParams[1]}); <add> } <add> // replacing queue items <add> fnArray.splice(j, count * 4, ['paintImageMaskXObjectGroup']); <add> argsArray.splice(j, count * 4, [images]); <add> i = j; <add> ii = fnArray.length; <add> } <add> } <add> }, <add> <ide> getTextContent: function PartialEvaluator_getTextContent( <ide> stream, resources, state) { <ide> var bidiTexts;
3
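The evaluator change above mainly relocates the existing grouping pass into an `optimizeQueue` method and caps how many images may be merged at once; the heart of it is a simple row-packing layout. Below is an illustrative TypeScript reimplementation of just that layout step, with the constants copied from the patch and the `packImages` helper and its types invented for the sketch; it is not the pdf.js code.

```typescript
// Standalone sketch of the row-packing layout used when grouping many tiny
// inline images into one composite image: place images left to right, wrap to a
// new row once MAX_WIDTH would be exceeded, keep one pixel of padding around each.
const MAX_WIDTH = 1000;
const IMAGE_PADDING = 1;

interface Placement { x: number; y: number; w: number; h: number; }

function packImages(sizes: Array<{ width: number; height: number }>): {
  atlasWidth: number; atlasHeight: number; placements: Placement[];
} {
  const placements: Placement[] = [];
  let maxX = 0;
  let currentX = IMAGE_PADDING;
  let currentY = IMAGE_PADDING;
  let maxLineHeight = 0;
  for (const img of sizes) {
    if (currentX + img.width > MAX_WIDTH) {
      // Start a new line, exactly as the optimizer does.
      maxX = Math.max(maxX, currentX);
      currentY += maxLineHeight + 2 * IMAGE_PADDING;
      currentX = 0;
      maxLineHeight = 0;
    }
    placements.push({ x: currentX, y: currentY, w: img.width, h: img.height });
    currentX += img.width + 2 * IMAGE_PADDING;
    maxLineHeight = Math.max(maxLineHeight, img.height);
  }
  return {
    atlasWidth: Math.max(maxX, currentX) + IMAGE_PADDING,
    atlasHeight: currentY + maxLineHeight + IMAGE_PADDING,
    placements,
  };
}

// Twelve 300x2 strips pack into four rows of three.
console.log(packImages(Array.from({ length: 12 }, () => ({ width: 300, height: 2 }))));
```

The new `MAX_IMAGES_IN_INLINE_IMAGES_BLOCK` and `MAX_IMAGES_IN_MASKS_BLOCK` caps simply bound how many consecutive save/transform/paint/restore quadruples feed into one such group.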
Python
Python
fix issue with parsing error logs in the kpo
4752fb3eb8ac8827e6af6022fbcf751829ecb17a
<ide><path>airflow/providers/cncf/kubernetes/utils/pod_launcher.py <ide> import math <ide> import time <ide> from datetime import datetime as dt <del>from typing import Optional, Tuple <add>from typing import Optional, Tuple, Union <ide> <ide> import pendulum <ide> import tenacity <ide> from kubernetes import client, watch <ide> from kubernetes.client.models.v1_pod import V1Pod <ide> from kubernetes.client.rest import ApiException <ide> from kubernetes.stream import stream as kubernetes_stream <add>from pendulum import Date, DateTime, Duration, Time <add>from pendulum.parsing.exceptions import ParserError <ide> from requests.exceptions import BaseHTTPError <ide> <ide> from airflow.exceptions import AirflowException <ide> def monitor_pod(self, pod: V1Pod, get_logs: bool) -> Tuple[State, Optional[str]] <ide> :param get_logs: whether to read the logs locally <ide> :return: Tuple[State, Optional[str]] <ide> """ <del> if get_logs: <add> if get_logs: # pylint: disable=too-many-nested-blocks <ide> read_logs_since_sec = None <ide> last_log_time = None <ide> while True: <ide> logs = self.read_pod_logs(pod, timestamps=True, since_seconds=read_logs_since_sec) <ide> for line in logs: <ide> timestamp, message = self.parse_log_line(line.decode('utf-8')) <del> last_log_time = pendulum.parse(timestamp) <ide> self.log.info(message) <add> if timestamp: <add> last_log_time = timestamp <ide> time.sleep(1) <ide> <ide> if not self.base_container_is_running(pod): <ide> def monitor_pod(self, pod: V1Pod, get_logs: bool) -> Tuple[State, Optional[str]] <ide> time.sleep(2) <ide> return self._task_status(self.read_pod(pod)), result <ide> <del> def parse_log_line(self, line: str) -> Tuple[str, str]: <add> def parse_log_line(self, line: str) -> Tuple[Optional[Union[Date, Time, DateTime, Duration]], str]: <ide> """ <ide> Parse K8s log line and returns the final state <ide> <ide> def parse_log_line(self, line: str) -> Tuple[str, str]: <ide> raise Exception(f'Log not in "{{timestamp}} {{log}}" format. Got: {line}') <ide> timestamp = line[:split_at] <ide> message = line[split_at + 1 :].rstrip() <del> return timestamp, message <add> try: <add> last_log_time = pendulum.parse(timestamp) <add> except ParserError: <add> self.log.error("Error parsing timestamp. 
Will continue execution but won't update timestamp") <add> return None, line <add> return last_log_time, message <ide> <ide> def _task_status(self, event): <ide> self.log.info('Event: %s had an event of type %s', event.metadata.name, event.status.phase) <ide><path>tests/providers/cncf/kubernetes/utils/test_pod_launcher.py <ide> import unittest <ide> from unittest import mock <ide> <add>import pendulum <ide> import pytest <ide> from kubernetes.client.rest import ApiException <ide> from requests.exceptions import BaseHTTPError <ide> def test_read_pod_retries_fails(self): <ide> self.pod_launcher.read_pod(mock.sentinel) <ide> <ide> def test_parse_log_line(self): <del> timestamp, message = self.pod_launcher.parse_log_line( <del> '2020-10-08T14:16:17.793417674Z Valid message\n' <del> ) <del> <del> assert timestamp == '2020-10-08T14:16:17.793417674Z' <del> assert message == 'Valid message' <add> log_message = "This should return no timestamp" <add> timestamp, line = self.pod_launcher.parse_log_line(log_message) <add> self.assertEqual(timestamp, None) <add> self.assertEqual(line, log_message) <add> <add> real_timestamp = "2020-10-08T14:16:17.793417674Z" <add> timestamp, line = self.pod_launcher.parse_log_line(" ".join([real_timestamp, log_message])) <add> self.assertEqual(timestamp, pendulum.parse(real_timestamp)) <add> self.assertEqual(line, log_message) <ide> <ide> with pytest.raises(Exception): <ide> self.pod_launcher.parse_log_line('2020-10-08T14:16:17.793417674ZInvalidmessage\n')
2
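To make the behaviour of the fix above concrete: the operator now only updates its last-read log time when the prefix before the first space really parses as a timestamp, and otherwise logs the raw line and carries on. The TypeScript sketch below mirrors that control flow, but swaps `pendulum.parse` for a plain RFC 3339 regex and returns the timestamp as a string, so it is a simplified stand-in rather than the Airflow code.

```typescript
// Simplified sketch of the fallback added in the patch above: treat the prefix
// before the first space as a timestamp only when it looks like RFC 3339,
// otherwise keep executing and hand back the untouched line.
const RFC3339 =
  /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})$/;

function parseLogLine(line: string): [string | null, string] {
  const splitAt = line.indexOf(" ");
  if (splitAt === -1) {
    throw new Error(`Log not in "{timestamp} {log}" format. Got: ${line}`);
  }
  const timestamp = line.slice(0, splitAt);
  const message = line.slice(splitAt + 1).trimEnd();
  if (!RFC3339.test(timestamp)) {
    // Parsing failed: continue, but do not update the last-read timestamp.
    return [null, line];
  }
  return [timestamp, message];
}

console.log(parseLogLine("2020-10-08T14:16:17.793417674Z Valid message\n"));
// -> ["2020-10-08T14:16:17.793417674Z", "Valid message"]
console.log(parseLogLine("This should return no timestamp"));
// -> [null, "This should return no timestamp"]
```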
Javascript
Javascript
add the type1 subrs into the cff local subrs index
159c986e6e6858b87a609af068d65ca5d9f8aefd
<ide><path>fonts.js <ide> CFF.prototype = { <ide> "hlineto": 6, <ide> "vlineto": 7, <ide> "rrcurveto": 8, <add> "callsubr": 10, <add> "return": 11, <ide> "endchar": 14, <ide> "rmoveto": 21, <ide> "hmoveto": 22, <ide> "vhcurveto": 30, <ide> "hvcurveto": 31, <ide> }, <ide> <del> flattenCharstring: function flattenCharstring(charstring, subrs) { <add> flattenCharstring: function flattenCharstring(charstring) { <ide> var i = 0; <ide> while (true) { <ide> var obj = charstring[i]; <ide> CFF.prototype = { <ide> <ide> if (obj.charAt) { <ide> switch (obj) { <del> case "callsubr": <del> var subr = subrs[charstring[i - 1]]; <del> if (subr.length > 1) { <del> subr = this.flattenCharstring(subr, subrs); <del> subr.pop(); <del> charstring.splice(i - 1, 2, subr); <del> } else { <del> charstring.splice(i - 1, 2); <del> } <del> i -= 1; <del> break; <del> <ide> case "callothersubr": <ide> var index = charstring[i - 1]; <ide> var count = charstring[i - 2]; <ide> var data = charstring[i - 3]; <ide> <del> // XXX The callothersubr needs to support at least the 3 defaults <del> // otherSubrs of the spec <del> if (index != 3) <del> error("callothersubr for index: " + index + " (" + charstring + ")"); <del> <del> if (!data) { <del> charstring.splice(i - 2, 4, "pop", 3); <del> i -= 3; <del> } else { <del> // 5 to remove the arguments, the callothersubr call and the pop command <del> charstring.splice(i - 3, 5, 3); <add> // If the flex mechanishm is not used in a font program, Adobe <add> // state that that entries 0, 1 and 2 can simply be replace by <add> // {}, which means that we can simply ignore them. <add> if (index < 3) { <ide> i -= 3; <add> continue; <add> } <add> <add> // This is the same things about hint replacment, if it is not used <add> // entry 3 can be replaced by {} <add> if (index == 3) { <add> if (!data) { <add> charstring.splice(i - 2, 4, "pop", 3); <add> i -= 3; <add> } else { <add> // 5 to remove the arguments, the callothersubr call and the pop command <add> charstring.splice(i - 3, 5, 3); <add> i -= 3; <add> } <ide> } <ide> break; <ide> <ide> CFF.prototype = { <ide> i -= 2; <ide> break; <ide> <del> case "pop": <del> if (i) <del> charstring.splice(i - 2, 2); <del> else <del> charstring.splice(i - 1, 1); <del> i -= 1; <del> break; <del> <del> <ide> case "hsbw": <ide> var charWidthVector = charstring[i - 1]; <ide> var leftSidebearing = charstring[i - 2]; <ide> CFF.prototype = { <ide> var glyphsCount = charstrings.length; <ide> for (var i = 0; i < glyphsCount; i++) { <ide> var charstring = charstrings[i].charstring; <del> glyphs.push(this.flattenCharstring(charstring.slice(), subrs)); <add> glyphs.push(this.flattenCharstring(charstring.slice())); <ide> } <ide> <ide> // Create a CFF font data <ide> CFF.prototype = { <ide> 247, 32, 11, <ide> 247, 10, 161, 147, 154, 150, 143, 12, 13, <ide> 139, 12, 14, <del> 28, 0, 55, 19 <add> 28, 0, 55, 19 // Subrs offset <ide> ]); <ide> cff.set(privateData, currentOffset); <ide> currentOffset += privateData.length; <ide> <del> // Dump shit at the end of the file <del> var shit = [ <del> 0x00, 0x01, 0x01, 0x01, <del> 0x13, 0x5D, 0x65, 0x64, <del> 0x5E, 0x5B, 0xAF, 0x66, <del> 0xBA, 0xBB, 0xB1, 0xB0, <del> 0xB9, 0xBA, 0x65, 0xB2, <del> 0x5C, 0x1F, 0x0B <del> ]; <del> cff.set(shit, currentOffset); <del> currentOffset += shit.length; <add> // Local Subrs <add> var flattenedSubrs = []; <add> <add> var bias = 0; <add> var subrsCount = subrs.length; <add> if (subrsCount < 1240) <add> bias = 107; <add> else if (subrsCount < 33900) <add> bias = 1131; <add> else <add> bias 
= 32768; <add> <add> // Add a bunch of empty subrs to deal with the Type2 bias <add> for (var i = 0; i < bias; i++) <add> flattenedSubrs.push([0x0B]); <add> <add> for (var i = 0; i < subrsCount; i++) { <add> var subr = subrs[i]; <add> flattenedSubrs.push(this.flattenCharstring(subr)); <add> } <add> <add> var subrsData = this.createCFFIndexHeader(flattenedSubrs, true); <add> cff.set(subrsData, currentOffset); <add> currentOffset += subrsData.length; <ide> <ide> var fontData = []; <ide> for (var i = 0; i < currentOffset; i++)
1
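The magic numbers in the patch above come from the Type2 charstring bias rule: the operand of `callsubr` is offset by a bias derived from how many local subrs the font defines, which is why the converter front-pads the Subrs INDEX with empty `return` subrs. A small TypeScript sketch of that rule follows; `type2SubrBias` and `buildLocalSubrs` are names made up for the illustration.

```typescript
// Bias thresholds as used in the patch (and specified for Type2 charstrings):
// the subr index actually executed is (operand + bias), so the operand space
// shifts depending on how many subrs exist.
function type2SubrBias(subrCount: number): number {
  if (subrCount < 1240) return 107;
  if (subrCount < 33900) return 1131;
  return 32768;
}

// Front-padding with `bias` empty subrs (0x0B is the `return` operator) lets the
// original Type1 subr numbers keep resolving to the right charstrings after the
// interpreter applies the bias.
function buildLocalSubrs(type1Subrs: number[][]): number[][] {
  const bias = type2SubrBias(type1Subrs.length);
  const padded: number[][] = [];
  for (let i = 0; i < bias; i++) {
    padded.push([0x0b]);
  }
  return padded.concat(type1Subrs);
}

console.log(type2SubrBias(5));    // 107
console.log(type2SubrBias(2000)); // 1131
console.log(buildLocalSubrs([[0x0e]]).length); // 108: 107 empty subrs + 1 real one
```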
Javascript
Javascript
suppress private property accesses
94845b57834b8d73c0abb5c12319c3566f1ce1b7
<ide><path>Libraries/Components/Keyboard/__tests__/Keyboard-test.js <ide> describe('Keyboard', () => { <ide> <ide> // $FlowFixMe <ide> expect(Keyboard._subscriber).toBe(KeyboardEventEmitter._subscriber); <add> // $FlowFixMe Cannot access private property <ide> expect(Keyboard._nativeModule).toBe(KeyboardEventEmitter._nativeModule); <ide> }); <ide> <ide><path>Libraries/Lists/VirtualizedSectionList.js <ide> class VirtualizedSectionList< <ide> } <ide> let viewOffset = params.viewOffset || 0; <ide> if (params.itemIndex > 0 && this.props.stickySectionHeadersEnabled) { <add> // $FlowFixMe Cannot access private property <ide> const frame = this._listRef._getFrameMetricsApprox( <ide> index - params.itemIndex, <ide> );
2
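For readers unfamiliar with the suppression comments being added above: `$FlowFixMe` silences Flow for the single statement that follows it. A rough TypeScript analogue of the same line-scoped pattern is sketched below (TypeScript, not Flow, and the `Emitter` class is invented for the example).

```typescript
// Rough analogue of the line-scoped suppression used above: the comment applies
// only to the statement immediately below it, so the rest of the file stays checked.
class Emitter {
  private subscriber = { count: 0 };
}

const emitter = new Emitter();

// @ts-expect-error Cannot access private property (intentional, mirrors the tests)
console.log(emitter.subscriber.count);
```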
Python
Python
use planids map, but with correct values
a146be2c01d4b8646ca720018085cd0b13b34610
<ide><path>libcloud/common/linode.py <ide> # Constants that map a RAM figure to a PlanID (updated 2014-08-25) <ide> LINODE_PLAN_IDS = {1024: '1', <ide> 2048: '2', <del> 4096: '4', <del> 8192: '6', <del> 16384: '7', <add> 4096: '3', <add> 8192: '4', <add> 12288: '5', <add> 16384: '6', <add> 24576: '7', <ide> 32768: '8', <ide> 49152: '9', <del> 65536: '10', <del> 98304: '12'} <add> 61440: '10', <add> 65536: '11', <add> 81920: '12'} <ide> <ide> # Available filesystems for disk creation <ide> LINODE_DISK_FILESYSTEMS = ['ext3', 'ext4', 'swap', 'raw']
1
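Shown below, purely for illustration, is how a RAM figure in MB maps onto a Linode PlanID with the corrected table. The object literal is copied from the patch; the `planIdForRam` helper and its error message are assumptions for the sketch, not part of libcloud.

```typescript
// Corrected RAM-to-PlanID table from the patch, with a lookup helper around it.
const LINODE_PLAN_IDS: Record<number, string> = {
  1024: "1", 2048: "2", 4096: "3", 8192: "4", 12288: "5", 16384: "6",
  24576: "7", 32768: "8", 49152: "9", 61440: "10", 65536: "11", 81920: "12",
};

function planIdForRam(ramMb: number): string {
  const planId = LINODE_PLAN_IDS[ramMb];
  if (planId === undefined) {
    throw new Error(`No Linode plan with ${ramMb} MB of RAM`);
  }
  return planId;
}

console.log(planIdForRam(4096)); // "3" (the stale table wrongly said "4")
console.log(planIdForRam(8192)); // "4" (previously "6")
```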
Ruby
Ruby
add missing require
0f39f18b43082d7a40bf042d5db95b2f0c8d39eb
<ide><path>actionpack/lib/action_dispatch/routing/mapper.rb <ide> require "active_support/core_ext/hash/slice" <ide> require "active_support/core_ext/enumerable" <ide> require "active_support/core_ext/array/extract_options" <add>require "active_support/core_ext/regexp" <ide> require "action_dispatch/routing/redirection" <ide> require "action_dispatch/routing/endpoint" <ide>
1
Text
Text
improve arabic translation
46e2a1b262d211cd9936a3cb16e3667cb8d7217b
<ide><path>curriculum/challenges/arabic/01-responsive-web-design/applied-accessibility/add-a-text-alternative-to-images-for-visually-impaired-accessibility.arabic.md <ide> localeTitle: إضافة نص بديل للصور لذوي ضعاف البصر <ide> --- <ide> <ide> ## Description <del><section id="description"> من المحتمل أنك رأيت سمة <code>alt</code> على علامة <code>img</code> في تحديات أخرى. يصف النص <code>Alt</code> محتوى الصورة ويوفر بديلًا نصيًا. يساعد ذلك في حالة فشل تحميل أو عدم تمكن المستخدم من رؤية الصورة. ويستخدم أيضًا من قِبل محركات البحث لفهم ما تحتويه الصورة لتضمينها في نتائج البحث. في ما يلي مثال: <code>&lt;img src=&quot;importantLogo.jpeg&quot; alt=&quot;Company logo&quot;&gt;</code> يعتمد الأشخاص الذين يعانون من إعاقات بصرية على قارئي الشاشة لتحويل محتوى الويب إلى واجهة صوتية. لن يحصلوا على المعلومات إذا تم تقديمها فقط بصريًا. بالنسبة للصور ، يمكن لقارئي الشاشة الوصول إلى سمة <code>alt</code> وقراءة محتوياتها لتقديم المعلومات الأساسية. النص <code>alt</code> الجيد قصير ولكن وصفي ، ويقصد به أن ينقل بإيجاز معنى الصورة. يجب عليك دائمًا تضمين سمة <code>alt</code> على صورتك. وفقًا لمواصفات HTML5 ، يعتبر هذا الآن إلزاميًا. </section> <add><section id="description"> من المحتمل أنك رأيت سمة alt على علامة img في تحديات أخرى. يصف النص Alt محتوى الصورة ويوفر بديلًا نصيًا. يساعد ذلك في حالة فشل تحميل أو عدم تمكن المستخدم من رؤية الصورة. ويستخدم أيضًا من قِبل محركات البحث لفهم ما تحتويه الصورة لتضمينها في نتائج البحث. في ما يلي مثال: <img src="importantLogo.jpeg" alt="Company logo"> يعتمد الأشخاص الذين يعانون من إعاقات بصرية على قارئ الشاشة لتحويل محتوى الويب إلى واجهة صوتية. لن يحصلوا على المعلومات إذا تم تقديمها فقط بصريًا. بالنسبة للصور، يمكن لقارئ الشاشة الوصول إلى سمة alt وقراءة محتوياتها لتقديم المعلومات الأساسية. النص alt الجيد قصير ولكن وصفي، ويقصد به أن ينقل بإيجاز معنى الصورة. يجب عليك دائمًا تضمين سمة alt على صورتك. وفقًا لمواصفات HTML5، يعتبر هذا الآن إلزاميًا. </section> <ide> <ide> ## Instructions <del><section id="instructions"> كامبر كات هو عبارة عن نينجا مشفر ونينجا فعليين ، ويقوم ببناء موقع ويب لتبادل معرفته. تظهر صورة الملف الشخصي التي يريد استخدامها مهاراته ، ويجب أن تحظى بتقدير جميع زائري الموقع. إضافة سمة <code>alt</code> في علامة <code>img</code> ، التي تشرح كامبر كات تقوم بالكاراتيه. (لا ترتبط الصورة <code>src</code> بملف فعلي ، لذلك يجب أن ترى النص <code>alt</code> في الشاشة.) </section> <add><section id="instructions"> صادف أن يكون Camper Cat بمثابة مبرمج نينجا ونينجا فعلي، يقوم ببناء موقع على شبكة الإنترنت لتبادل معرفته. تظهر صورة الملف الشخصي التي يريد استخدامها مهاراته ويجب أن تكون موضع تقدير من قبل جميع زوار الموقع. أضف سمة بديلة في علامة img، والتي توضح أن Camper Cat يقوم بالكاراتيه. (لترى النص البديل في الشاشة، لا ترتبط الصورة src بملف حقيقي.) <add> </section> <ide> <ide> ## Tests <ide> <section id='tests'>
1
Go
Go
fix gcc warning
7da12bcfa9db4d84b2c547bee93dafeaead15b16
<ide><path>pkg/devicemapper/devmapper_wrapper.go <ide> static void log_cb(int level, const char *file, int line, int dm_errno_or_class, <ide> { <ide> char *buffer = NULL; <ide> va_list ap; <add> int ret; <ide> <ide> va_start(ap, f); <del> vasprintf(&buffer, f, ap); <add> ret = vasprintf(&buffer, f, ap); <ide> va_end(ap); <add> if (ret < 0) { <add> // memory allocation failed -- should never happen? <add> return; <add> } <ide> <ide> DevmapperLogCallback(level, (char *)file, line, dm_errno_or_class, buffer); <ide> free(buffer);
1
Go
Go
add support for udp (closes #33)
fac0d87d00ada08309ea3b82cae69beeef637c89
<ide><path>container.go <ide> func ParseRun(args []string, capabilities *Capabilities) (*Config, *HostConfig, <ide> return config, hostConfig, cmd, nil <ide> } <ide> <add>type portMapping map[string]string <add> <ide> type NetworkSettings struct { <ide> IPAddress string <ide> IPPrefixLen int <ide> Gateway string <ide> Bridge string <del> PortMapping map[string]string <add> PortMapping map[string]portMapping <ide> } <ide> <ide> // String returns a human-readable description of the port mapping defined in the settings <ide> func (settings *NetworkSettings) PortMappingHuman() string { <ide> var mapping []string <del> for private, public := range settings.PortMapping { <add> for private, public := range settings.PortMapping["Tcp"] { <ide> mapping = append(mapping, fmt.Sprintf("%s->%s", public, private)) <ide> } <add> for private, public := range settings.PortMapping["Udp"] { <add> mapping = append(mapping, fmt.Sprintf("%s->%s/udp", public, private)) <add> } <ide> sort.Strings(mapping) <ide> return strings.Join(mapping, ", ") <ide> } <ide> func (container *Container) allocateNetwork() error { <ide> if err != nil { <ide> return err <ide> } <del> container.NetworkSettings.PortMapping = make(map[string]string) <add> container.NetworkSettings.PortMapping = make(map[string]portMapping) <add> container.NetworkSettings.PortMapping["Tcp"] = make(portMapping) <add> container.NetworkSettings.PortMapping["Udp"] = make(portMapping) <ide> for _, spec := range container.Config.PortSpecs { <ide> nat, err := iface.AllocatePort(spec) <ide> if err != nil { <ide> iface.Release() <ide> return err <ide> } <del> container.NetworkSettings.PortMapping[strconv.Itoa(nat.Backend)] = strconv.Itoa(nat.Frontend) <add> proto := strings.Title(nat.Proto) <add> backend, frontend := strconv.Itoa(nat.Backend), strconv.Itoa(nat.Frontend) <add> container.NetworkSettings.PortMapping[proto][backend] = frontend <ide> } <ide> container.network = iface <ide> container.NetworkSettings.Bridge = container.runtime.networkManager.bridgeIface <ide><path>network.go <ide> import ( <ide> "errors" <ide> "fmt" <ide> "github.com/dotcloud/docker/utils" <del> "io" <ide> "log" <ide> "net" <ide> "os/exec" <ide> func getIfaceAddr(name string) (net.Addr, error) { <ide> // up iptables rules. 
<ide> // It keeps track of all mappings and is able to unmap at will <ide> type PortMapper struct { <del> mapping map[int]net.TCPAddr <del> proxies map[int]net.Listener <add> tcpMapping map[int]*net.TCPAddr <add> tcpProxies map[int]Proxy <add> udpMapping map[int]*net.UDPAddr <add> udpProxies map[int]Proxy <ide> } <ide> <ide> func (mapper *PortMapper) cleanup() error { <ide> func (mapper *PortMapper) cleanup() error { <ide> iptables("-t", "nat", "-D", "OUTPUT", "-j", "DOCKER") <ide> iptables("-t", "nat", "-F", "DOCKER") <ide> iptables("-t", "nat", "-X", "DOCKER") <del> mapper.mapping = make(map[int]net.TCPAddr) <del> mapper.proxies = make(map[int]net.Listener) <add> mapper.tcpMapping = make(map[int]*net.TCPAddr) <add> mapper.tcpProxies = make(map[int]Proxy) <add> mapper.udpMapping = make(map[int]*net.UDPAddr) <add> mapper.udpProxies = make(map[int]Proxy) <ide> return nil <ide> } <ide> <ide> func (mapper *PortMapper) setup() error { <ide> return nil <ide> } <ide> <del>func (mapper *PortMapper) iptablesForward(rule string, port int, dest net.TCPAddr) error { <del> return iptables("-t", "nat", rule, "DOCKER", "-p", "tcp", "--dport", strconv.Itoa(port), <del> "-j", "DNAT", "--to-destination", net.JoinHostPort(dest.IP.String(), strconv.Itoa(dest.Port))) <add>func (mapper *PortMapper) iptablesForward(rule string, port int, proto string, dest_addr string, dest_port int) error { <add> return iptables("-t", "nat", rule, "DOCKER", "-p", proto, "--dport", strconv.Itoa(port), <add> "-j", "DNAT", "--to-destination", net.JoinHostPort(dest_addr, strconv.Itoa(dest_port))) <ide> } <ide> <del>func (mapper *PortMapper) Map(port int, dest net.TCPAddr) error { <del> if err := mapper.iptablesForward("-A", port, dest); err != nil { <del> return err <del> } <del> <del> mapper.mapping[port] = dest <del> listener, err := net.Listen("tcp", fmt.Sprintf("127.0.0.1:%d", port)) <del> if err != nil { <del> mapper.Unmap(port) <del> return err <del> } <del> mapper.proxies[port] = listener <del> go proxy(listener, "tcp", dest.String()) <del> return nil <del>} <del> <del>// proxy listens for socket connections on `listener`, and forwards them unmodified <del>// to `proto:address` <del>func proxy(listener net.Listener, proto, address string) error { <del> utils.Debugf("proxying to %s:%s", proto, address) <del> defer utils.Debugf("Done proxying to %s:%s", proto, address) <del> for { <del> utils.Debugf("Listening on %s", listener) <del> src, err := listener.Accept() <add>func (mapper *PortMapper) Map(port int, backendAddr net.Addr) error { <add> if _, isTCP := backendAddr.(*net.TCPAddr); isTCP { <add> backendPort := backendAddr.(*net.TCPAddr).Port <add> backendIP := backendAddr.(*net.TCPAddr).IP <add> if err := mapper.iptablesForward("-A", port, "tcp", backendIP.String(), backendPort); err != nil { <add> return err <add> } <add> mapper.tcpMapping[port] = backendAddr.(*net.TCPAddr) <add> proxy, err := NewProxy(&net.TCPAddr{IP: net.IPv4(127, 0, 0, 1), Port: port}, backendAddr) <ide> if err != nil { <add> mapper.Unmap(port, "tcp") <ide> return err <ide> } <del> utils.Debugf("Connecting to %s:%s", proto, address) <del> dst, err := net.Dial(proto, address) <add> mapper.tcpProxies[port] = proxy <add> go proxy.Run() <add> } else { <add> backendPort := backendAddr.(*net.UDPAddr).Port <add> backendIP := backendAddr.(*net.UDPAddr).IP <add> if err := mapper.iptablesForward("-A", port, "udp", backendIP.String(), backendPort); err != nil { <add> return err <add> } <add> mapper.udpMapping[port] = backendAddr.(*net.UDPAddr) <add> proxy, err := 
NewProxy(&net.UDPAddr{IP: net.IPv4(127, 0, 0, 1), Port: port}, backendAddr) <ide> if err != nil { <del> log.Printf("Error connecting to %s:%s: %s", proto, address, err) <del> src.Close() <del> continue <add> mapper.Unmap(port, "udp") <add> return err <ide> } <del> utils.Debugf("Connected to backend, splicing") <del> splice(src, dst) <add> mapper.udpProxies[port] = proxy <add> go proxy.Run() <ide> } <add> return nil <ide> } <ide> <del>func halfSplice(dst, src net.Conn) error { <del> _, err := io.Copy(dst, src) <del> // FIXME: on EOF from a tcp connection, pass WriteClose() <del> dst.Close() <del> src.Close() <del> return err <del>} <del> <del>func splice(a, b net.Conn) { <del> go halfSplice(a, b) <del> go halfSplice(b, a) <del>} <del> <del>func (mapper *PortMapper) Unmap(port int) error { <del> dest, ok := mapper.mapping[port] <del> if !ok { <del> return errors.New("Port is not mapped") <del> } <del> if proxy, exists := mapper.proxies[port]; exists { <del> proxy.Close() <del> delete(mapper.proxies, port) <del> } <del> if err := mapper.iptablesForward("-D", port, dest); err != nil { <del> return err <add>func (mapper *PortMapper) Unmap(port int, proto string) error { <add> if proto == "tcp" { <add> backendAddr, ok := mapper.tcpMapping[port] <add> if !ok { <add> return fmt.Errorf("Port tcp/%v is not mapped", port) <add> } <add> if proxy, exists := mapper.tcpProxies[port]; exists { <add> proxy.Close() <add> delete(mapper.tcpProxies, port) <add> } <add> if err := mapper.iptablesForward("-D", port, proto, backendAddr.IP.String(), backendAddr.Port); err != nil { <add> return err <add> } <add> delete(mapper.tcpMapping, port) <add> } else { <add> backendAddr, ok := mapper.udpMapping[port] <add> if !ok { <add> return fmt.Errorf("Port udp/%v is not mapped", port) <add> } <add> if proxy, exists := mapper.udpProxies[port]; exists { <add> proxy.Close() <add> delete(mapper.udpProxies, port) <add> } <add> if err := mapper.iptablesForward("-D", port, proto, backendAddr.IP.String(), backendAddr.Port); err != nil { <add> return err <add> } <add> delete(mapper.udpMapping, port) <ide> } <del> delete(mapper.mapping, port) <ide> return nil <ide> } <ide> <ide> type NetworkInterface struct { <ide> Gateway net.IP <ide> <ide> manager *NetworkManager <del> extPorts []int <add> extPorts []*Nat <ide> } <ide> <ide> // Allocate an external TCP port and map it to the interface <ide> func (iface *NetworkInterface) AllocatePort(spec string) (*Nat, error) { <ide> if err != nil { <ide> return nil, err <ide> } <del> // Allocate a random port if Frontend==0 <del> extPort, err := iface.manager.portAllocator.Acquire(nat.Frontend) <del> if err != nil { <del> return nil, err <del> } <del> nat.Frontend = extPort <del> if err := iface.manager.portMapper.Map(nat.Frontend, net.TCPAddr{IP: iface.IPNet.IP, Port: nat.Backend}); err != nil { <del> iface.manager.portAllocator.Release(nat.Frontend) <del> return nil, err <add> <add> if nat.Proto == "tcp" { <add> extPort, err := iface.manager.tcpPortAllocator.Acquire(nat.Frontend) <add> if err != nil { <add> return nil, err <add> } <add> backend := &net.TCPAddr{IP: iface.IPNet.IP, Port: nat.Backend} <add> if err := iface.manager.portMapper.Map(extPort, backend); err != nil { <add> iface.manager.tcpPortAllocator.Release(extPort) <add> return nil, err <add> } <add> nat.Frontend = extPort <add> } else { <add> extPort, err := iface.manager.udpPortAllocator.Acquire(nat.Frontend) <add> if err != nil { <add> return nil, err <add> } <add> backend := &net.UDPAddr{IP: iface.IPNet.IP, Port: nat.Backend} 
<add> if err := iface.manager.portMapper.Map(extPort, backend); err != nil { <add> iface.manager.udpPortAllocator.Release(extPort) <add> return nil, err <add> } <add> nat.Frontend = extPort <ide> } <del> iface.extPorts = append(iface.extPorts, nat.Frontend) <add> iface.extPorts = append(iface.extPorts, nat) <add> <ide> return nat, nil <ide> } <ide> <ide> type Nat struct { <ide> func parseNat(spec string) (*Nat, error) { <ide> var nat Nat <ide> <add> if strings.Contains(spec, "/") { <add> specParts := strings.Split(spec, "/") <add> if len(specParts) != 2 { <add> return nil, fmt.Errorf("Invalid port format.") <add> } <add> proto := specParts[1] <add> spec = specParts[0] <add> if proto != "tcp" && proto != "udp" { <add> return nil, fmt.Errorf("Invalid port format: unknown protocol %v.", proto) <add> } <add> nat.Proto = proto <add> } else { <add> nat.Proto = "tcp" <add> } <add> <ide> if strings.Contains(spec, ":") { <ide> specParts := strings.Split(spec, ":") <ide> if len(specParts) != 2 { <ide> func parseNat(spec string) (*Nat, error) { <ide> } <ide> nat.Backend = int(port) <ide> } <del> nat.Proto = "tcp" <add> <ide> return &nat, nil <ide> } <ide> <ide> // Release: Network cleanup - release all resources <ide> func (iface *NetworkInterface) Release() { <del> for _, port := range iface.extPorts { <del> if err := iface.manager.portMapper.Unmap(port); err != nil { <del> log.Printf("Unable to unmap port %v: %v", port, err) <add> for _, nat := range iface.extPorts { <add> utils.Debugf("Unmaping %v/%v", nat.Proto, nat.Frontend) <add> if err := iface.manager.portMapper.Unmap(nat.Frontend, nat.Proto); err != nil { <add> log.Printf("Unable to unmap port %v/%v: %v", nat.Proto, nat.Frontend, err) <ide> } <del> if err := iface.manager.portAllocator.Release(port); err != nil { <del> log.Printf("Unable to release port %v: %v", port, err) <add> if nat.Proto == "tcp" { <add> if err := iface.manager.tcpPortAllocator.Release(nat.Frontend); err != nil { <add> log.Printf("Unable to release port tcp/%v: %v", nat.Frontend, err) <add> } <add> } else if err := iface.manager.udpPortAllocator.Release(nat.Frontend); err != nil { <add> log.Printf("Unable to release port udp/%v: %v", nat.Frontend, err) <ide> } <del> <ide> } <ide> <ide> iface.manager.ipAllocator.Release(iface.IPNet.IP) <ide> type NetworkManager struct { <ide> bridgeIface string <ide> bridgeNetwork *net.IPNet <ide> <del> ipAllocator *IPAllocator <del> portAllocator *PortAllocator <del> portMapper *PortMapper <add> ipAllocator *IPAllocator <add> tcpPortAllocator *PortAllocator <add> udpPortAllocator *PortAllocator <add> portMapper *PortMapper <ide> } <ide> <ide> // Allocate a network interface <ide> func newNetworkManager(bridgeIface string) (*NetworkManager, error) { <ide> <ide> ipAllocator := newIPAllocator(network) <ide> <del> portAllocator, err := newPortAllocator() <add> tcpPortAllocator, err := newPortAllocator() <add> if err != nil { <add> return nil, err <add> } <add> udpPortAllocator, err := newPortAllocator() <ide> if err != nil { <ide> return nil, err <ide> } <ide> func newNetworkManager(bridgeIface string) (*NetworkManager, error) { <ide> } <ide> <ide> manager := &NetworkManager{ <del> bridgeIface: bridgeIface, <del> bridgeNetwork: network, <del> ipAllocator: ipAllocator, <del> portAllocator: portAllocator, <del> portMapper: portMapper, <add> bridgeIface: bridgeIface, <add> bridgeNetwork: network, <add> ipAllocator: ipAllocator, <add> tcpPortAllocator: tcpPortAllocator, <add> udpPortAllocator: udpPortAllocator, <add> portMapper: portMapper, 
<ide> } <ide> return manager, nil <ide> } <ide><path>network_proxy.go <add>package docker <add> <add>import ( <add> "encoding/binary" <add> "fmt" <add> "github.com/dotcloud/docker/utils" <add> "io" <add> "log" <add> "net" <add> "sync" <add> "syscall" <add> "time" <add>) <add> <add>const ( <add> UDPConnTrackTimeout = 90 * time.Second <add> UDPBufSize = 2048 <add>) <add> <add>type Proxy interface { <add> // Start forwarding traffic back and forth the front and back-end <add> // addresses. <add> Run() <add> // Stop forwarding traffic and close both ends of the Proxy. <add> Close() <add> // Return the address on which the proxy is listening. <add> FrontendAddr() net.Addr <add> // Return the proxied address. <add> BackendAddr() net.Addr <add>} <add> <add>type TCPProxy struct { <add> listener *net.TCPListener <add> frontendAddr *net.TCPAddr <add> backendAddr *net.TCPAddr <add>} <add> <add>func NewTCPProxy(frontendAddr, backendAddr *net.TCPAddr) (*TCPProxy, error) { <add> listener, err := net.ListenTCP("tcp", frontendAddr) <add> if err != nil { <add> return nil, err <add> } <add> // If the port in frontendAddr was 0 then ListenTCP will have a picked <add> // a port to listen on, hence the call to Addr to get that actual port: <add> return &TCPProxy{ <add> listener: listener, <add> frontendAddr: listener.Addr().(*net.TCPAddr), <add> backendAddr: backendAddr, <add> }, nil <add>} <add> <add>func (proxy *TCPProxy) clientLoop(client *net.TCPConn, quit chan bool) { <add> backend, err := net.DialTCP("tcp", nil, proxy.backendAddr) <add> if err != nil { <add> log.Printf("Can't forward traffic to backend tcp/%v: %v\n", proxy.backendAddr, err.Error()) <add> client.Close() <add> return <add> } <add> <add> event := make(chan int64) <add> var broker = func(to, from *net.TCPConn) { <add> written, err := io.Copy(to, from) <add> if err != nil { <add> err, ok := err.(*net.OpError) <add> // If the socket we are writing to is shutdown with <add> // SHUT_WR, forward it to the other end of the pipe: <add> if ok && err.Err == syscall.EPIPE { <add> from.CloseWrite() <add> } <add> } <add> event <- written <add> } <add> utils.Debugf("Forwarding traffic between tcp/%v and tcp/%v", client.RemoteAddr(), backend.RemoteAddr()) <add> go broker(client, backend) <add> go broker(backend, client) <add> <add> var transferred int64 = 0 <add> for i := 0; i < 2; i++ { <add> select { <add> case written := <-event: <add> transferred += written <add> case <-quit: <add> // Interrupt the two brokers and "join" them. 
<add> client.Close() <add> backend.Close() <add> for ; i < 2; i++ { <add> transferred += <-event <add> } <add> goto done <add> } <add> } <add> client.Close() <add> backend.Close() <add>done: <add> utils.Debugf("%v bytes transferred between tcp/%v and tcp/%v", transferred, client.RemoteAddr(), backend.RemoteAddr()) <add>} <add> <add>func (proxy *TCPProxy) Run() { <add> quit := make(chan bool) <add> defer close(quit) <add> utils.Debugf("Starting proxy on tcp/%v for tcp/%v", proxy.frontendAddr, proxy.backendAddr) <add> for { <add> client, err := proxy.listener.Accept() <add> if err != nil { <add> utils.Debugf("Stopping proxy on tcp/%v for tcp/%v (%v)", proxy.frontendAddr, proxy.backendAddr, err.Error()) <add> return <add> } <add> go proxy.clientLoop(client.(*net.TCPConn), quit) <add> } <add>} <add> <add>func (proxy *TCPProxy) Close() { proxy.listener.Close() } <add>func (proxy *TCPProxy) FrontendAddr() net.Addr { return proxy.frontendAddr } <add>func (proxy *TCPProxy) BackendAddr() net.Addr { return proxy.backendAddr } <add> <add>// A net.Addr where the IP is split into two fields so you can use it as a key <add>// in a map: <add>type connTrackKey struct { <add> IPHigh uint64 <add> IPLow uint64 <add> Port int <add>} <add> <add>func newConnTrackKey(addr *net.UDPAddr) *connTrackKey { <add> if len(addr.IP) == net.IPv4len { <add> return &connTrackKey{ <add> IPHigh: 0, <add> IPLow: uint64(binary.BigEndian.Uint32(addr.IP)), <add> Port: addr.Port, <add> } <add> } <add> return &connTrackKey{ <add> IPHigh: binary.BigEndian.Uint64(addr.IP[:8]), <add> IPLow: binary.BigEndian.Uint64(addr.IP[8:]), <add> Port: addr.Port, <add> } <add>} <add> <add>type connTrackMap map[connTrackKey]*net.UDPConn <add> <add>type UDPProxy struct { <add> listener *net.UDPConn <add> frontendAddr *net.UDPAddr <add> backendAddr *net.UDPAddr <add> connTrackTable connTrackMap <add> connTrackLock sync.Mutex <add>} <add> <add>func NewUDPProxy(frontendAddr, backendAddr *net.UDPAddr) (*UDPProxy, error) { <add> listener, err := net.ListenUDP("udp", frontendAddr) <add> if err != nil { <add> return nil, err <add> } <add> return &UDPProxy{ <add> listener: listener, <add> frontendAddr: listener.LocalAddr().(*net.UDPAddr), <add> backendAddr: backendAddr, <add> connTrackTable: make(connTrackMap), <add> }, nil <add>} <add> <add>func (proxy *UDPProxy) replyLoop(proxyConn *net.UDPConn, clientAddr *net.UDPAddr, clientKey *connTrackKey) { <add> defer func() { <add> proxy.connTrackLock.Lock() <add> delete(proxy.connTrackTable, *clientKey) <add> proxy.connTrackLock.Unlock() <add> utils.Debugf("Done proxying between udp/%v and udp/%v", clientAddr.String(), proxy.backendAddr.String()) <add> proxyConn.Close() <add> }() <add> <add> readBuf := make([]byte, UDPBufSize) <add> for { <add> proxyConn.SetReadDeadline(time.Now().Add(UDPConnTrackTimeout)) <add> again: <add> read, err := proxyConn.Read(readBuf) <add> if err != nil { <add> if err, ok := err.(*net.OpError); ok && err.Err == syscall.ECONNREFUSED { <add> // This will happen if the last write failed <add> // (e.g: nothing is actually listening on the <add> // proxied port on the container), ignore it <add> // and continue until UDPConnTrackTimeout <add> // expires: <add> goto again <add> } <add> return <add> } <add> for i := 0; i != read; { <add> written, err := proxy.listener.WriteToUDP(readBuf[i:read], clientAddr) <add> if err != nil { <add> return <add> } <add> i += written <add> utils.Debugf("Forwarded %v/%v bytes to udp/%v", i, read, clientAddr.String()) <add> } <add> } <add>} <add> <add>func 
(proxy *UDPProxy) Run() { <add> readBuf := make([]byte, UDPBufSize) <add> utils.Debugf("Starting proxy on udp/%v for udp/%v", proxy.frontendAddr, proxy.backendAddr) <add> for { <add> read, from, err := proxy.listener.ReadFromUDP(readBuf) <add> if err != nil { <add> // NOTE: Apparently ReadFrom doesn't return <add> // ECONNREFUSED like Read do (see comment in <add> // UDPProxy.replyLoop) <add> utils.Debugf("Stopping proxy on udp/%v for udp/%v (%v)", proxy.frontendAddr, proxy.backendAddr, err.Error()) <add> break <add> } <add> <add> fromKey := newConnTrackKey(from) <add> proxy.connTrackLock.Lock() <add> proxyConn, hit := proxy.connTrackTable[*fromKey] <add> if !hit { <add> proxyConn, err = net.DialUDP("udp", nil, proxy.backendAddr) <add> if err != nil { <add> log.Printf("Can't proxy a datagram to udp/%s: %v\n", proxy.backendAddr.String(), err) <add> continue <add> } <add> proxy.connTrackTable[*fromKey] = proxyConn <add> go proxy.replyLoop(proxyConn, from, fromKey) <add> } <add> proxy.connTrackLock.Unlock() <add> for i := 0; i != read; { <add> written, err := proxyConn.Write(readBuf[i:read]) <add> if err != nil { <add> log.Printf("Can't proxy a datagram to udp/%s: %v\n", proxy.backendAddr.String(), err) <add> break <add> } <add> i += written <add> utils.Debugf("Forwarded %v/%v bytes to udp/%v", i, read, proxy.backendAddr.String()) <add> } <add> } <add>} <add> <add>func (proxy *UDPProxy) Close() { <add> proxy.listener.Close() <add> proxy.connTrackLock.Lock() <add> defer proxy.connTrackLock.Unlock() <add> for _, conn := range proxy.connTrackTable { <add> conn.Close() <add> } <add>} <add> <add>func (proxy *UDPProxy) FrontendAddr() net.Addr { return proxy.frontendAddr } <add>func (proxy *UDPProxy) BackendAddr() net.Addr { return proxy.backendAddr } <add> <add>func NewProxy(frontendAddr, backendAddr net.Addr) (Proxy, error) { <add> switch frontendAddr.(type) { <add> case *net.UDPAddr: <add> return NewUDPProxy(frontendAddr.(*net.UDPAddr), backendAddr.(*net.UDPAddr)) <add> case *net.TCPAddr: <add> return NewTCPProxy(frontendAddr.(*net.TCPAddr), backendAddr.(*net.TCPAddr)) <add> default: <add> panic(fmt.Errorf("Unsupported protocol")) <add> } <add>} <ide><path>network_proxy_test.go <add>package docker <add> <add>import ( <add> "bytes" <add> "fmt" <add> "io" <add> "net" <add> "strings" <add> "testing" <add> "time" <add>) <add> <add>var testBuf = []byte("Buffalo buffalo Buffalo buffalo buffalo buffalo Buffalo buffalo") <add>var testBufSize = len(testBuf) <add> <add>type EchoServer interface { <add> Run() <add> Close() <add> LocalAddr() net.Addr <add>} <add> <add>type TCPEchoServer struct { <add> listener net.Listener <add> testCtx *testing.T <add>} <add> <add>type UDPEchoServer struct { <add> conn net.PacketConn <add> testCtx *testing.T <add>} <add> <add>func NewEchoServer(t *testing.T, proto, address string) EchoServer { <add> var server EchoServer <add> if strings.HasPrefix(proto, "tcp") { <add> listener, err := net.Listen(proto, address) <add> if err != nil { <add> t.Fatal(err) <add> } <add> server = &TCPEchoServer{listener: listener, testCtx: t} <add> } else { <add> socket, err := net.ListenPacket(proto, address) <add> if err != nil { <add> t.Fatal(err) <add> } <add> server = &UDPEchoServer{conn: socket, testCtx: t} <add> } <add> t.Logf("EchoServer listening on %v/%v\n", proto, server.LocalAddr().String()) <add> return server <add>} <add> <add>func (server *TCPEchoServer) Run() { <add> go func() { <add> for { <add> client, err := server.listener.Accept() <add> if err != nil { <add> return <add> } 
<add> go func(client net.Conn) { <add> server.testCtx.Logf("TCP client accepted on the EchoServer\n") <add> written, err := io.Copy(client, client) <add> server.testCtx.Logf("%v bytes echoed back to the client\n", written) <add> if err != nil { <add> server.testCtx.Logf("can't echo to the client: %v\n", err.Error()) <add> } <add> client.Close() <add> }(client) <add> } <add> }() <add>} <add> <add>func (server *TCPEchoServer) LocalAddr() net.Addr { return server.listener.Addr() } <add>func (server *TCPEchoServer) Close() { server.listener.Addr() } <add> <add>func (server *UDPEchoServer) Run() { <add> go func() { <add> readBuf := make([]byte, 1024) <add> for { <add> read, from, err := server.conn.ReadFrom(readBuf) <add> if err != nil { <add> return <add> } <add> server.testCtx.Logf("Writing UDP datagram back") <add> for i := 0; i != read; { <add> written, err := server.conn.WriteTo(readBuf[i:read], from) <add> if err != nil { <add> break <add> } <add> i += written <add> } <add> } <add> }() <add>} <add> <add>func (server *UDPEchoServer) LocalAddr() net.Addr { return server.conn.LocalAddr() } <add>func (server *UDPEchoServer) Close() { server.conn.Close() } <add> <add>func testProxyAt(t *testing.T, proto string, proxy Proxy, addr string) { <add> defer proxy.Close() <add> go proxy.Run() <add> client, err := net.Dial(proto, addr) <add> if err != nil { <add> t.Fatalf("Can't connect to the proxy: %v", err) <add> } <add> defer client.Close() <add> client.SetDeadline(time.Now().Add(10 * time.Second)) <add> if _, err = client.Write(testBuf); err != nil { <add> t.Fatal(err) <add> } <add> recvBuf := make([]byte, testBufSize) <add> if _, err = client.Read(recvBuf); err != nil { <add> t.Fatal(err) <add> } <add> if !bytes.Equal(testBuf, recvBuf) { <add> t.Fatal(fmt.Errorf("Expected [%v] but got [%v]", testBuf, recvBuf)) <add> } <add>} <add> <add>func testProxy(t *testing.T, proto string, proxy Proxy) { <add> testProxyAt(t, proto, proxy, proxy.FrontendAddr().String()) <add>} <add> <add>func TestTCP4Proxy(t *testing.T) { <add> backend := NewEchoServer(t, "tcp", "127.0.0.1:0") <add> defer backend.Close() <add> backend.Run() <add> frontendAddr := &net.TCPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 0} <add> proxy, err := NewProxy(frontendAddr, backend.LocalAddr()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> testProxy(t, "tcp", proxy) <add>} <add> <add>func TestTCP6Proxy(t *testing.T) { <add> backend := NewEchoServer(t, "tcp", "[::1]:0") <add> defer backend.Close() <add> backend.Run() <add> frontendAddr := &net.TCPAddr{IP: net.IPv6loopback, Port: 0} <add> proxy, err := NewProxy(frontendAddr, backend.LocalAddr()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> testProxy(t, "tcp", proxy) <add>} <add> <add>func TestTCPDualStackProxy(t *testing.T) { <add> // If I understand `godoc -src net favoriteAddrFamily` (used by the <add> // net.Listen* functions) correctly this should work, but it doesn't. 
<add> t.Skip("No support for dual stack yet") <add> backend := NewEchoServer(t, "tcp", "[::1]:0") <add> defer backend.Close() <add> backend.Run() <add> frontendAddr := &net.TCPAddr{IP: net.IPv6loopback, Port: 0} <add> proxy, err := NewProxy(frontendAddr, backend.LocalAddr()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> ipv4ProxyAddr := &net.TCPAddr{ <add> IP: net.IPv4(127, 0, 0, 1), <add> Port: proxy.FrontendAddr().(*net.TCPAddr).Port, <add> } <add> testProxyAt(t, "tcp", proxy, ipv4ProxyAddr.String()) <add>} <add> <add>func TestUDP4Proxy(t *testing.T) { <add> backend := NewEchoServer(t, "udp", "127.0.0.1:0") <add> defer backend.Close() <add> backend.Run() <add> frontendAddr := &net.UDPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 0} <add> proxy, err := NewProxy(frontendAddr, backend.LocalAddr()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> testProxy(t, "udp", proxy) <add>} <add> <add>func TestUDP6Proxy(t *testing.T) { <add> backend := NewEchoServer(t, "udp", "[::1]:0") <add> defer backend.Close() <add> backend.Run() <add> frontendAddr := &net.UDPAddr{IP: net.IPv6loopback, Port: 0} <add> proxy, err := NewProxy(frontendAddr, backend.LocalAddr()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> testProxy(t, "udp", proxy) <add>} <add> <add>func TestUDPWriteError(t *testing.T) { <add> frontendAddr := &net.UDPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 0} <add> // Hopefully, this port will be free: */ <add> backendAddr := &net.UDPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 25587} <add> proxy, err := NewProxy(frontendAddr, backendAddr) <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer proxy.Close() <add> go proxy.Run() <add> client, err := net.Dial("udp", "127.0.0.1:25587") <add> if err != nil { <add> t.Fatalf("Can't connect to the proxy: %v", err) <add> } <add> defer client.Close() <add> // Make sure the proxy doesn't stop when there is no actual backend: <add> client.Write(testBuf) <add> client.Write(testBuf) <add> backend := NewEchoServer(t, "udp", "127.0.0.1:25587") <add> defer backend.Close() <add> backend.Run() <add> client.SetDeadline(time.Now().Add(10 * time.Second)) <add> if _, err = client.Write(testBuf); err != nil { <add> t.Fatal(err) <add> } <add> recvBuf := make([]byte, testBufSize) <add> if _, err = client.Read(recvBuf); err != nil { <add> t.Fatal(err) <add> } <add> if !bytes.Equal(testBuf, recvBuf) { <add> t.Fatal(fmt.Errorf("Expected [%v] but got [%v]", testBuf, recvBuf)) <add> } <add>} <ide><path>network_test.go <ide> func TestIptables(t *testing.T) { <ide> <ide> func TestParseNat(t *testing.T) { <ide> if nat, err := parseNat("4500"); err == nil { <del> if nat.Frontend != 0 || nat.Backend != 4500 { <del> t.Errorf("-p 4500 should produce 0->4500, got %d->%d", nat.Frontend, nat.Backend) <add> if nat.Frontend != 0 || nat.Backend != 4500 || nat.Proto != "tcp" { <add> t.Errorf("-p 4500 should produce 0->4500/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <ide> } <ide> } else { <ide> t.Fatal(err) <ide> } <ide> <ide> if nat, err := parseNat(":4501"); err == nil { <del> if nat.Frontend != 4501 || nat.Backend != 4501 { <del> t.Errorf("-p :4501 should produce 4501->4501, got %d->%d", nat.Frontend, nat.Backend) <add> if nat.Frontend != 4501 || nat.Backend != 4501 || nat.Proto != "tcp" { <add> t.Errorf("-p :4501 should produce 4501->4501/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <ide> } <ide> } else { <ide> t.Fatal(err) <ide> } <ide> <ide> if nat, err := parseNat("4502:4503"); err == nil { <del> if nat.Frontend != 4502 || 
nat.Backend != 4503 { <del> t.Errorf("-p 4502:4503 should produce 4502->4503, got %d->%d", nat.Frontend, nat.Backend) <add> if nat.Frontend != 4502 || nat.Backend != 4503 || nat.Proto != "tcp" { <add> t.Errorf("-p 4502:4503 should produce 4502->4503/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <ide> } <ide> } else { <ide> t.Fatal(err) <ide> } <add> <add> if nat, err := parseNat("4502:4503/tcp"); err == nil { <add> if nat.Frontend != 4502 || nat.Backend != 4503 || nat.Proto != "tcp" { <add> t.Errorf("-p 4502:4503/tcp should produce 4502->4503/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if nat, err := parseNat("4502:4503/udp"); err == nil { <add> if nat.Frontend != 4502 || nat.Backend != 4503 || nat.Proto != "udp" { <add> t.Errorf("-p 4502:4503/udp should produce 4502->4503/udp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if nat, err := parseNat(":4503/udp"); err == nil { <add> if nat.Frontend != 4503 || nat.Backend != 4503 || nat.Proto != "udp" { <add> t.Errorf("-p :4503/udp should produce 4503->4503/udp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if nat, err := parseNat(":4503/tcp"); err == nil { <add> if nat.Frontend != 4503 || nat.Backend != 4503 || nat.Proto != "tcp" { <add> t.Errorf("-p :4503/tcp should produce 4503->4503/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if nat, err := parseNat("4503/tcp"); err == nil { <add> if nat.Frontend != 0 || nat.Backend != 4503 || nat.Proto != "tcp" { <add> t.Errorf("-p 4503/tcp should produce 0->4503/tcp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if nat, err := parseNat("4503/udp"); err == nil { <add> if nat.Frontend != 0 || nat.Backend != 4503 || nat.Proto != "udp" { <add> t.Errorf("-p 4503/udp should produce 0->4503/udp, got %d->%d/%s", <add> nat.Frontend, nat.Backend, nat.Proto) <add> } <add> } else { <add> t.Fatal(err) <add> } <add> <add> if _, err := parseNat("4503/tcpgarbage"); err == nil { <add> t.Fatal(err) <add> } <add> <add> if _, err := parseNat("4503/tcp/udp"); err == nil { <add> t.Fatal(err) <add> } <add> <add> if _, err := parseNat("4503/"); err == nil { <add> t.Fatal(err) <add> } <ide> } <ide> <ide> func TestPortAllocation(t *testing.T) { <ide><path>runtime_test.go <ide> package docker <ide> <ide> import ( <add> "bytes" <ide> "fmt" <ide> "github.com/dotcloud/docker/utils" <ide> "io" <ide> import ( <ide> ) <ide> <ide> const ( <del> unitTestImageName = "docker-unit-tests" <del> unitTestImageID = "e9aa60c60128cad1" <del> unitTestNetworkBridge = "testdockbr0" <del> unitTestStoreBase = "/var/lib/docker/unit-tests" <del> testDaemonAddr = "127.0.0.1:4270" <del> testDaemonProto = "tcp" <add> unitTestImageName = "docker-test-image" <add> unitTestImageID = "83599e29c455eb719f77d799bc7c51521b9551972f5a850d7ad265bc1b5292f6" // 1.0 <add> unitTestNetworkBridge = "testdockbr0" <add> unitTestStoreBase = "/var/lib/docker/unit-tests" <add> testDaemonAddr = "127.0.0.1:4270" <add> testDaemonProto = "tcp" <ide> ) <ide> <ide> var globalRuntime *Runtime <ide> func TestGet(t *testing.T) { <ide> <ide> } <ide> <del>func findAvailablePort(runtime *Runtime, port int) (*Container, error) { <del> strPort := strconv.Itoa(port) <del> container, err := 
NewBuilder(runtime).Create(&Config{ <del> Image: GetTestImage(runtime).ID, <del> Cmd: []string{"sh", "-c", "echo well hello there | nc -l -p " + strPort}, <del> PortSpecs: []string{strPort}, <del> }, <del> ) <del> if err != nil { <del> return nil, err <del> } <del> hostConfig := &HostConfig{} <del> if err := container.Start(hostConfig); err != nil { <del> if strings.Contains(err.Error(), "address already in use") { <del> return nil, nil <del> } <del> return nil, err <del> } <del> return container, nil <del>} <del> <del>// Run a container with a TCP port allocated, and test that it can receive connections on localhost <del>func TestAllocatePortLocalhost(t *testing.T) { <add>func startEchoServerContainer(t *testing.T, proto string) (*Runtime, *Container, string) { <ide> runtime, err := newTestRuntime() <ide> if err != nil { <ide> t.Fatal(err) <ide> } <del> port := 5554 <ide> <add> port := 5554 <ide> var container *Container <add> var strPort string <ide> for { <ide> port += 1 <del> log.Println("Trying port", port) <del> t.Log("Trying port", port) <del> container, err = findAvailablePort(runtime, port) <add> strPort = strconv.Itoa(port) <add> var cmd string <add> if proto == "tcp" { <add> cmd = "socat TCP-LISTEN:" + strPort + ",reuseaddr,fork EXEC:/bin/cat" <add> } else if proto == "udp" { <add> cmd = "socat UDP-RECVFROM:" + strPort + ",fork EXEC:/bin/cat" <add> } else { <add> t.Fatal(fmt.Errorf("Unknown protocol %v", proto)) <add> } <add> t.Log("Trying port", strPort) <add> container, err = NewBuilder(runtime).Create(&Config{ <add> Image: GetTestImage(runtime).ID, <add> Cmd: []string{"sh", "-c", cmd}, <add> PortSpecs: []string{fmt.Sprintf("%s/%s", strPort, proto)}, <add> }) <ide> if container != nil { <ide> break <ide> } <ide> if err != nil { <add> nuke(runtime) <ide> t.Fatal(err) <ide> } <del> log.Println("Port", port, "already in use") <del> t.Log("Port", port, "already in use") <add> t.Logf("Port %v already in use", strPort) <ide> } <ide> <del> defer container.Kill() <add> hostConfig := &HostConfig{} <add> if err := container.Start(hostConfig); err != nil { <add> nuke(runtime) <add> t.Fatal(err) <add> } <ide> <ide> setTimeout(t, "Waiting for the container to be started timed out", 2*time.Second, func() { <ide> for !container.State.Running { <ide> func TestAllocatePortLocalhost(t *testing.T) { <ide> // Even if the state is running, lets give some time to lxc to spawn the process <ide> container.WaitTimeout(500 * time.Millisecond) <ide> <del> conn, err := net.Dial("tcp", <del> fmt.Sprintf( <del> "localhost:%s", container.NetworkSettings.PortMapping[strconv.Itoa(port)], <del> ), <del> ) <add> strPort = container.NetworkSettings.PortMapping[strings.Title(proto)][strPort] <add> return runtime, container, strPort <add>} <add> <add>// Run a container with a TCP port allocated, and test that it can receive connections on localhost <add>func TestAllocateTCPPortLocalhost(t *testing.T) { <add> runtime, container, port := startEchoServerContainer(t, "tcp") <add> defer nuke(runtime) <add> defer container.Kill() <add> <add> conn, err := net.Dial("tcp", fmt.Sprintf("localhost:%v", port)) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> defer conn.Close() <del> output, err := ioutil.ReadAll(conn) <add> <add> input := bytes.NewBufferString("well hello there\n") <add> _, err = conn.Write(input.Bytes()) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <del> if string(output) != "well hello there\n" { <del> t.Fatalf("Received wrong output from network connection: should be '%s', not '%s'", <del> "well 
hello there\n", <del> string(output), <del> ) <add> buf := make([]byte, 16) <add> read := 0 <add> conn.SetReadDeadline(time.Now().Add(2 * time.Second)) <add> read, err = conn.Read(buf) <add> if err != nil { <add> t.Fatal(err) <ide> } <del> container.Wait() <add> output := string(buf[:read]) <add> if !strings.Contains(output, "well hello there") { <add> t.Fatal(fmt.Errorf("[%v] doesn't contain [well hello there]", output)) <add> } <add>} <add> <add>// Run a container with a TCP port allocated, and test that it can receive connections on localhost <add>func TestAllocateUDPPortLocalhost(t *testing.T) { <add> runtime, container, port := startEchoServerContainer(t, "udp") <add> defer nuke(runtime) <add> defer container.Kill() <add> <add> conn, err := net.Dial("udp", fmt.Sprintf("localhost:%v", port)) <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer conn.Close() <add> <add> input := bytes.NewBufferString("well hello there\n") <add> buf := make([]byte, 16) <add> for i := 0; i != 10; i++ { <add> _, err := conn.Write(input.Bytes()) <add> if err != nil { <add> t.Fatal(err) <add> } <add> conn.SetReadDeadline(time.Now().Add(200 * time.Millisecond)) <add> read, err := conn.Read(buf) <add> if err == nil { <add> output := string(buf[:read]) <add> if strings.Contains(output, "well hello there") { <add> return <add> } <add> } <add> } <add> <add> t.Fatal("No reply from the container") <ide> } <ide> <ide> func TestRestore(t *testing.T) {
6
Javascript
Javascript
remove ids from viewer page and thumbnail divs
52360694b0f28828f9a103d4649724effac2bb98
<ide><path>web/pdf_page_view.js <ide> var PDFPageView = (function PDFPageViewClosure() { <ide> this.annotationLayer = null; <ide> <ide> var div = document.createElement('div'); <del> div.id = 'pageContainer' + this.id; <ide> div.className = 'page'; <ide> div.style.width = Math.floor(this.viewport.width) + 'px'; <ide> div.style.height = Math.floor(this.viewport.height) + 'px'; <ide><path>web/pdf_thumbnail_view.js <ide> var PDFThumbnailView = (function PDFThumbnailViewClosure() { <ide> this.anchor = anchor; <ide> <ide> var div = document.createElement('div'); <del> div.id = 'thumbnailContainer' + id; <ide> div.className = 'thumbnail'; <add> div.setAttribute('data-page-number', this.id); <ide> this.div = div; <ide> <ide> if (id === 1) { <ide><path>web/pdf_thumbnail_viewer.js <ide> var PDFThumbnailViewer = (function PDFThumbnailViewerClosure() { <ide> if (selected) { <ide> selected.classList.remove('selected'); <ide> } <del> var thumbnail = document.getElementById('thumbnailContainer' + page); <add> var thumbnail = document.querySelector( <add> 'div.thumbnail[data-page-number="' + page + '"]'); <ide> if (thumbnail) { <ide> thumbnail.classList.add('selected'); <ide> }
3
Python
Python
fix tests to use new prepared_request method
6287bf1bf6b6927960cbfb9443b1b145b7ad8b2f
<ide><path>libcloud/httplib_ssl.py <ide> import requests <ide> <ide> import libcloud.security <del>from libcloud.utils.py3 import httplib <ide> from libcloud.utils.py3 import urlparse <ide> <ide> <ide> def request(self, method, url, body=None, headers=None, raw=False): <ide> verify=self.ca_cert if self.ca_cert is not None else self.verify <ide> ) <ide> <del> def prepared_request(self, method, url, body=None, headers=None, raw=False): <del> req = requests.Request(method, ''.join([self.host, url]), data=body, headers=headers) <del> <add> def prepared_request(self, method, url, body=None, <add> headers=None, raw=False): <add> req = requests.Request(method, ''.join([self.host, url]), <add> data=body, headers=headers) <add> <ide> prepped = self.session.prepare_request(req) <ide> <ide> prepped.body = body <del> <del> self.response = self.session.send(prepped, <add> <add> self.response = self.session.send( <add> prepped, <ide> stream=raw, <del> verify=self.ca_cert if self.ca_cert is not None else self.verify <del> ) <add> verify=self.ca_cert if self.ca_cert is not None else self.verify) <ide> <ide> def getresponse(self): <ide> return self.response <ide><path>libcloud/test/__init__.py <ide> def getheader(self, name, *args, **kwargs): <ide> def getheaders(self): <ide> return list(self.headers.items()) <ide> <add> def iter_content(self, chunk_size): <add> return self.body_iter <add> <ide> def msg(self): <ide> raise NotImplemented <ide> <ide> def assertUrlContainsQueryParams(self, url, expected_params, strict=False): <ide> <ide> <ide> class StorageMockHttp(MockHttp): <add> def prepared_request(self, method, url, body=None, headers=None, raw=False): <add> pass <add> <ide> def putrequest(self, method, action, skip_host=0, skip_accept_encoding=0): <ide> pass <ide> <ide><path>libcloud/test/storage/test_cloudfiles.py <ide> def upload_file(self, response, file_path, chunked=False, <ide> extra = {} <ide> <ide> def func(*args, **kwargs): <del> self.assertEqual(kwargs['headers']['Content-Length'], 0) <add> self.assertEqual(kwargs['headers']['Content-Length'], '0') <ide> func.called = True <ide> return old_request(*args, **kwargs) <ide> <ide><path>libcloud/test/test_connection.py <ide> def test_content_length(self): <ide> for method in ['POST', 'PUT', 'post', 'put']: <ide> con.request('/test', method=method, data=None, <ide> headers={'Content-Length': '42'}, raw=True) <del> putheader_call_list = con.connection.putheader.call_args_list <del> self.assertIn(call('Content-Length', '42'), putheader_call_list) <add> request_prepared_call_list = con.connection.prepared_request.call_args_list <add> expected_call = call(body=None, headers={'Host': '127.0.0.1', 'Content-Length': '42', <add> 'Accept-Encoding': 'gzip,deflate', <add> 'User-Agent': con._user_agent()}, url='/test', method=method) <add> self.assertIn(expected_call, request_prepared_call_list) <ide> <ide> # '' as data, raw request, do not touch Content-Length if present <ide> for method in ['POST', 'PUT', 'post', 'put']: <ide> con.request('/test', method=method, data=None, <ide> headers={'Content-Length': '42'}, raw=True) <del> putheader_call_list = con.connection.putheader.call_args_list <del> self.assertIn(call('Content-Length', '42'), putheader_call_list) <add> request_prepared_call_list = con.connection.prepared_request.call_args_list <add> expected_call = call(body=None, headers={'Host': '127.0.0.1', 'Content-Length': '42', <add> 'Accept-Encoding': 'gzip,deflate', <add> 'User-Agent': con._user_agent()}, url='/test', method=method) <add> 
self.assertIn(expected_call, request_prepared_call_list) <ide> <ide> # 'a' as data, content length should be present <ide> for method in ['POST', 'PUT', 'post', 'put']: <ide><path>libcloud/utils/loggingconnection.py <ide> from libcloud.utils.compression import decompress_data <ide> <ide> <del>class LoggingBaseConnection(LibcloudConnection): <add>class LoggingConnection(LibcloudConnection): <ide> """ <ide> Debug class to log all HTTP(s) requests as they could be made <ide> with the curl command. <ide> <ide> :cvar log: file-like object that logs entries are written to. <ide> """ <ide> <add> protocol = 'https' <add> port = None <add> <ide> log = None <ide> http_proxy_used = False <ide> <ide> def _log_curl(self, method, url, body, headers): <ide> self.port, url))]) <ide> return " ".join(cmd) <ide> <del> <del>class LoggingConnection(LoggingBaseConnection): <del> """ <del> Utility Class for logging HTTPS connections <del> """ <del> <del> protocol = 'https' <del> port = None <del> <ide> def getresponse(self): <ide> r = LibcloudConnection.getresponse(self) <ide> if self.log is not None:
5
PHP
PHP
fix references to missingcontrollerexception
c8b55013c8ee8c4685208e280ca9d886fc747e88
<ide><path>tests/TestCase/Error/ErrorHandlerTest.php <ide> use Cake\Error\ErrorHandler; <ide> use Cake\Http\Exception\ForbiddenException; <ide> use Cake\Http\Exception\NotFoundException; <add>use Cake\Http\Exception\MissingControllerException; <ide> use Cake\Http\ServerRequest; <ide> use Cake\Log\Log; <del>use Cake\Routing\Exception\MissingControllerException; <ide> use Cake\Routing\Router; <ide> use Cake\TestSuite\TestCase; <ide> use Psr\Log\LoggerInterface; <ide> public function testHandleExceptionLogAttributes() <ide> ->method('log') <ide> ->with('error', $this->logicalAnd( <ide> $this->stringContains( <del> '[Cake\Routing\Exception\MissingControllerException] ' . <add> '[Cake\Http\Exception\MissingControllerException] ' . <ide> 'Controller class Derp could not be found.' <ide> ), <ide> $this->stringContains('Exception Attributes:'), <ide> public function testHandleExceptionLogAttributes() <ide> ->method('log') <ide> ->with('error', $this->logicalAnd( <ide> $this->stringContains( <del> '[Cake\Routing\Exception\MissingControllerException] ' . <add> '[Cake\Http\Exception\MissingControllerException] ' . <ide> 'Controller class Derp could not be found.' <ide> ), <ide> $this->logicalNot($this->stringContains('Exception Attributes:')) <ide><path>tests/TestCase/Error/ExceptionRendererTest.php <ide> use Cake\Http\Exception\HttpException; <ide> use Cake\Http\Exception\InternalErrorException; <ide> use Cake\Http\Exception\MethodNotAllowedException; <add>use Cake\Http\Exception\MissingControllerException; <ide> use Cake\Http\Exception\NotFoundException; <ide> use Cake\Http\ServerRequest; <ide> use Cake\Mailer\Exception\MissingActionException as MissingMailerActionException; <ide> use Cake\ORM\Exception\MissingBehaviorException; <del>use Cake\Routing\Exception\MissingControllerException; <ide> use Cake\Routing\Router; <ide> use Cake\TestSuite\TestCase; <ide> use Cake\View\Exception\MissingHelperException; <ide><path>tests/TestCase/Error/Middleware/ErrorHandlerMiddlewareTest.php <ide> use Cake\Error\ErrorHandler; <ide> use Cake\Error\ExceptionRendererInterface; <ide> use Cake\Error\Middleware\ErrorHandlerMiddleware; <add>use Cake\Http\Exception\MissingControllerException; <ide> use Cake\Http\Response; <ide> use Cake\Http\ServerRequestFactory; <ide> use Cake\Log\Log; <ide> public function testHandleExceptionLogAttributes() <ide> ->method('log') <ide> ->with('error', $this->logicalAnd( <ide> $this->stringContains( <del> '[Cake\Routing\Exception\MissingControllerException] ' . <add> '[Cake\Http\Exception\MissingControllerException] ' . <ide> 'Controller class Articles could not be found.' 
<ide> ), <ide> $this->stringContains('Exception Attributes:'), <ide> public function testHandleExceptionLogAttributes() <ide> $request = ServerRequestFactory::fromGlobals(); <ide> $middleware = new ErrorHandlerMiddleware(null, ['log' => true]); <ide> $handler = new TestRequestHandler(function ($req) { <del> throw new \Cake\Routing\Exception\MissingControllerException(['class' => 'Articles']); <add> throw new MissingControllerException(['class' => 'Articles']); <ide> }); <ide> $result = $middleware->process($request, $handler); <ide> $this->assertEquals(404, $result->getStatusCode()); <ide><path>tests/TestCase/ExceptionsTest.php <ide> public function exceptionProvider() <ide> ['Cake\Http\Exception\InternalErrorException', 500], <ide> ['Cake\Http\Exception\InvalidCsrfTokenException', 403], <ide> ['Cake\Http\Exception\MethodNotAllowedException', 405], <add> ['Cake\Http\Exception\MissingControllerException', 500], <ide> ['Cake\Http\Exception\NotAcceptableException', 406], <ide> ['Cake\Http\Exception\NotFoundException', 404], <ide> ['Cake\Http\Exception\NotImplementedException', 501], <ide> public function exceptionProvider() <ide> ['Cake\ORM\Exception\MissingTableClassException', 500], <ide> ['Cake\ORM\Exception\RolledbackTransactionException', 500], <ide> ['Cake\Routing\Exception\DuplicateNamedRouteException', 500], <del> ['Cake\Routing\Exception\MissingControllerException', 500], <ide> ['Cake\Routing\Exception\MissingDispatcherFilterException', 500], <ide> ['Cake\Routing\Exception\MissingRouteException', 500], <ide> ['Cake\Routing\Exception\RedirectException', 302],
4
Javascript
Javascript
fix comment indentation
a8991900b5c9045c54e5fbd4ad10480af18abc8c
<ide><path>examples/js/loaders/FBXLoader.js <ide> materials[ materialsIndex ].skinning = true; <ide> <ide> } <add> <ide> model = new THREE.SkinnedMesh( geometry, material ); <ide> <ide> } else { <ide> <ide> } <ide> <del> // FBX does not list materials for Nurbs lines, so we'll just put our own in here. <add> // FBX does not list materials for Nurbs lines, so we'll just put our own in here. <ide> var material = new THREE.LineBasicMaterial( { color: 0x3300ff, linewidth: 1 } ); <ide> return new THREE.Line( geometry, material ); <ide> <ide> } <ide> <del> // Parse ambient color in FBXTree.GlobalSettings.properties - if it's not set to black (default), create an ambient light <add> // Parse ambient color in FBXTree.GlobalSettings.properties - if it's not set to black (default), create an ambient light <ide> function createAmbientLight( FBXTree, sceneGraph ) { <ide> <ide> if ( 'GlobalSettings' in FBXTree && 'AmbientColor' in FBXTree.GlobalSettings.properties ) { <ide> <ide> } <ide> <del> // parse the model node for transform details and apply them to the model <add> // parse the model node for transform details and apply them to the model <ide> function setModelTransforms( FBXTree, model, modelNode, connections, sceneGraph ) { <ide> <ide> if ( 'Lcl_Translation' in modelNode.properties ) { <ide> <ide> } <ide> <del> // allow transformed pivots - see https://github.com/mrdoob/three.js/issues/11895 <add> // allow transformed pivots - see https://github.com/mrdoob/three.js/issues/11895 <ide> if ( 'GeometricTranslation' in modelNode.properties ) { <ide> <ide> var array = modelNode.properties.GeometricTranslation.value; <ide> <ide> var pos = lookAtTarget.properties.Lcl_Translation.value; <ide> <del> // DirectionalLight, SpotLight <add> // DirectionalLight, SpotLight <ide> if ( model.target !== undefined ) { <ide> <ide> model.target.position.set( pos[ 0 ], pos[ 1 ], pos[ 2 ] ); <ide> <ide> function bindSkeleton( FBXTree, deformers, geometryMap, modelMap, connections, sceneGraph ) { <ide> <del> // Now with the bones created, we can update the skeletons and bind them to the skinned meshes. <add> // Now with the bones created, we can update the skeletons and bind them to the skinned meshes. <ide> sceneGraph.updateMatrixWorld( true ); <ide> <ide> var worldMatrices = new Map(); <ide> <del> // Put skeleton into bind pose. <add> // Put skeleton into bind pose. <ide> if ( 'Pose' in FBXTree.Objects.subNodes ) { <ide> <ide> var BindPoseNode = FBXTree.Objects.subNodes.Pose; <ide> <ide> } <ide> <del> // Now that skeleton is in bind pose, bind to model. <add> // Now that skeleton is in bind pose, bind to model. <ide> deformer.skeleton = new THREE.Skeleton( deformer.bones ); <ide> <ide> var conns = connections.get( deformer.FBX_ID ); <ide> <ide> } <ide> <del> //Skeleton is now bound, return objects to starting world positions. <add> //Skeleton is now bound, return objects to starting world positions. <ide> sceneGraph.updateMatrixWorld( true ); <ide> <del> // Silly hack with the animation parsing. We're gonna pretend the scene graph has a skeleton <del> // to attach animations to, since FBX treats animations as animations for the entire scene, <del> // not just for individual objects. <add> // Silly hack with the animation parsing. We're gonna pretend the scene graph has a skeleton <add> // to attach animations to, since FBX treats animations as animations for the entire scene, <add> // not just for individual objects. <ide> sceneGraph.skeleton = { <ide> bones: Array.from( modelMap.values() ), <ide> };
1
PHP
PHP
fix incorrect defaults
8e3ae129989c7cee47943b36b5435271a87a3123
<ide><path>lib/Cake/Database/Schema/Table.php <ide> class Table { <ide> 'type' => null, <ide> 'columns' => [], <ide> 'length' => [], <del> 'references' => null, <del> 'update' => null, <del> 'delete' => null, <add> 'references' => [], <add> 'update' => 'cascade', <add> 'delete' => 'cascade', <ide> ]; <ide> <ide> /** <ide> public function addConstraint($name, $attrs) { <ide> * @return array <ide> */ <ide> protected function _checkForeignKey($attrs) { <del> $attrs += [ <del> 'references' => [], <del> 'update' => 'cascade', <del> 'delete' => 'cascade', <del> ]; <ide> if (count($attrs['references']) < 2) { <ide> throw new Exception(__d('cake_dev', 'References must contain a table and column.')); <ide> }
1
Python
Python
add tests for sorting strings containing zeros
2bd3c4c8d52560717a8acc2b30da14d269fb2e37
<ide><path>numpy/core/tests/test_regression.py <ide> def check_searchsorted_variable_length(self, level=rlevel): <ide> y = np.array(['d','e']) <ide> assert_equal(x.searchsorted(y), [3,3]) <ide> <add> def check_string_argsort_with_zeros(self, level=rlevel): <add> """Check argsort for strings containing zeros.""" <add> x = np.fromstring("\x00\x02\x00\x01", dtype="|S2") <add> assert_array_equal(x.argsort(kind='m'), np.array([1,0])) <add> assert_array_equal(x.argsort(kind='q'), np.array([1,0])) <add> <add> def check_string_sort_with_zeros(self, level=rlevel): <add> """Check sort for strings containing zeros.""" <add> x = np.fromstring("\x00\x02\x00\x01", dtype="|S2") <add> y = np.fromstring("\x00\x01\x00\x02", dtype="|S2") <add> assert_array_equal(np.sort(x, kind="q"), y) <add> <ide> if __name__ == "__main__": <ide> NumpyTest().run()
1
Text
Text
add content from wiki to docs/
599c2293d6bc0076e96d6355bfcc6ca009e18b48
<ide><path>docs/kyles-little-things.md <add>I recently switched over to Sublime Text 2 and for the most part it's been pretty awesome. But I've been noticing a lot of little things that I really appreciate in an editor, so I thought I'd note them down. <add> <add>1. Indenting soft-wrapped lines http://share.kyleneath.com/captures/_upsell.html.erb-20120127-231402.png <add>2. Respecting Chrome-like tab behavior (drag between windows/panes, `⌘+Shift+T` to get last closed tab back, `⌘+N` for a new tab in your current pane) <add>3. Indent markers http://share.kyleneath.com/captures/billing_dependency.rb-20120127-232754.png
1
Ruby
Ruby
remove deprecated specs
16bdd423089f5fa307b4b87a8868bd4598237fdf
<ide><path>Library/Homebrew/test/cask/dsl/version_spec.rb <ide> "1.2.3,abc" => "abc" <ide> end <ide> <del> describe "#before_colon" do <del> include_examples "version expectations hash", :before_colon, <del> "1.2.3" => "1.2.3", <del> "1.2.3:" => "1.2.3", <del> ":abc" => "", <del> "1.2.3:abc" => "1.2.3" <del> end <del> <del> describe "#after_colon" do <del> include_examples "version expectations hash", :after_colon, <del> "1.2.3" => "", <del> "1.2.3:" => "", <del> ":abc" => "abc", <del> "1.2.3:abc" => "abc" <del> end <del> <ide> describe "#dots_to_hyphens" do <ide> include_examples "version expectations hash", :dots_to_hyphens, <ide> "1.2.3_4-5" => "1-2-3_4-5"
1
Text
Text
fix stability indicator in webcrypto doc
2c2c87e291d36df4c8c3357cc6b896a711c219ef
<ide><path>doc/api/webcrypto.md <ide> <ide> <!-- introduced_in=REPLACEME --> <ide> <del>> Stability: 0 - Experimental <add>> Stability: 1 - Experimental <ide> <ide> Node.js provides an implementation of the standard [Web Crypto API][]. <ide>
1
Javascript
Javascript
hide toggle arrows in owners list mode
61203f77cf850b07a6da57ea4257bcfeea200ae5
<ide><path>src/devtools/views/Components/Element.js <ide> export default function ElementView({ data, index, style }: Props) { <ide> const { <ide> baseDepth, <ide> getElementAtIndex, <add> ownerStack, <ide> selectOwner, <ide> selectedElementID, <ide> selectElementByID, <ide> export default function ElementView({ data, index, style }: Props) { <ide> marginBottom: `-${style.height}px`, <ide> }} <ide> > <del> <ExpandCollapseToggle element={element} store={store} /> <add> {ownerStack.length === 0 ? ( <add> <ExpandCollapseToggle element={element} store={store} /> <add> ) : null} <ide> <span className={styles.Component} ref={ref}> <ide> <DisplayName displayName={displayName} id={((id: any): number)} /> <ide> {key && (
1