content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Python | Python | fix multi heuristic astar algo | cd987372e4c3a9f87d65b757ab46a48527fc9fa9 | <ide><path>graphs/multi_heuristic_astar.py
<ide>
<ide> import numpy as np
<ide>
<add>TPos = tuple[int, int]
<add>
<ide>
<ide> class PriorityQueue:
<ide> def __init__(self):
<ide> def get(self):
<ide> return (priority, item)
<ide>
<ide>
<del>def consistent_heuristic(P, goal):
<add>def consistent_heuristic(P: TPos, goal: TPos):
<ide> # euclidean distance
<ide> a = np.array(P)
<ide> b = np.array(goal)
<ide> return np.linalg.norm(a - b)
<ide>
<ide>
<del>def heuristic_2(P, goal):
<add>def heuristic_2(P: TPos, goal: TPos):
<ide> # integer division by time variable
<ide> return consistent_heuristic(P, goal) // t
<ide>
<ide>
<del>def heuristic_1(P, goal):
<add>def heuristic_1(P: TPos, goal: TPos):
<ide> # manhattan distance
<ide> return abs(P[0] - goal[0]) + abs(P[1] - goal[1])
<ide>
<ide>
<del>def key(start, i, goal, g_function):
<add>def key(start: TPos, i: int, goal: TPos, g_function: dict[TPos, float]):
<ide> ans = g_function[start] + W1 * heuristics[i](start, goal)
<ide> return ans
<ide>
<ide> def do_something(back_pointer, goal, start):
<ide> quit()
<ide>
<ide>
<del>def valid(p):
<add>def valid(p: TPos):
<ide> if p[0] < 0 or p[0] > n - 1:
<ide> return False
<ide> if p[1] < 0 or p[1] > n - 1:
<ide> def make_common_ground():
<ide> (18, 1),
<ide> (19, 1),
<ide> ]
<del>blocks_no = []
<ide> blocks_all = make_common_ground()
<ide>
<ide>
<ide> def make_common_ground():
<ide> t = 1
<ide>
<ide>
<del>def multi_a_star(start, goal, n_heuristic):
<add>def multi_a_star(start: TPos, goal: TPos, n_heuristic: int):
<ide> g_function = {start: 0, goal: float("inf")}
<ide> back_pointer = {start: -1, goal: -1}
<ide> open_list = []
<ide> def multi_a_star(start, goal, n_heuristic):
<ide> open_list.append(PriorityQueue())
<ide> open_list[i].put(start, key(start, i, goal, g_function))
<ide>
<del> close_list_anchor = []
<del> close_list_inad = []
<add> close_list_anchor: list[int] = []
<add> close_list_inad: list[int] = []
<ide> while open_list[0].minkey() < float("inf"):
<ide> for i in range(1, n_heuristic):
<ide> # print(open_list[0].minkey(), open_list[i].minkey()) | 1 |
Java | Java | turn flatviewmanager into viewgroupmanager | 85cdfcd1f78fa70ff913570d831c9994f604fab0 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/FlatViewManager.java
<ide> package com.facebook.react.flat;
<ide>
<ide> import com.facebook.react.uimanager.ThemedReactContext;
<del>import com.facebook.react.uimanager.BaseViewManager;
<add>import com.facebook.react.uimanager.ViewGroupManager;
<ide>
<del>abstract class FlatViewManager<C extends FlatShadowNode> extends BaseViewManager<FlatViewGroup, C> {
<add>abstract class FlatViewManager extends ViewGroupManager<FlatViewGroup> {
<ide>
<ide> @Override
<ide> protected FlatViewGroup createViewInstance(ThemedReactContext reactContext) {
<ide> protected FlatViewGroup createViewInstance(ThemedReactContext reactContext) {
<ide> public void setBackgroundColor(FlatViewGroup view, int backgroundColor) {
<ide> // suppress
<ide> }
<del>
<del> @Override
<del> public void updateExtraData(FlatViewGroup root, Object extraData) {
<del> }
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/RCTImageViewManager.java
<ide>
<ide> package com.facebook.react.flat;
<ide>
<del>/* package */ final class RCTImageViewManager extends FlatViewManager<RCTImageView> {
<add>/* package */ final class RCTImageViewManager extends FlatViewManager {
<ide>
<ide> @Override
<ide> public String getName() {
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/RCTTextManager.java
<ide> /**
<ide> * ViewManager that creates instances of RCTText.
<ide> */
<del>/* package */ final class RCTTextManager extends FlatViewManager<RCTText> {
<add>/* package */ final class RCTTextManager extends FlatViewManager {
<ide>
<ide> @Override
<ide> public String getName() {
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/RCTViewManager.java
<ide> /**
<ide> * ViewManager that creates instances of RCTView.
<ide> */
<del>/* package */ final class RCTViewManager extends FlatViewManager<RCTView> {
<add>/* package */ final class RCTViewManager extends FlatViewManager {
<ide>
<ide> @Override
<ide> public String getName() { | 4 |
PHP | PHP | make line shorter | f00fb8c92285f1e9eeb087d25f8058f173159f5e | <ide><path>src/Illuminate/Bus/Dispatcher.php
<ide> protected function commandShouldBeQueued($command)
<ide> */
<ide> public function dispatchToQueue($command)
<ide> {
<del> $queue = call_user_func($this->queueResolver, isset($command->connection) ? $command->connection : null);
<add> $connection = isset($command->connection) ? $command->connection : null;
<add> $queue = call_user_func($this->queueResolver, $connection);
<ide>
<ide> if (! $queue instanceof Queue) {
<ide> throw new RuntimeException('Queue resolver did not return a Queue implementation.'); | 1 |
PHP | PHP | register symfony console namespace with autoloader | 2c12be305d99020f1213bf680a60d14ea5638988 | <ide><path>laravel/core.php
<ide> => path('sys').'database/eloquent/relationships/has_one_or_many'.EXT,
<ide> ));
<ide>
<add>/*
<add>|--------------------------------------------------------------------------
<add>| Register The Symfony Components
<add>|--------------------------------------------------------------------------
<add>|
<add>| Laravel's "Artisan" CLI makes use of the Symfony Console component to
<add>| build a wonderful CLI environment that is both robust and testable.
<add>| We'll register the component's namespace here.
<add>|
<add>*/
<add>
<add>Autoloader::namespaces(array(
<add> 'Symfony\Component\Console' => path('base').'vendor/Symfony/Component/Console',
<add>));
<add>
<ide> /*
<ide> |--------------------------------------------------------------------------
<ide> | Set The CLI Options Array | 1 |
Text | Text | point people to prod build in perf docs | 43b63995a8c7233569c142840ca73f653bbfcabf | <ide><path>docs/docs/10.9-perf.md
<ide> In addition to giving you an overview of your app's overall performance, ReactPe
<ide>
<ide> See these two articles by the [Benchling Engineering Team](http://benchling.engineering) for a in-depth introduction to performance tooling: ["Performance Engineering with React"](http://benchling.engineering/performance-engineering-with-react/) and ["A Deep Dive into React Perf Debugging"](http://benchling.engineering/deep-dive-react-perf-debugging/)!
<ide>
<add>## Development vs. Production Builds
<add>
<add>If you're benchmarking or seeing performance problems in your React apps, make sure you're testing with the [minified production build](/react/downloads.html). The development build includes extra warnings that are helpful when building your apps, but it is slower due to the extra bookkeeping it does.
<add>
<add>However, the perf tools described on this page only work when using the development build of React. Therefore, the profiler only serves to indicate the _relatively_ expensive parts of your app.
<add>
<ide> ## General API
<ide>
<ide> The `Perf` object documented here is exposed as `require('react-addons-perf')` and can be used with React in development mode only. You should not include this bundle when building your app for production.
<ide>
<del>> Note:
<del>>
<del>> The dev build of React is slower than the prod build, due to all the extra logic for providing, for example, React's friendly console warnings (stripped away in the prod build). Therefore, the profiler only serves to indicate the _relatively_ expensive parts of your app.
<del>
<ide> ### `Perf.start()` and `Perf.stop()`
<ide> Start/stop the measurement. The React operations in-between are recorded for analyses below. Operations that took an insignificant amount of time are ignored.
<ide>
<ide><path>docs/docs/11-advanced-performance.md
<ide> next: context.html
<ide>
<ide> One of the first questions people ask when considering React for a project is whether their application will be as fast and responsive as an equivalent non-React version. The idea of re-rendering an entire subtree of components in response to every state change makes people wonder whether this process negatively impacts performance. React uses several clever techniques to minimize the number of costly DOM operations required to update the UI.
<ide>
<add>## Use the production build
<add>
<add>If you're benchmarking or seeing performance problems in your React apps, make sure you're testing with the [minified production build](/react/downloads.html). The development build includes extra warnings that are helpful when building your apps, but it is slower due to the extra bookkeeping it does.
<add>
<ide> ## Avoiding reconciling the DOM
<ide>
<ide> React makes use of a *virtual DOM*, which is a descriptor of a DOM subtree rendered in the browser. This parallel representation allows React to avoid creating DOM nodes and accessing existing ones, which is slower than operations on JavaScript objects. When a component's props or state change, React decides whether an actual DOM update is necessary by constructing a new virtual DOM and comparing it to the old one. Only in the case they are not equal, will React [reconcile](/react/docs/reconciliation.html) the DOM, applying as few mutations as possible. | 2 |
Python | Python | add tests for basis and cast static class methods | 8ad18ab3799d669d93759e7c20ff6457ed1e2bc2 | <ide><path>numpy/polynomial/tests/test_chebyshev.py
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, cheb.chebint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, cheb.chebint([1,2,3], 2, [1,2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = cheb.Chebyshev(cheb.poly2cheb([0, -1, 0, 1]), [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = cheb.Chebyshev.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = cheb.poly2cheb([0, -1, 0, 1])
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = cheb.Chebyshev.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = cheb.Chebyshev.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = cheb.Chebyshev.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = cheb.Chebyshev.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<del>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = cheb.Chebyshev.identity()
<ide> assert_almost_equal(p(x), x)
<ide> p = cheb.Chebyshev.identity([1,3])
<ide> assert_almost_equal(p(x), x)
<del>#
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite()
<ide><path>numpy/polynomial/tests/test_classes.py
<add>"""Test inter-conversion of different polynomial classes.
<add>
<add>This tests the convert and cast methods of all the polynomial classes.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>import numpy as np
<add>from numpy.polynomial import (
<add> Polynomial, Legendre, Chebyshev, Laguerre,
<add> Hermite, HermiteE)
<add>from numpy.testing import (
<add> TestCase, assert_almost_equal, assert_raises,
<add> assert_equal, assert_, run_module_suite)
<add>
<add>classes = (
<add> Polynomial, Legendre, Chebyshev, Laguerre,
<add> Hermite, HermiteE)
<add>
<add>class TestClassConversions(TestCase):
<add>
<add> def test_conversion(self):
<add> x = np.linspace(0, 1, 10)
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly1 in classes:
<add> d1 = domain + np.random.random((2,))*.25
<add> w1 = window + np.random.random((2,))*.25
<add> c1 = np.random.random((3,))
<add> p1 = Poly1(c1, domain=d1, window=w1)
<add> for Poly2 in classes:
<add> msg = "-- %s -> %s" % (Poly1.__name__, Poly2.__name__)
<add> d2 = domain + np.random.random((2,))*.25
<add> w2 = window + np.random.random((2,))*.25
<add> p2 = p1.convert(kind=Poly2, domain=d2, window=w2)
<add> assert_almost_equal(p2.domain, d2, err_msg=msg)
<add> assert_almost_equal(p2.window, w2, err_msg=msg)
<add> assert_almost_equal(p2(x), p1(x), err_msg=msg)
<add>
<add> def test_cast(self):
<add> x = np.linspace(0, 1, 10)
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly1 in classes:
<add> d1 = domain + np.random.random((2,))*.25
<add> w1 = window + np.random.random((2,))*.25
<add> c1 = np.random.random((3,))
<add> p1 = Poly1(c1, domain=d1, window=w1)
<add> for Poly2 in classes:
<add> msg = "-- %s -> %s" % (Poly1.__name__, Poly2.__name__)
<add> d2 = domain + np.random.random((2,))*.25
<add> w2 = window + np.random.random((2,))*.25
<add> p2 = Poly2.cast(p1, domain=d2, window=w2)
<add> assert_almost_equal(p2.domain, d2, err_msg=msg)
<add> assert_almost_equal(p2.window, w2, err_msg=msg)
<add> assert_almost_equal(p2(x), p1(x), err_msg=msg)
<add>
<add>
<add>class TestClasses(TestCase):
<add>
<add> def test_basis(self):
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> p = Poly.basis(5, domain=d, window=w)
<add> assert_equal(p.domain, d, err_msg=msg)
<add> assert_equal(p.window, w, err_msg=msg)
<add> assert_equal(p.coef, [0]*5 + [1])
<add>
<add> def test_fromroots(self):
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> r = np.random.random((5,))
<add> p1 = Poly.fromroots(r, domain=d, window=w)
<add> assert_equal(p1.domain, d, err_msg=msg)
<add> assert_equal(p1.window, w, err_msg=msg)
<add> assert_almost_equal(p1(r), 0, err_msg=msg)
<add> # test monic with same domain and window
<add> p2 = Polynomial.cast(p1, domain=d, window=w)
<add> assert_almost_equal(p2.coef[-1], 1, err_msg=msg)
<add>
<add> def test_roots(self):
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> tgt = np.sort(np.random.random((5,)))
<add> res = np.sort(Poly.fromroots(tgt).roots())
<add> assert_almost_equal(res, tgt, err_msg=msg)
<add>
<add> def test_degree(self):
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> p = Poly.basis(5)
<add> assert_equal(p.degree(), 5, err_msg=msg)
<add>
<add> def test_copy(self):
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> p1 = Poly.basis(5)
<add> p2 = p1.copy()
<add> assert_(p1 == p2, msg)
<add> assert_(p1 is not p2, msg)
<add> assert_(p1.coef is not p2.coef, msg)
<add> assert_(p1.domain is not p2.domain, msg)
<add> assert_(p1.window is not p2.window, msg)
<add>
<add> def test_deriv(self):
<add> # Check that the derivative is the inverse of integration. It is
<add> # assumes that the integration has been tested elsewhere.
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> p1 = Poly([1, 2, 3], domain=d, window=w)
<add> p2 = p1.integ(2, k=[1, 2])
<add> p3 = p1.integ(1, k=[1])
<add> assert_almost_equal(p2.deriv(1).coef, p3.coef, err_msg=msg)
<add> assert_almost_equal(p2.deriv(2).coef, p1.coef, err_msg=msg)
<add>
<add> def test_linspace(self):
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> p = Poly([1,2,3], domain=d, window=w)
<add> # test default domain
<add> xtgt = np.linspace(d[0], d[1], 20)
<add> ytgt = p(xtgt)
<add> xres, yres = p.linspace(20)
<add> assert_almost_equal(xres, xtgt, err_msg=msg)
<add> assert_almost_equal(yres, ytgt, err_msg=msg)
<add> # test specified domain
<add> xtgt = np.linspace(0, 2, 20)
<add> ytgt = p(xtgt)
<add> xres, yres = p.linspace(20, domain=[0, 2])
<add> assert_almost_equal(xres, xtgt, err_msg=msg)
<add> assert_almost_equal(yres, ytgt, err_msg=msg)
<add>
<add> def test_fit(self) :
<add>
<add> def f(x) :
<add> return x*(x - 1)*(x - 2)
<add> x = np.linspace(0,3)
<add> y = f(x)
<add>
<add> window = np.array([0, 1])
<add> domain = np.array([0, 1])
<add> for Poly in classes:
<add> msg = "-- %s" % (Poly.__name__,)
<add> # test default value of domain
<add> p = Poly.fit(x, y, 3)
<add> assert_almost_equal(p.domain, [0,3], err_msg=msg)
<add> assert_almost_equal(p(x), y, err_msg=msg)
<add> assert_equal(p.degree(), 3, err_msg=msg)
<add>
<add> # test with given windows and domains
<add> d = domain + np.random.random((2,))*.25
<add> w = window + np.random.random((2,))*.25
<add> p = Poly.fit(x, y, 3, domain=d, window=w)
<add> assert_almost_equal(p(x), y, err_msg=msg)
<add> assert_almost_equal(p.domain, d, err_msg=msg)
<add> assert_almost_equal(p.window, w, err_msg=msg)
<add>
<add> # test with class domain default
<add> p = Poly.fit(x, y, 3, [])
<add> assert_equal(p.domain, Poly.domain, err_msg=msg)
<add> assert_equal(p.window, Poly.window, err_msg=msg)
<add>
<add> # test that fit accepts weights.
<add> w = np.zeros_like(x)
<add> z = y + np.random.random(y.shape)*.25
<add> w[::2] = 1
<add> p1 = Poly.fit(x[::2], z[::2], 3)
<add> p2 = Poly.fit(x, z, 3, w=w)
<add> assert_almost_equal(p1(x), p2(x), err_msg=msg)
<add>
<add>
<add>if __name__ == "__main__":
<add> run_module_suite()
<ide><path>numpy/polynomial/tests/test_hermite.py
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, herm.hermint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, herm.hermint([1,2,3], 2, [1,2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = herm.Hermite(herm.poly2herm([0, -1, 0, 1]), [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = herm.Hermite.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = herm.poly2herm([0, -1, 0, 1])
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = herm.Hermite.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = herm.Hermite.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = herm.Hermite.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = herm.Hermite.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<del>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = herm.Hermite.identity()
<ide> assert_almost_equal(p(x), x)
<ide> p = herm.Hermite.identity([1,3])
<ide> assert_almost_equal(p(x), x)
<del>#
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite()
<ide><path>numpy/polynomial/tests/test_hermite_e.py
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, herme.hermeint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, herme.hermeint([1,2,3], 2, [1,2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = herme.HermiteE(herme.poly2herme([0, -1, 0, 1]), [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = herme.HermiteE.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = herme.poly2herme([0, -1, 0, 1])
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = herme.HermiteE.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = herme.HermiteE.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = herme.HermiteE.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = herme.HermiteE.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<del>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = herme.HermiteE.identity()
<ide><path>numpy/polynomial/tests/test_laguerre.py
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, lag.lagint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, lag.lagint([1,2,3], 2, [1,2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = lag.Laguerre(lag.poly2lag([0, -1, 0, 1]), [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = lag.Laguerre.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = lag.poly2lag([0, -1, 0, 1])
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = lag.Laguerre.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = lag.Laguerre.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = lag.Laguerre.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = lag.Laguerre.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<del>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = lag.Laguerre.identity()
<ide> assert_almost_equal(p(x), x)
<ide> p = lag.Laguerre.identity([1,3])
<ide> assert_almost_equal(p(x), x)
<del>#
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite()
<ide><path>numpy/polynomial/tests/test_legendre.py
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, leg.legint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, leg.legint([1,2,3], 2, [1,2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = leg.Legendre(leg.poly2leg([0, -1, 0, 1]), [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = leg.Legendre.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = leg.poly2leg([0, -1, 0, 1])
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = leg.Legendre.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = leg.Legendre.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = leg.Legendre.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = leg.Legendre.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<del>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = leg.Legendre.identity()
<ide> assert_almost_equal(p(x), x)
<ide> p = leg.Legendre.identity([1,3])
<ide> assert_almost_equal(p(x), x)
<del>#
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite()
<ide><path>numpy/polynomial/tests/test_polynomial.py
<ide>
<ide> import numpy as np
<ide> import numpy.polynomial.polynomial as poly
<del>from numpy.testing import *
<add>from numpy.testing import (
<add> TestCase, assert_almost_equal, assert_raises,
<add> assert_equal, assert_, run_module_suite)
<ide>
<ide> def trim(x) :
<ide> return poly.polytrim(x, tol=1e-6)
<ide> def test_call(self) :
<ide> xx = 2*x - 1
<ide> assert_almost_equal(self.p2(x), self.p1(xx))
<ide>
<del> def test_degree(self) :
<del> assert_equal(self.p1.degree(), 2)
<del>
<ide> def test_cutdeg(self) :
<ide> assert_raises(ValueError, self.p1.cutdeg, .5)
<ide> assert_raises(ValueError, self.p1.cutdeg, -1)
<ide> def test_cutdeg(self) :
<ide> assert_equal(len(self.p1.cutdeg(1)), 2)
<ide> assert_equal(len(self.p1.cutdeg(0)), 1)
<ide>
<del> def test_convert(self) :
<del> x = np.linspace(-1,1)
<del> p = self.p1.convert(domain=[0,1])
<del> assert_almost_equal(p(x), self.p1(x))
<del>
<ide> def test_mapparms(self) :
<ide> parms = self.p2.mapparms()
<ide> assert_almost_equal(parms, [-1, 2])
<ide> def test_truncate(self) :
<ide> assert_equal(len(self.p1.truncate(2)), 2)
<ide> assert_equal(len(self.p1.truncate(1)), 1)
<ide>
<del> def test_copy(self) :
<del> p = self.p1.copy()
<del> assert_(self.p1 == p)
<del>
<ide> def test_integ(self) :
<ide> p = self.p2.integ()
<ide> assert_almost_equal(p.coef, poly.polyint([1,2,3], 1, 0, scl=.5))
<ide> def test_integ(self) :
<ide> p = self.p2.integ(2, [1, 2])
<ide> assert_almost_equal(p.coef, poly.polyint([1,2,3], 2, [1, 2], scl=.5))
<ide>
<del> def test_deriv(self) :
<del> p = self.p2.integ(2, [1, 2])
<del> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<del> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<del>
<del> def test_roots(self) :
<del> p = poly.Polynomial([0, -1, 0, 1], [0, 1])
<del> res = p.roots()
<del> tgt = [0, .5, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_linspace(self):
<del> xdes = np.linspace(0, 1, 20)
<del> ydes = self.p2(xdes)
<del> xres, yres = self.p2.linspace(20)
<del> assert_almost_equal(xres, xdes)
<del> assert_almost_equal(yres, ydes)
<del>
<del> def test_fromroots(self) :
<del> roots = [0, .5, 1]
<del> p = poly.Polynomial.fromroots(roots, domain=[0, 1])
<del> res = p.coef
<del> tgt = [0, -1, 0, 1]
<del> assert_almost_equal(res, tgt)
<del>
<del> def test_fit(self) :
<del> def f(x) :
<del> return x*(x - 1)*(x - 2)
<del> x = np.linspace(0,3)
<del> y = f(x)
<del>
<del> # test default value of domain
<del> p = poly.Polynomial.fit(x, y, 3)
<del> assert_almost_equal(p.domain, [0,3])
<del>
<del> # test that fit works in given domains
<del> p = poly.Polynomial.fit(x, y, 3, None)
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [0,3])
<del> p = poly.Polynomial.fit(x, y, 3, [])
<del> assert_almost_equal(p(x), y)
<del> assert_almost_equal(p.domain, [-1, 1])
<del> # test that fit accepts weights.
<del> w = np.zeros_like(x)
<del> yw = y.copy()
<del> w[1::2] = 1
<del> yw[0::2] = 0
<del> p = poly.Polynomial.fit(x, yw, 3, w=w)
<del> assert_almost_equal(p(x), y)
<ide>
<ide> def test_identity(self) :
<ide> x = np.linspace(0,3)
<ide> p = poly.Polynomial.identity()
<ide> assert_almost_equal(p(x), x)
<ide> p = poly.Polynomial.identity([1,3])
<ide> assert_almost_equal(p(x), x)
<del>#
<add>
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite() | 7 |
Mixed | Python | add token_splitter component | bf0cdae8d41f2cc458739c2512c8bdb625757770 | <ide><path>spacy/pipeline/functions.py
<add>import srsly
<add>from thinc.api import Config
<add>from typing import Dict, Any
<ide> from ..language import Language
<ide> from ..matcher import Matcher
<ide> from ..tokens import Doc
<ide> from ..util import filter_spans
<add>from .. import util
<ide>
<ide>
<ide> @Language.component(
<ide> def merge_subtokens(doc: Doc, label: str = "subtok") -> Doc:
<ide> for span in spans:
<ide> retokenizer.merge(span)
<ide> return doc
<add>
<add>
<add>@Language.factory(
<add> "token_splitter",
<add> default_config={"min_length": 25, "split_length": 10},
<add> retokenizes=True,
<add>)
<add>def make_token_splitter(
<add> nlp: Language,
<add> name: str,
<add> *,
<add> min_length=0,
<add> split_length=0,
<add>):
<add> return TokenSplitter(
<add> min_length=min_length, split_length=split_length
<add> )
<add>
<add>
<add>class TokenSplitter:
<add> def __init__(self, min_length: int = 0, split_length: int = 0):
<add> self.min_length = min_length
<add> self.split_length = split_length
<add>
<add> def __call__(self, doc: Doc) -> Doc:
<add> if self.min_length > 0 and self.split_length > 0:
<add> with doc.retokenize() as retokenizer:
<add> for t in doc:
<add> if len(t.text) >= self.min_length:
<add> orths = []
<add> heads = []
<add> attrs = {}
<add> for i in range(0, len(t.text), self.split_length):
<add> orths.append(t.text[i : i + self.split_length])
<add> heads.append((t, i / self.split_length))
<add> retokenizer.split(t, orths, heads, attrs)
<add> return doc
<add>
<add> def _get_config(self) -> Dict[str, Any]:
<add> return {
<add> "min_length": self.min_length,
<add> "split_length": self.split_length,
<add> }
<add>
<add> def _set_config(self, config: Dict[str, Any] = {}) -> None:
<add> self.min_length = config.get("min_length", 0)
<add> self.split_length = config.get("split_length", 0)
<add>
<add> def to_bytes(self, **kwargs):
<add> serializers = {
<add> "cfg": lambda: srsly.json_dumps(self._get_config()),
<add> }
<add> return util.to_bytes(serializers, [])
<add>
<add> def from_bytes(self, data, **kwargs):
<add> deserializers = {
<add> "cfg": lambda b: self._set_config(srsly.json_loads(b)),
<add> }
<add> util.from_bytes(data, deserializers, [])
<add> return self
<add>
<add> def to_disk(self, path, **kwargs):
<add> path = util.ensure_path(path)
<add> serializers = {
<add> "cfg": lambda p: srsly.write_json(p, self._get_config()),
<add> }
<add> return util.to_disk(path, serializers, [])
<add>
<add> def from_disk(self, path, **kwargs):
<add> path = util.ensure_path(path)
<add> serializers = {
<add> "cfg": lambda p: self._set_config(srsly.read_json(p)),
<add> }
<add> util.from_disk(path, serializers, [])
<ide><path>spacy/tests/pipeline/test_functions.py
<ide> def test_factories_merge_ents(doc2):
<ide> assert len(doc2) == 6
<ide> assert len(list(doc2.ents)) == 1
<ide> assert doc2[2].text == "New York"
<add>
<add>
<add>def test_token_splitter():
<add> nlp = Language()
<add> config = {"min_length": 20, "split_length": 5}
<add> token_splitter = nlp.add_pipe("token_splitter", config=config)
<add> doc = nlp("aaaaabbbbbcccccdddd e f g")
<add> assert [t.text for t in doc] == ["aaaaabbbbbcccccdddd", "e", "f", "g"]
<add> doc = nlp("aaaaabbbbbcccccdddddeeeeeff g h i")
<add> assert [t.text for t in doc] == [
<add> "aaaaa",
<add> "bbbbb",
<add> "ccccc",
<add> "ddddd",
<add> "eeeee",
<add> "ff",
<add> "g",
<add> "h",
<add> "i",
<add> ]
<add> assert all(len(t.text) <= token_splitter.split_length for t in doc)
<ide><path>website/docs/api/pipeline-functions.md
<ide> menu:
<ide> - ['merge_noun_chunks', 'merge_noun_chunks']
<ide> - ['merge_entities', 'merge_entities']
<ide> - ['merge_subtokens', 'merge_subtokens']
<add> - ['token_splitter', 'token_splitter']
<ide> ---
<ide>
<ide> ## merge_noun_chunks {#merge_noun_chunks tag="function"}
<ide> end of the pipeline and after all other components.
<ide> | `doc` | The `Doc` object to process, e.g. the `Doc` in the pipeline. ~~Doc~~ |
<ide> | `label` | The subtoken dependency label. Defaults to `"subtok"`. ~~str~~ |
<ide> | **RETURNS** | The modified `Doc` with merged subtokens. ~~Doc~~ |
<add>
<add>## token_splitter {#token_splitter tag="function" new="3.0"}
<add>
<add>Split tokens longer than a minimum length into shorter tokens. Intended for use
<add>with transformer pipelines where long spaCy tokens lead to input text that
<add>exceed the transformer model max length. See
<add>[managing transformer model max length limitations](/usage/embeddings-transformers#transformer-max-length).
<add>
<add>> #### Example
<add>>
<add>> ```python
<add>> config={"min_length": 20, "split_length": 5}
<add>> nlp.add_pipe("token_splitter", config=config, first=True)
<add>> doc = nlp("aaaaabbbbbcccccdddddee")
<add>> print([token.text for token in doc])
<add>> # ['aaaaa', 'bbbbb', 'ccccc', 'ddddd', 'ee']
<add>> ```
<add>
<add>| Setting | Description |
<add>| -------------- | --------------------------------------------------------------------- |
<add>| `min_length` | The minimum length for a token to be split. Defaults to `25`. ~~int~~ |
<add>| `split_length` | The length of the split tokens. Defaults to `5`. ~~int~~ |
<ide><path>website/docs/usage/embeddings-transformers.md
<ide> custom learning rate for each component. Instead of a constant, you can also
<ide> provide a schedule, allowing you to freeze the shared parameters at the start of
<ide> training.
<ide>
<add>### Managing transformer model max length limitations {#transformer-max-length}
<add>
<add>Many transformer models have a limit on the maximum number of tokens that the
<add>model can process, for example BERT models are limited to 512 tokens. This limit
<add>refers to the number of transformer tokens (BPE, WordPiece, etc.), not the
<add>number of spaCy tokens.
<add>
<add>To be able to process longer texts, the spaCy [`transformer`](/api/transformer)
<add>component uses [`span_getters`](/api/transformer#span_getters) to convert a
<add>batch of [`Doc`](/api/doc) objects into lists of [`Span`](/api/span) objects. A
<add>span may correspond to a doc (for `doc_spans`), a sentence (for `sent_spans`) or
<add>a window of spaCy tokens (`strided_spans`). If a single span corresponds to more
<add>transformer tokens than the transformer model supports, the spaCy pipeline can't
<add>process the text because some spaCy tokens would be left without an analysis.
<add>
<add>In general, it is up to the transformer pipeline user to manage the input texts
<add>so that the model max length is not exceeded. If you're training a **new
<add>pipeline**, you have a number of options to handle the max length limit:
<add>
<add>- Use `doc_spans` with short texts only
<add>- Use `sent_spans` with short sentences only
<add>- For `strided_spans`, lower the `window` size to be short enough for your input
<add> texts (and don't forget to lower the `stride` correspondingly)
<add>- Implement a [custom span getter](#transformers-training-custom-settings)
<add>
<add>You may still run into the max length limit if a single spaCy token is very
<add>long, like a long URL or a noisy string, or if you're using a **pretrained
<add>pipeline** like `en_core_web_trf` with a fixed `window` size for
<add>`strided_spans`. In this case, you need to modify either your texts or your
<add>pipeline so that you have shorter spaCy tokens. Some options:
<add>
<add>- Preprocess your texts to clean up noise and split long tokens with whitespace
<add>- Add a `token_splitter` to the beginning of your pipeline to break up
<add> tokens that are longer than a specified length:
<add>
<add> ```python
<add> config={"min_length": 20, "split_length": 5}
<add> nlp.add_pipe("token_splitter", config=config, first=True)
<add> ```
<add>
<add> In this example, tokens that are at least 20 characters long will be split up
<add> into smaller tokens of 5 characters each, resulting in strided spans that
<add> correspond to fewer transformer tokens.
<add>
<ide> ## Static vectors {#static-vectors}
<ide>
<ide> If your pipeline includes a **word vectors table**, you'll be able to use the | 4 |
Javascript | Javascript | add portal support to react.children calls | 0752a63f59168b56b14ec45220bad97b39222604 | <ide><path>packages/react/src/ReactChildren.js
<ide> var FAUX_ITERATOR_SYMBOL = '@@iterator'; // Before Symbol spec.
<ide> var REACT_ELEMENT_TYPE =
<ide> (typeof Symbol === 'function' && Symbol.for && Symbol.for('react.element')) ||
<ide> 0xeac7;
<del>
<add>const REACT_PORTAL_TYPE =
<add> (typeof Symbol === 'function' && Symbol.for && Symbol.for('react.portal')) ||
<add> 0xeaca;
<ide> var SEPARATOR = '.';
<ide> var SUBSEPARATOR = ':';
<ide>
<ide> function traverseAllChildrenImpl(
<ide> type === 'number' ||
<ide> // The following is inlined from ReactElement. This means we can optimize
<ide> // some checks. React Fiber also inlines this logic for similar purposes.
<del> (type === 'object' && children.$$typeof === REACT_ELEMENT_TYPE)
<add> (type === 'object' && children.$$typeof === REACT_ELEMENT_TYPE) ||
<add> (type === 'object' && children.$$typeof === REACT_PORTAL_TYPE)
<ide> ) {
<ide> callback(
<ide> traverseContext,
<ide><path>packages/react/src/__tests__/ReactChildren-test.js
<ide> describe('ReactChildren', () => {
<ide> expect(mappedChildren[0]).toEqual(<span key=".$simple" />);
<ide> });
<ide>
<add> it('should support Portal components', () => {
<add> const context = {};
<add> const callback = jasmine.createSpy().and.callFake(function(kid, index) {
<add> expect(this).toBe(context);
<add> return kid;
<add> });
<add> const ReactDOM = require('react-dom');
<add> const portalContainer = document.createElement('div');
<add>
<add> const simpleChild = <span key="simple" />;
<add> const portal = ReactDOM.createPortal(simpleChild, portalContainer);
<add> const instance = <div>{portal}</div>;
<add>
<add> React.Children.forEach(instance.props.children, callback, context);
<add> expect(callback).toHaveBeenCalledWith(portal, 0);
<add> callback.calls.reset();
<add> const mappedChildren = React.Children.map(
<add> instance.props.children,
<add> callback,
<add> context,
<add> );
<add> expect(callback).toHaveBeenCalledWith(portal, 0);
<add> expect(mappedChildren[0]).toEqual(portal);
<add> });
<add>
<ide> it('should treat single arrayless child as being in array', () => {
<ide> var context = {};
<ide> var callback = jasmine.createSpy().and.callFake(function(kid, index) { | 2 |
Ruby | Ruby | create project view with subl, too | 5ce864176a34215b0991ed30b93732b830dced66 | <ide><path>Library/Homebrew/cmd/edit.rb
<ide> def edit
<ide> # If no brews are listed, open the project root in an editor.
<ide> if ARGV.named.empty?
<ide> editor = File.basename which_editor
<del> if editor == "mate"
<del> # If the user is using TextMate, give a nice project view instead.
<del> exec 'mate', HOMEBREW_REPOSITORY+"bin/brew",
<add> if editor == "mate" or editor == "subl"
<add> # If the user is using TextMate or Sublime Text,
<add> # give a nice project view instead.
<add> exec editor, HOMEBREW_REPOSITORY+"bin/brew",
<ide> HOMEBREW_REPOSITORY+'README.md',
<ide> HOMEBREW_REPOSITORY+".gitignore",
<ide> *library_folders | 1 |
Ruby | Ruby | remove hardcoded paths in keg tests | bca37554b7f0f7fbd671ee714e435445141137f0 | <ide><path>Library/Homebrew/test/test_keg.rb
<ide> def test_link_dry_run
<ide> assert [email protected]?
<ide>
<ide> ['hiworld', 'helloworld', 'goodbye_cruel_world'].each do |file|
<del> assert_match "/private/tmp/testbrew/prefix/bin/#{file}", $stdout.string
<add> assert_match "#{HOMEBREW_PREFIX}/bin/#{file}", $stdout.string
<ide> end
<ide> assert_equal 3, $stdout.string.lines.count
<ide> end
<ide> def test_link_overwrite_dryrun
<ide> assert_equal 0, @keg.link(@mode)
<ide> assert [email protected]?
<ide>
<del> assert_equal "/private/tmp/testbrew/prefix/bin/helloworld\n", $stdout.string
<add> assert_equal "#{HOMEBREW_PREFIX}/bin/helloworld\n", $stdout.string
<ide> end
<ide>
<ide> def teardown | 1 |
Go | Go | use flag constants for swarm flag | c08a50dbd1bac8a0bf1c9be5e617273c19a54a2e | <ide><path>api/client/swarm/init.go
<ide> type initOptions struct {
<ide> }
<ide>
<ide> func newInitCommand(dockerCli *client.DockerCli) *cobra.Command {
<del> var flags *pflag.FlagSet
<ide> opts := initOptions{
<ide> listenAddr: NewListenAddrOption(),
<ide> autoAccept: NewAutoAcceptOption(),
<ide> func newInitCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> Short: "Initialize a Swarm",
<ide> Args: cli.NoArgs,
<ide> RunE: func(cmd *cobra.Command, args []string) error {
<del> return runInit(dockerCli, flags, opts)
<add> return runInit(dockerCli, cmd.Flags(), opts)
<ide> },
<ide> }
<ide>
<del> flags = cmd.Flags()
<del> flags.Var(&opts.listenAddr, flagListenAddr, "Listen address")
<add> flags := cmd.Flags()
<add> flags.Var(&opts.listenAddr, "listen-addr", "Listen address")
<ide> flags.Var(&opts.autoAccept, flagAutoAccept, "Auto acceptance policy (worker, manager, or none)")
<ide> flags.StringVar(&opts.secret, flagSecret, "", "Set secret value needed to accept nodes into cluster")
<ide> flags.BoolVar(&opts.forceNewCluster, "force-new-cluster", false, "Force create a new cluster from current state.")
<ide> func runInit(dockerCli *client.DockerCli, flags *pflag.FlagSet, opts initOptions
<ide> ForceNewCluster: opts.forceNewCluster,
<ide> }
<ide>
<del> if flags.Changed("secret") {
<add> if flags.Changed(flagSecret) {
<ide> req.Spec.AcceptancePolicy.Policies = opts.autoAccept.Policies(&opts.secret)
<ide> } else {
<ide> req.Spec.AcceptancePolicy.Policies = opts.autoAccept.Policies(nil)
<ide><path>api/client/swarm/join.go
<ide> func newJoinCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> flags := cmd.Flags()
<ide> flags.Var(&opts.listenAddr, flagListenAddr, "Listen address")
<ide> flags.BoolVar(&opts.manager, "manager", false, "Try joining as a manager.")
<del> flags.StringVar(&opts.secret, "secret", "", "Secret for node acceptance")
<add> flags.StringVar(&opts.secret, flagSecret, "", "Secret for node acceptance")
<ide> flags.StringVar(&opts.CACertHash, "ca-hash", "", "Hash of the Root Certificate Authority certificate used for trusted join")
<ide> return cmd
<ide> }
<ide><path>api/client/swarm/leave.go
<ide> func runLeave(dockerCli *client.DockerCli, opts leaveOptions) error {
<ide> return err
<ide> }
<ide>
<del> fmt.Fprintln(dockerCli.Out(), "Node left the default swarm.")
<add> fmt.Fprintln(dockerCli.Out(), "Node left the swarm.")
<ide> return nil
<ide> }
<ide><path>api/client/swarm/update.go
<ide> type updateOptions struct {
<ide>
<ide> func newUpdateCommand(dockerCli *client.DockerCli) *cobra.Command {
<ide> opts := updateOptions{autoAccept: NewAutoAcceptOption()}
<del> var flags *pflag.FlagSet
<ide>
<ide> cmd := &cobra.Command{
<ide> Use: "update",
<ide> Short: "Update the Swarm",
<ide> Args: cli.NoArgs,
<ide> RunE: func(cmd *cobra.Command, args []string) error {
<del> return runUpdate(dockerCli, flags, opts)
<add> return runUpdate(dockerCli, cmd.Flags(), opts)
<ide> },
<ide> }
<ide>
<del> flags = cmd.Flags()
<del> flags.Var(&opts.autoAccept, "auto-accept", "Auto acceptance policy (worker, manager or none)")
<del> flags.StringVar(&opts.secret, "secret", "", "Set secret value needed to accept nodes into cluster")
<del> flags.Int64Var(&opts.taskHistoryLimit, "task-history-limit", 10, "Task history retention limit")
<del> flags.DurationVar(&opts.dispatcherHeartbeat, "dispatcher-heartbeat", time.Duration(5*time.Second), "Dispatcher heartbeat period")
<del> flags.DurationVar(&opts.nodeCertExpiry, "cert-expiry", time.Duration(90*24*time.Hour), "Validity period for node certificates")
<add> flags := cmd.Flags()
<add> flags.Var(&opts.autoAccept, flagAutoAccept, "Auto acceptance policy (worker, manager or none)")
<add> flags.StringVar(&opts.secret, flagSecret, "", "Set secret value needed to accept nodes into cluster")
<add> flags.Int64Var(&opts.taskHistoryLimit, flagTaskHistoryLimit, 10, "Task history retention limit")
<add> flags.DurationVar(&opts.dispatcherHeartbeat, flagDispatcherHeartbeat, time.Duration(5*time.Second), "Dispatcher heartbeat period")
<add> flags.DurationVar(&opts.nodeCertExpiry, flagCertExpiry, time.Duration(90*24*time.Hour), "Validity period for node certificates")
<ide> return cmd
<ide> }
<ide>
<ide> func runUpdate(dockerCli *client.DockerCli, flags *pflag.FlagSet, opts updateOpt
<ide> func mergeSwarm(swarm *swarm.Swarm, flags *pflag.FlagSet) error {
<ide> spec := &swarm.Spec
<ide>
<del> if flags.Changed("auto-accept") {
<del> value := flags.Lookup("auto-accept").Value.(*AutoAcceptOption)
<add> if flags.Changed(flagAutoAccept) {
<add> value := flags.Lookup(flagAutoAccept).Value.(*AutoAcceptOption)
<ide> spec.AcceptancePolicy.Policies = value.Policies(nil)
<ide> }
<ide>
<ide> var psecret *string
<del> if flags.Changed("secret") {
<del> secret, _ := flags.GetString("secret")
<add> if flags.Changed(flagSecret) {
<add> secret, _ := flags.GetString(flagSecret)
<ide> psecret = &secret
<ide> }
<ide>
<ide> for i := range spec.AcceptancePolicy.Policies {
<ide> spec.AcceptancePolicy.Policies[i].Secret = psecret
<ide> }
<ide>
<del> if flags.Changed("task-history-limit") {
<del> spec.Orchestration.TaskHistoryRetentionLimit, _ = flags.GetInt64("task-history-limit")
<add> if flags.Changed(flagTaskHistoryLimit) {
<add> spec.Orchestration.TaskHistoryRetentionLimit, _ = flags.GetInt64(flagTaskHistoryLimit)
<ide> }
<ide>
<del> if flags.Changed("dispatcher-heartbeat") {
<del> if v, err := flags.GetDuration("dispatcher-heartbeat"); err == nil {
<add> if flags.Changed(flagDispatcherHeartbeat) {
<add> if v, err := flags.GetDuration(flagDispatcherHeartbeat); err == nil {
<ide> spec.Dispatcher.HeartbeatPeriod = uint64(v.Nanoseconds())
<ide> }
<ide> }
<ide>
<del> if flags.Changed("cert-expiry") {
<del> if v, err := flags.GetDuration("cert-expiry"); err == nil {
<add> if flags.Changed(flagCertExpiry) {
<add> if v, err := flags.GetDuration(flagCertExpiry); err == nil {
<ide> spec.CAConfig.NodeCertExpiry = v
<ide> }
<ide> } | 4 |
Javascript | Javascript | allow multiple loading of function modules | 2f0a4488731fdb0e8217325dbb52a576defd09bd | <ide><path>src/apis.js
<ide> * @returns {string} hash string such that the same input will have the same hash string.
<ide> * The resulting string key is in 'type:hashKey' format.
<ide> */
<del>function hashKey(obj) {
<add>function hashKey(obj, nextUidFn) {
<ide> var objType = typeof obj,
<ide> key;
<ide>
<del> if (objType == 'object' && obj !== null) {
<add> if (objType == 'function' || (objType == 'object' && obj !== null)) {
<ide> if (typeof (key = obj.$$hashKey) == 'function') {
<ide> // must invoke on object to keep the right this
<ide> key = obj.$$hashKey();
<ide> } else if (key === undefined) {
<del> key = obj.$$hashKey = nextUid();
<add> key = obj.$$hashKey = (nextUidFn || nextUid)();
<ide> }
<ide> } else {
<ide> key = obj;
<ide> function hashKey(obj) {
<ide> /**
<ide> * HashMap which can use objects as keys
<ide> */
<del>function HashMap(array){
<add>function HashMap(array, isolatedUid) {
<add> if (isolatedUid) {
<add> var uid = 0;
<add> this.nextUid = function() {
<add> return ++uid;
<add> };
<add> }
<ide> forEach(array, this.put, this);
<ide> }
<ide> HashMap.prototype = {
<ide> HashMap.prototype = {
<ide> * @param value value to store can be any type
<ide> */
<ide> put: function(key, value) {
<del> this[hashKey(key)] = value;
<add> this[hashKey(key, this.nextUid)] = value;
<ide> },
<ide>
<ide> /**
<ide> * @param key
<ide> * @returns {Object} the value for the key
<ide> */
<ide> get: function(key) {
<del> return this[hashKey(key)];
<add> return this[hashKey(key, this.nextUid)];
<ide> },
<ide>
<ide> /**
<ide> * Remove the key/value pair
<ide> * @param key
<ide> */
<ide> remove: function(key) {
<del> var value = this[key = hashKey(key)];
<add> var value = this[key = hashKey(key, this.nextUid)];
<ide> delete this[key];
<ide> return value;
<ide> }
<ide><path>src/auto/injector.js
<ide> function createInjector(modulesToLoad, strictDi) {
<ide> var INSTANTIATING = {},
<ide> providerSuffix = 'Provider',
<ide> path = [],
<del> loadedModules = new HashMap(),
<add> loadedModules = new HashMap([], true),
<ide> providerCache = {
<ide> $provide: {
<ide> provider: supportObject(provider),
<ide><path>src/ngMock/angular-mocks.js
<ide> if(window.jasmine || window.mocha) {
<ide> (window.afterEach || window.teardown)(function() {
<ide> var injector = currentSpec.$injector;
<ide>
<add> angular.forEach(currentSpec.$modules, function(module) {
<add> if (module && module.$$hashKey) {
<add> module.$$hashKey = undefined;
<add> }
<add> });
<add>
<ide> currentSpec.$injector = null;
<ide> currentSpec.$modules = null;
<ide> currentSpec = null;
<ide><path>test/ApiSpecs.js
<ide> describe('api', function() {
<ide> expect(map.get('b')).toBe(1);
<ide> expect(map.get('c')).toBe(undefined);
<ide> });
<add>
<add> it('should maintain hashKey for object keys', function() {
<add> var map = new HashMap();
<add> var key = {};
<add> map.get(key);
<add> expect(key.$$hashKey).toBeDefined();
<add> });
<add>
<add> it('should maintain hashKey for function keys', function() {
<add> var map = new HashMap();
<add> var key = function() {};
<add> map.get(key);
<add> expect(key.$$hashKey).toBeDefined();
<add> });
<add>
<add> it('should share hashKey between HashMap by default', function() {
<add> var map1 = new HashMap(), map2 = new HashMap();
<add> var key1 = {}, key2 = {};
<add> map1.get(key1);
<add> map2.get(key2);
<add> expect(key1.$$hashKey).not.toEqual(key2.$$hashKey);
<add> });
<add>
<add> it('should maintain hashKey per HashMap if flag is passed', function() {
<add> var map1 = new HashMap([], true), map2 = new HashMap([], true);
<add> var key1 = {}, key2 = {};
<add> map1.get(key1);
<add> map2.get(key2);
<add> expect(key1.$$hashKey).toEqual(key2.$$hashKey);
<add> });
<ide> });
<ide> });
<ide>
<ide><path>test/auto/injectorSpec.js
<ide> describe('injector', function() {
<ide> expect(log).toEqual('abc');
<ide> });
<ide>
<add> it('should load different instances of dependent functions', function() {
<add> function generateValueModule(name, value) {
<add> return function ($provide) {
<add> $provide.value(name, value);
<add> };
<add> }
<add> var injector = createInjector([generateValueModule('name1', 'value1'),
<add> generateValueModule('name2', 'value2')]);
<add> expect(injector.get('name2')).toBe('value2');
<add> });
<add>
<add> it('should load same instance of dependent function only once', function() {
<add> var count = 0;
<add> function valueModule($provide) {
<add> count++;
<add> $provide.value('name', 'value');
<add> }
<add>
<add> var injector = createInjector([valueModule, valueModule]);
<add> expect(injector.get('name')).toBe('value');
<add> expect(count).toBe(1);
<add> });
<add>
<ide> it('should execute runBlocks after injector creation', function() {
<ide> var log = '';
<ide> angular.module('a', [], function(){ log += 'a'; }).run(function() { log += 'A'; });
<ide><path>test/ngMock/angular-mocksSpec.js
<ide> describe('ngMock', function() {
<ide> expect(example).toEqual('win');
<ide> });
<ide> });
<add>
<add> describe('module cleanup', function() {
<add> function testFn() {
<add>
<add> }
<add>
<add> it('should add hashKey to module function', function() {
<add> module(testFn);
<add> inject(function () {
<add> expect(testFn.$$hashKey).toBeDefined();
<add> });
<add> });
<add>
<add> it('should cleanup hashKey after previous test', function() {
<add> expect(testFn.$$hashKey).toBeUndefined();
<add> });
<add> });
<ide> });
<ide>
<ide> describe('in DSL', function() { | 6 |
Text | Text | add missing entries in ap changelog | 0654f76a5ae5c7c34d22da0f732d45d9300c0762 | <ide><path>actionpack/CHANGELOG.md
<ide> ## Rails 4.0.0 (unreleased) ##
<ide>
<add>* Sprockets integration has been extracted from Action Pack and the `sprockets-rails`
<add> gem should be added to Gemfile (under the assets group) in order to use Rails asset
<add> pipeline in future versions of Rails.
<add>
<add> *Guillermo Iguaran*
<add>
<add>* `ActionDispatch::Session::MemCacheStore` now uses `dalli` instead of the deprecated
<add> `memcache-client` gem. As side effect the autoloading of unloaded classes objects
<add> saved as values in session isn't supported anymore when mem_cache session store is
<add> used, this can have an impact in apps only when config.cache_classes is false.
<add>
<add> *Arun Agrawal + Guillermo Iguaran*
<add>
<ide> * Support multiple etags in If-None-Match header. *Travis Warlick*
<ide>
<ide> * Allow to configure how unverified request will be handled using `:with` | 1 |
Ruby | Ruby | fix rubocop warnings | dab681e71563b8d633d290f2f381a2101b56a3d8 | <ide><path>Library/Homebrew/requirements/perl_requirement.rb
<ide> class PerlRequirement < Requirement
<ide> default_formula "perl"
<ide>
<ide> def initialize(tags)
<del> @version = tags.shift if /^\d+\.\d+$/ === tags.first
<add> @version = tags.shift if /^\d+\.\d+$/ =~ tags.first
<ide> raise "PerlRequirement requires a version!" unless @version
<ide> super
<ide> end | 1 |
Ruby | Ruby | remove ruby 1.8 support code | 44bd9719e938dd8e8e6764f00d2c6ba40b4d34e3 | <ide><path>railties/lib/rails/generators/base.rb
<ide> def class_collisions(*class_names) #:nodoc:
<ide> nesting = class_name.split('::')
<ide> last_name = nesting.pop
<ide>
<del> # Hack to limit const_defined? to non-inherited on 1.9
<del> extra = []
<del> extra << false unless Object.method(:const_defined?).arity == 1
<del>
<ide> # Extract the last Module in the nesting
<ide> last = nesting.inject(Object) do |last_module, nest|
<del> break unless last_module.const_defined?(nest, *extra)
<add> break unless last_module.const_defined?(nest, false)
<ide> last_module.const_get(nest)
<ide> end
<ide>
<del> if last && last.const_defined?(last_name.camelize, *extra)
<add> if last && last.const_defined?(last_name.camelize, false)
<ide> raise Error, "The name '#{class_name}' is either already used in your application " <<
<ide> "or reserved by Ruby on Rails. Please choose an alternative and run " <<
<ide> "this generator again." | 1 |
Javascript | Javascript | convert anim values back to arrays | 5b1441a0e9422d25bf0343aae9fca2f3df8b5504 | <ide><path>examples/js/loaders/FBXLoader.js
<ide>
<ide> if ( rawTracks.transform ) rawTracks.transform.decompose( initialPosition, initialRotation, initialScale );
<ide>
<add> initialPosition = initialPosition.toArray();
<add> initialRotation = new THREE.Euler().setFromQuaternion( initialRotation ).toArray(); // todo: euler order
<add> initialScale = initialScale.toArray();
<add>
<ide> if ( rawTracks.T !== undefined && Object.keys( rawTracks.T.curves ).length > 0 ) {
<ide>
<ide> var positionTrack = generateVectorTrack( rawTracks.modelName, rawTracks.T.curves, initialPosition, 'position' ); | 1 |
Javascript | Javascript | remove stray copyright notices | c2b8b308369e37b31c00fd30918e2dd84522b993 | <ide><path>test/message/eval_messages.js
<ide> 'use strict';
<del>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<ide>
<ide> var common = require('../common');
<ide> var assert = require('assert');
<ide><path>test/message/stdin_messages.js
<ide> 'use strict';
<del>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<ide>
<ide> var common = require('../common');
<ide> var assert = require('assert');
<ide><path>test/parallel/test-event-emitter-listeners-side-effects.js
<ide> 'use strict';
<del>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<ide>
<ide> var common = require('../common');
<ide> var assert = require('assert');
<ide><path>test/parallel/test-event-emitter-listeners.js
<ide> 'use strict';
<del>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<ide>
<ide> var common = require('../common');
<ide> var assert = require('assert'); | 4 |
Ruby | Ruby | determine revision more reliably | 1b7718f6a744881c853f258e40ab874cd975db58 | <ide><path>Library/Homebrew/cmd/bottle.rb
<ide> def bottle_formula f
<ide> return ofail "Formula not installed with '--build-bottle': #{f.name}"
<ide> end
<ide>
<del> master_bottle_filenames = f.bottle_filenames 'origin/master'
<del> bottle_revision = -1
<del> begin
<del> bottle_revision += 1
<del> filename = bottle_filename(f, :tag => bottle_tag, :revision => bottle_revision)
<del> end while not ARGV.include? '--no-revision' \
<del> and master_bottle_filenames.include? filename
<add> if ARGV.include? '--no-revision'
<add> bottle_revision = 0
<add> else
<add> max = f.bottle_version_map('origin/master')[f.version].max
<add> bottle_revision = max ? max + 1 : 0
<add> end
<add>
<add> filename = bottle_filename(f, :tag => bottle_tag, :revision => bottle_revision)
<ide>
<ide> if bottle_filename_formula_name(filename).empty?
<ide> return ofail "Add a new regex to bottle_version.rb to parse the bottle filename."
<ide><path>Library/Homebrew/cmd/versions.rb
<ide> def versions
<ide> return versions
<ide> end
<ide>
<del> def bottle_filenames branch='HEAD'
<del> filenames = []
<del> rev_list(branch).each do |sha|
<del> filename = formula_for_sha(sha) do |f|
<del> bottle_block = f.class.send(:bottle)
<del> unless bottle_block.checksums.empty?
<del> bottle_filename f, :revision => bottle_block.revision
<add> def bottle_version_map branch='HEAD'
<add> map = Hash.new { |h, k| h[k] = [] }
<add> rev_list(branch).each do |rev|
<add> formula_for_sha(rev) do |f|
<add> bottle = f.class.send(:bottle)
<add> unless bottle.checksums.empty?
<add> map[bottle.version] << bottle.revision
<ide> end
<ide> end
<del> unless filenames.include? filename or filename.nil?
<del> filenames << filename
<del> end
<ide> end
<del> filenames
<add> map
<ide> end
<ide>
<ide> def pretty_relative_path
<ide> def entry_name
<ide>
<ide> def rev_list branch='HEAD'
<ide> repository.cd do
<del> `git rev-list --abbrev-commit #{branch} -- #{entry_name}`.split
<add> `git rev-list --abbrev-commit --remove-empty #{branch} -- #{entry_name}`.split
<ide> end
<ide> end
<ide> | 2 |
Javascript | Javascript | improve inspect edge cases | 892c51f330f99b7eb57481e183df61530a9c5956 | <ide><path>lib/internal/util/inspect.js
<ide> function formatProxy(ctx, proxy, recurseTimes) {
<ide> formatValue(ctx, proxy[1], recurseTimes)
<ide> ];
<ide> ctx.indentationLvl -= 2;
<del> return reduceToSingleString(ctx, res, '', ['Proxy [', ']']);
<add> return reduceToSingleString(
<add> ctx, res, '', ['Proxy [', ']'], kArrayExtrasType, recurseTimes);
<ide> }
<ide>
<ide> function findTypedConstructor(value) {
<ide> function formatRaw(ctx, value, recurseTimes, typedArray) {
<ide> }
<ide> }
<ide>
<del> let combine = false;
<del> if (typeof ctx.compact === 'number') {
<del> // Memorize the original output length. In case the the output is grouped,
<del> // prevent lining up the entries on a single line.
<del> const entries = output.length;
<del> // Group array elements together if the array contains at least six separate
<del> // entries.
<del> if (extrasType === kArrayExtrasType && output.length > 6) {
<del> output = groupArrayElements(ctx, output);
<del> }
<del> // `ctx.currentDepth` is set to the most inner depth of the currently
<del> // inspected object part while `recurseTimes` is the actual current depth
<del> // that is inspected.
<del> //
<del> // Example:
<del> //
<del> // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } }
<del> //
<del> // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max
<del> // depth of 1.
<del> //
<del> // Consolidate all entries of the local most inner depth up to
<del> // `ctx.compact`, as long as the properties are smaller than
<del> // `ctx.breakLength`.
<del> if (ctx.currentDepth - recurseTimes < ctx.compact &&
<del> entries === output.length) {
<del> combine = true;
<del> }
<del> }
<del>
<del> const res = reduceToSingleString(ctx, output, base, braces, combine);
<add> const res = reduceToSingleString(
<add> ctx, output, base, braces, extrasType, recurseTimes);
<ide> const budget = ctx.budget[ctx.indentationLvl] || 0;
<ide> const newLength = budget + res.length;
<ide> ctx.budget[ctx.indentationLvl] = newLength;
<ide> function formatBigInt(fn, value) {
<ide> function formatPrimitive(fn, value, ctx) {
<ide> if (typeof value === 'string') {
<ide> if (ctx.compact !== true &&
<del> ctx.indentationLvl + value.length > ctx.breakLength &&
<del> value.length > kMinLineLength) {
<del> const rawMaxLineLength = ctx.breakLength - ctx.indentationLvl;
<add> ctx.indentationLvl + value.length + 4 > ctx.breakLength &&
<add> value.length > kMinLineLength) {
<add> // Subtract the potential quotes, the space and the plus as well (4).
<add> const rawMaxLineLength = ctx.breakLength - ctx.indentationLvl - 4;
<ide> const maxLineLength = Math.max(rawMaxLineLength, kMinLineLength);
<ide> const lines = Math.ceil(value.length / maxLineLength);
<ide> const averageLineLength = Math.ceil(value.length / lines);
<ide> function formatMapIterInner(ctx, recurseTimes, entries, state) {
<ide> formatValue(ctx, entries[pos], recurseTimes),
<ide> formatValue(ctx, entries[pos + 1], recurseTimes)
<ide> ];
<del> output[i] = reduceToSingleString(ctx, res, '', ['[', ']']);
<add> output[i] = reduceToSingleString(
<add> ctx, res, '', ['[', ']'], kArrayExtrasType, recurseTimes);
<ide> }
<ide> }
<ide> ctx.indentationLvl -= 2;
<ide> function isBelowBreakLength(ctx, output, start, base) {
<ide> return base === '' || !base.includes('\n');
<ide> }
<ide>
<del>function reduceToSingleString(ctx, output, base, braces, combine = false) {
<add>function reduceToSingleString(
<add> ctx, output, base, braces, extrasType, recurseTimes) {
<ide> if (ctx.compact !== true) {
<del> if (combine) {
<del> // Line up all entries on a single line in case the entries do not exceed
<del> // `breakLength`. Add 10 as constant to start next to all other factors
<del> // that may reduce `breakLength`.
<del> const start = output.length + ctx.indentationLvl +
<del> braces[0].length + base.length + 10;
<del> if (isBelowBreakLength(ctx, output, start, base)) {
<del> return `${base ? `${base} ` : ''}${braces[0]} ${join(output, ', ')} ` +
<del> braces[1];
<add> if (typeof ctx.compact === 'number' && ctx.compact >= 1) {
<add> // Memorize the original output length. In case the the output is grouped,
<add> // prevent lining up the entries on a single line.
<add> const entries = output.length;
<add> // Group array elements together if the array contains at least six
<add> // separate entries.
<add> if (extrasType === kArrayExtrasType && entries > 6) {
<add> output = groupArrayElements(ctx, output);
<add> }
<add> // `ctx.currentDepth` is set to the most inner depth of the currently
<add> // inspected object part while `recurseTimes` is the actual current depth
<add> // that is inspected.
<add> //
<add> // Example:
<add> //
<add> // const a = { first: [ 1, 2, 3 ], second: { inner: [ 1, 2, 3 ] } }
<add> //
<add> // The deepest depth of `a` is 2 (a.second.inner) and `a.first` has a max
<add> // depth of 1.
<add> //
<add> // Consolidate all entries of the local most inner depth up to
<add> // `ctx.compact`, as long as the properties are smaller than
<add> // `ctx.breakLength`.
<add> if (ctx.currentDepth - recurseTimes < ctx.compact &&
<add> entries === output.length) {
<add> // Line up all entries on a single line in case the entries do not
<add> // exceed `breakLength`. Add 10 as constant to start next to all other
<add> // factors that may reduce `breakLength`.
<add> const start = output.length + ctx.indentationLvl +
<add> braces[0].length + base.length + 10;
<add> if (isBelowBreakLength(ctx, output, start, base)) {
<add> return `${base ? `${base} ` : ''}${braces[0]} ${join(output, ', ')}` +
<add> ` ${braces[1]}`;
<add> }
<ide> }
<ide> }
<ide> // Line up each entry on an individual line.
<ide><path>test/parallel/test-util-inspect.js
<ide> assert.strictEqual(util.inspect('"\'${a}'), "'\"\\'${a}'");
<ide> return 'BazError';
<ide> }
<ide> }, undefined]
<del>].forEach(([Class, message, messages], i) => {
<add>].forEach(([Class, message], i) => {
<ide> console.log('Test %i', i);
<ide> const foo = new Class(message);
<ide> const name = foo.name; | 2 |
Ruby | Ruby | add bottle regex | 28a20b70fc6874e6b3329e729bd199816281ed4a | <ide><path>Library/Homebrew/bottle_version.rb
<ide> def self._parse spec
<ide> return m.captures.first unless m.nil?
<ide>
<ide> # e.g. ssh-copy-id-6.2p2.bottle.tar.gz
<del> m = /(\d\.(\d)+(p(\d)+)?)/.match(stem)
<add> # e.g. icu4c-52.1.bottle.tar.gz
<add> m = /(\d+\.(\d)+(p(\d)+)?)/.match(stem)
<ide> return m.captures.first unless m.nil?
<ide>
<ide> super
<ide><path>Library/Homebrew/test/test_bottle_versions.rb
<ide> def test_ssh_copy_id_style
<ide> assert_version_detected '6.2p2',
<ide> '/usr/local/ssh-copy-id-6.2p2.mountain_lion.bottle.tar.gz'
<ide> end
<add>
<add> def test_icu4c_style
<add> assert_version_detected '52.1',
<add> '/usr/local/icu4c-52.1.bottle.tar.gz'
<add> end
<ide> end | 2 |
Javascript | Javascript | remove throughput (covered by benchmark/net) | 051c1317f99ede15a831d1f542bd89a47ab77397 | <ide><path>benchmark/throughput-child.js
<del>var net = require('net');
<del>var received = 0;
<del>var start = process.hrtime();
<del>var socket = net.connect(8000);
<del>
<del>socket.on('data', function(d) {
<del> received += d.length;
<del>});
<del>
<del>var interval = setInterval(function() {
<del> // After 1 gigabyte shutdown.
<del> if (received > 10 * 1024 * 1024 * 1024) {
<del> socket.destroy();
<del> clearInterval(interval);
<del> process.exit(0);
<del> } else {
<del> // Otherwise print some stats.
<del> var elapsed = process.hrtime(start);
<del> var sec = elapsed[0] + elapsed[1]/1E9;
<del> var gigabytes = received / (1024 * 1024 * 1024);
<del> var gigabits = gigabytes * 8.0;
<del> console.log((gigabits / sec) + " gbit/sec")
<del> }
<del>}, 1000);
<ide><path>benchmark/throughput.js
<del>var fork = require('child_process').fork;
<del>var net = require('net');
<del>var buffer = new Buffer(1024 * 1024);
<del>
<del>function write(socket) {
<del> if (!socket.writable) return;
<del>
<del> socket.write(buffer, function() {
<del> write(socket);
<del> });
<del>}
<del>
<del>var server = net.createServer(function(socket) {
<del> server.close();
<del> write(socket);
<del>});
<del>
<del>server.listen(8000, function() {
<del> fork(__dirname + '/throughput-child.js');
<del>});
<del> | 2 |
Go | Go | set correct default shell for platform in builder | fe7b4d8fcd737b9162540149acc427d51d1ddfb9 | <ide><path>builder/dockerfile/builder_unix.go
<ide>
<ide> package dockerfile
<ide>
<del>var defaultShell = []string{"/bin/sh", "-c"}
<add>func defaultShellForPlatform(platform string) []string {
<add> return []string{"/bin/sh", "-c"}
<add>}
<ide><path>builder/dockerfile/builder_windows.go
<ide> package dockerfile
<ide>
<del>var defaultShell = []string{"cmd", "/S", "/C"}
<add>func defaultShellForPlatform(platform string) []string {
<add> if platform == "linux" {
<add> return []string{"/bin/sh", "-c"}
<add> }
<add> return []string{"cmd", "/S", "/C"}
<add>}
<ide><path>builder/dockerfile/dispatchers.go
<ide> func workdir(req dispatchRequest) error {
<ide> }
<ide>
<ide> comment := "WORKDIR " + runConfig.WorkingDir
<del> runConfigWithCommentCmd := copyRunConfig(runConfig, withCmdCommentString(comment))
<add> runConfigWithCommentCmd := copyRunConfig(runConfig, withCmdCommentString(comment, req.builder.platform))
<ide> containerID, err := req.builder.probeAndCreate(req.state, runConfigWithCommentCmd)
<ide> if err != nil || containerID == "" {
<ide> return err
<ide> func workdir(req dispatchRequest) error {
<ide> // the current SHELL which defaults to 'sh -c' under linux or 'cmd /S /C' under
<ide> // Windows, in the event there is only one argument The difference in processing:
<ide> //
<del>// RUN echo hi # sh -c echo hi (Linux)
<add>// RUN echo hi # sh -c echo hi (Linux and LCOW)
<ide> // RUN echo hi # cmd /S /C echo hi (Windows)
<ide> // RUN [ "echo", "hi" ] # echo hi
<ide> //
<ide> func run(req dispatchRequest) error {
<ide> stateRunConfig := req.state.runConfig
<ide> args := handleJSONArgs(req.args, req.attributes)
<ide> if !req.attributes["json"] {
<del> args = append(getShell(stateRunConfig), args...)
<add> args = append(getShell(stateRunConfig, req.builder.platform), args...)
<ide> }
<ide> cmdFromArgs := strslice.StrSlice(args)
<ide> buildArgs := req.builder.buildArgs.FilterAllowed(stateRunConfig.Env)
<ide> func cmd(req dispatchRequest) error {
<ide> runConfig := req.state.runConfig
<ide> cmdSlice := handleJSONArgs(req.args, req.attributes)
<ide> if !req.attributes["json"] {
<del> cmdSlice = append(getShell(runConfig), cmdSlice...)
<add> cmdSlice = append(getShell(runConfig, req.builder.platform), cmdSlice...)
<ide> }
<ide>
<ide> runConfig.Cmd = strslice.StrSlice(cmdSlice)
<ide> func entrypoint(req dispatchRequest) error {
<ide> runConfig.Entrypoint = nil
<ide> default:
<ide> // ENTRYPOINT echo hi
<del> runConfig.Entrypoint = strslice.StrSlice(append(getShell(runConfig), parsed[0]))
<add> runConfig.Entrypoint = strslice.StrSlice(append(getShell(runConfig, req.builder.platform), parsed[0]))
<ide> }
<ide>
<ide> // when setting the entrypoint if a CMD was not explicitly set then
<ide><path>builder/dockerfile/dispatchers_test.go
<ide> func TestRunWithBuildArgs(t *testing.T) {
<ide>
<ide> runConfig := &container.Config{}
<ide> origCmd := strslice.StrSlice([]string{"cmd", "in", "from", "image"})
<del> cmdWithShell := strslice.StrSlice(append(getShell(runConfig), "echo foo"))
<add> cmdWithShell := strslice.StrSlice(append(getShell(runConfig, runtime.GOOS), "echo foo"))
<ide> envVars := []string{"|1", "one=two"}
<ide> cachedCmd := strslice.StrSlice(append(envVars, cmdWithShell...))
<ide>
<ide><path>builder/dockerfile/internals.go
<ide> func (b *Builder) commit(dispatchState *dispatchState, comment string) error {
<ide> return errors.New("Please provide a source image with `from` prior to commit")
<ide> }
<ide>
<del> runConfigWithCommentCmd := copyRunConfig(dispatchState.runConfig, withCmdComment(comment))
<add> runConfigWithCommentCmd := copyRunConfig(dispatchState.runConfig, withCmdComment(comment, b.platform))
<ide> hit, err := b.probeCache(dispatchState, runConfigWithCommentCmd)
<ide> if err != nil || hit {
<ide> return err
<ide> func (b *Builder) performCopy(state *dispatchState, inst copyInstruction) error
<ide> // TODO: should this have been using origPaths instead of srcHash in the comment?
<ide> runConfigWithCommentCmd := copyRunConfig(
<ide> state.runConfig,
<del> withCmdCommentString(fmt.Sprintf("%s %s in %s ", inst.cmdName, srcHash, inst.dest)))
<add> withCmdCommentString(fmt.Sprintf("%s %s in %s ", inst.cmdName, srcHash, inst.dest), b.platform))
<ide> hit, err := b.probeCache(state, runConfigWithCommentCmd)
<ide> if err != nil || hit {
<ide> return err
<ide> func withCmd(cmd []string) runConfigModifier {
<ide>
<ide> // withCmdComment sets Cmd to a nop comment string. See withCmdCommentString for
<ide> // why there are two almost identical versions of this.
<del>func withCmdComment(comment string) runConfigModifier {
<add>func withCmdComment(comment string, platform string) runConfigModifier {
<ide> return func(runConfig *container.Config) {
<del> runConfig.Cmd = append(getShell(runConfig), "#(nop) ", comment)
<add> runConfig.Cmd = append(getShell(runConfig, platform), "#(nop) ", comment)
<ide> }
<ide> }
<ide>
<ide> // withCmdCommentString exists to maintain compatibility with older versions.
<ide> // A few instructions (workdir, copy, add) used a nop comment that is a single arg
<ide> // where as all the other instructions used a two arg comment string. This
<ide> // function implements the single arg version.
<del>func withCmdCommentString(comment string) runConfigModifier {
<add>func withCmdCommentString(comment string, platform string) runConfigModifier {
<ide> return func(runConfig *container.Config) {
<del> runConfig.Cmd = append(getShell(runConfig), "#(nop) "+comment)
<add> runConfig.Cmd = append(getShell(runConfig, platform), "#(nop) "+comment)
<ide> }
<ide> }
<ide>
<ide> func withEntrypointOverride(cmd []string, entrypoint []string) runConfigModifier
<ide>
<ide> // getShell is a helper function which gets the right shell for prefixing the
<ide> // shell-form of RUN, ENTRYPOINT and CMD instructions
<del>func getShell(c *container.Config) []string {
<add>func getShell(c *container.Config, platform string) []string {
<ide> if 0 == len(c.Shell) {
<del> return append([]string{}, defaultShell[:]...)
<add> return append([]string{}, defaultShellForPlatform(platform)[:]...)
<ide> }
<ide> return append([]string{}, c.Shell[:]...)
<ide> }
<ide><path>builder/dockerfile/internals_test.go
<ide> package dockerfile
<ide>
<ide> import (
<ide> "fmt"
<add> "runtime"
<ide> "testing"
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> func TestCopyRunConfig(t *testing.T) {
<ide> },
<ide> {
<ide> doc: "Set the command to a comment",
<del> modifiers: []runConfigModifier{withCmdComment("comment")},
<add> modifiers: []runConfigModifier{withCmdComment("comment", runtime.GOOS)},
<ide> expected: &container.Config{
<del> Cmd: append(defaultShell, "#(nop) ", "comment"),
<add> Cmd: append(defaultShellForPlatform(runtime.GOOS), "#(nop) ", "comment"),
<ide> Env: defaultEnv,
<ide> },
<ide> }, | 6 |
Java | Java | add websockethandlerinvoker class | 5f22cf053279b4a692eac95b5fbfb9701110cd4e | <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/AbstractSockJsSession.java
<ide> import org.springframework.websocket.TextMessage;
<ide> import org.springframework.websocket.WebSocketHandler;
<ide> import org.springframework.websocket.WebSocketSession;
<add>import org.springframework.websocket.adapter.WebSocketHandlerInvoker;
<ide>
<ide>
<ide> /**
<ide> public abstract class AbstractSockJsSession implements WebSocketSession {
<ide>
<ide> private final String sessionId;
<ide>
<del> private final HandlerProvider<WebSocketHandler> handlerProvider;
<del>
<del> private WebSocketHandler handler;
<add> private WebSocketHandlerInvoker handler;
<ide>
<ide> private State state = State.NEW;
<ide>
<ide> public AbstractSockJsSession(String sessionId, HandlerProvider<WebSocketHandler>
<ide> Assert.notNull(sessionId, "sessionId is required");
<ide> Assert.notNull(handlerProvider, "handlerProvider is required");
<ide> this.sessionId = sessionId;
<del> this.handlerProvider = handlerProvider;
<add> this.handler = new WebSocketHandlerInvoker(handlerProvider).setLogger(logger);
<ide> }
<ide>
<ide> public String getId() {
<ide> protected void updateLastActiveTime() {
<ide>
<ide> public void delegateConnectionEstablished() {
<ide> this.state = State.OPEN;
<del> this.handler = handlerProvider.getHandler();
<del> try {
<del> this.handler.afterConnectionEstablished(this);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex, null);
<del> }
<del> }
<del>
<del> /**
<del> * Close due to unhandled runtime error from WebSocketHandler.
<del> * @param closeStatus TODO
<del> */
<del> private void tryCloseWithError(Throwable ex, CloseStatus closeStatus) {
<del> logger.error("Unhandled error for " + this, ex);
<del> try {
<del> closeStatus = (closeStatus != null) ? closeStatus : CloseStatus.SERVER_ERROR;
<del> close(closeStatus);
<del> }
<del> catch (Throwable t) {
<del> destroyHandler();
<del> }
<del> }
<del>
<del> private void destroyHandler() {
<del> try {
<del> if (this.handler != null) {
<del> this.handlerProvider.destroy(this.handler);
<del> }
<del> }
<del> catch (Throwable t) {
<del> logger.warn("Error while destroying handler", t);
<del> }
<del> finally {
<del> this.handler = null;
<del> }
<add> this.handler.afterConnectionEstablished(this);
<ide> }
<ide>
<ide> /**
<ide> * Close due to error arising from SockJS transport handling.
<ide> */
<ide> protected void tryCloseWithSockJsTransportError(Throwable ex, CloseStatus closeStatus) {
<ide> delegateError(ex);
<del> tryCloseWithError(ex, closeStatus);
<add> this.handler.tryCloseWithError(this, ex, closeStatus);
<ide> }
<ide>
<ide> public void delegateMessages(String[] messages) {
<del> try {
<del> for (String message : messages) {
<del> this.handler.handleTextMessage(new TextMessage(message), this);
<del> }
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex, null);
<add> for (String message : messages) {
<add> this.handler.handleTextMessage(new TextMessage(message), this);
<ide> }
<ide> }
<ide>
<ide> public void delegateError(Throwable ex) {
<del> try {
<del> this.handler.handleTransportError(ex, this);
<del> }
<del> catch (Throwable t) {
<del> tryCloseWithError(t, null);
<del> }
<add> this.handler.handleTransportError(ex, this);
<ide> }
<ide>
<ide> /**
<ide> public final void delegateConnectionClosed(CloseStatus status) {
<ide> }
<ide> finally {
<ide> this.state = State.CLOSED;
<del> try {
<del> this.handler.afterConnectionClosed(status, this);
<del> }
<del> finally {
<del> destroyHandler();
<del> }
<add> this.handler.afterConnectionClosed(status, this);
<ide> }
<ide> }
<ide> }
<ide> public final void close(CloseStatus status) throws IOException {
<ide> }
<ide> finally {
<ide> this.state = State.CLOSED;
<del> try {
<del> this.handler.afterConnectionClosed(status, this);
<del> }
<del> finally {
<del> destroyHandler();
<del> }
<add> this.handler.afterConnectionClosed(status, this);
<ide> }
<ide> }
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/WebSocketHandler.java
<ide> public interface WebSocketHandler {
<ide> */
<ide> void afterConnectionEstablished(WebSocketSession session);
<ide>
<del> /**
<del> * A WebSocket connection has been closed.
<del> */
<del> void afterConnectionClosed(CloseStatus closeStatus, WebSocketSession session);
<del>
<ide> /**
<ide> * Handle an incoming text message.
<ide> */
<ide> public interface WebSocketHandler {
<ide> */
<ide> void handleTransportError(Throwable exception, WebSocketSession session);
<ide>
<add> /**
<add> * A WebSocket connection has been closed.
<add> */
<add> void afterConnectionClosed(CloseStatus closeStatus, WebSocketSession session);
<add>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/JettyWebSocketListenerAdapter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.websocket.adapter;
<add>
<add>import org.apache.commons.logging.Log;
<add>import org.apache.commons.logging.LogFactory;
<add>import org.eclipse.jetty.websocket.api.Session;
<add>import org.eclipse.jetty.websocket.api.WebSocketListener;
<add>import org.springframework.util.Assert;
<add>import org.springframework.websocket.BinaryMessage;
<add>import org.springframework.websocket.CloseStatus;
<add>import org.springframework.websocket.HandlerProvider;
<add>import org.springframework.websocket.TextMessage;
<add>import org.springframework.websocket.WebSocketHandler;
<add>import org.springframework.websocket.WebSocketSession;
<add>
<add>/**
<add> * Adapts Spring's {@link WebSocketHandler} to Jetty's {@link WebSocketListener}.
<add> *
<add> * @author Phillip Webb
<add> * @since 4.0
<add> */
<add>public class JettyWebSocketListenerAdapter implements WebSocketListener {
<add>
<add> private static Log logger = LogFactory.getLog(JettyWebSocketListenerAdapter.class);
<add>
<add> private final WebSocketHandler handler;
<add>
<add> private WebSocketSession wsSession;
<add>
<add>
<add> public JettyWebSocketListenerAdapter(HandlerProvider<WebSocketHandler> provider) {
<add> Assert.notNull(provider, "provider is required");
<add> this.handler = new WebSocketHandlerInvoker(provider).setLogger(logger);
<add> }
<add>
<add>
<add> @Override
<add> public void onWebSocketConnect(Session session) {
<add> this.wsSession = new JettyWebSocketSessionAdapter(session);
<add> this.handler.afterConnectionEstablished(this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onWebSocketClose(int statusCode, String reason) {
<add> CloseStatus closeStatus = new CloseStatus(statusCode, reason);
<add> this.handler.afterConnectionClosed(closeStatus, this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onWebSocketText(String payload) {
<add> TextMessage message = new TextMessage(payload);
<add> this.handler.handleTextMessage(message, this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onWebSocketBinary(byte[] payload, int offset, int len) {
<add> BinaryMessage message = new BinaryMessage(payload, offset, len);
<add> this.handler.handleBinaryMessage(message, this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onWebSocketError(Throwable cause) {
<add> this.handler.handleTransportError(cause, this.wsSession);
<add> }
<add>}
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/JettyWebSocketSessionAdapter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.websocket.adapter;
<add>
<add>import java.io.IOException;
<add>import java.net.URI;
<add>
<add>import org.eclipse.jetty.websocket.api.Session;
<add>import org.springframework.util.ObjectUtils;
<add>import org.springframework.websocket.BinaryMessage;
<add>import org.springframework.websocket.CloseStatus;
<add>import org.springframework.websocket.TextMessage;
<add>import org.springframework.websocket.WebSocketMessage;
<add>import org.springframework.websocket.WebSocketSession;
<add>
<add>
<add>/**
<add> * Adapts Jetty's {@link Session} to Spring's {@link WebSocketSession}.
<add> *
<add> * @author Phillip Webb
<add> * @since 4.0
<add> */
<add>public class JettyWebSocketSessionAdapter implements WebSocketSession {
<add>
<add> private Session session;
<add>
<add>
<add> public JettyWebSocketSessionAdapter(Session session) {
<add> this.session = session;
<add> }
<add>
<add>
<add> @Override
<add> public String getId() {
<add> return ObjectUtils.getIdentityHexString(this.session);
<add> }
<add>
<add> @Override
<add> public boolean isOpen() {
<add> return this.session.isOpen();
<add> }
<add>
<add> @Override
<add> public boolean isSecure() {
<add> return this.session.isSecure();
<add> }
<add>
<add> @Override
<add> public URI getURI() {
<add> return this.session.getUpgradeRequest().getRequestURI();
<add> }
<add>
<add> @Override
<add> public void sendMessage(WebSocketMessage message) throws IOException {
<add> if (message instanceof BinaryMessage) {
<add> sendMessage((BinaryMessage) message);
<add> }
<add> else if (message instanceof TextMessage) {
<add> sendMessage((TextMessage) message);
<add> }
<add> else {
<add> throw new IllegalArgumentException("Unsupported message type");
<add> }
<add> }
<add>
<add> private void sendMessage(BinaryMessage message) throws IOException {
<add> this.session.getRemote().sendBytes(message.getPayload());
<add> }
<add>
<add> private void sendMessage(TextMessage message) throws IOException {
<add> this.session.getRemote().sendString(message.getPayload());
<add> }
<add>
<add> @Override
<add> public void close() throws IOException {
<add> this.session.close();
<add> }
<add>
<add> @Override
<add> public void close(CloseStatus status) throws IOException {
<add> this.session.close(status.getCode(), status.getReason());
<add> }
<add>
<add>}
<ide>\ No newline at end of file
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/StandardEndpointAdapter.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.websocket.adapter;
<add>
<add>import java.nio.ByteBuffer;
<add>
<add>import javax.websocket.CloseReason;
<add>import javax.websocket.Endpoint;
<add>import javax.websocket.EndpointConfig;
<add>import javax.websocket.MessageHandler;
<add>
<add>import org.apache.commons.logging.Log;
<add>import org.apache.commons.logging.LogFactory;
<add>import org.springframework.util.Assert;
<add>import org.springframework.websocket.BinaryMessage;
<add>import org.springframework.websocket.CloseStatus;
<add>import org.springframework.websocket.HandlerProvider;
<add>import org.springframework.websocket.PartialMessageHandler;
<add>import org.springframework.websocket.TextMessage;
<add>import org.springframework.websocket.WebSocketHandler;
<add>import org.springframework.websocket.WebSocketSession;
<add>
<add>
<add>/**
<add> * An {@link Endpoint} that delegates to a {@link WebSocketHandler}.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class StandardEndpointAdapter extends Endpoint {
<add>
<add> private static Log logger = LogFactory.getLog(StandardEndpointAdapter.class);
<add>
<add> private final WebSocketHandler handler;
<add>
<add> private final Class<?> handlerClass;
<add>
<add> private WebSocketSession wsSession;
<add>
<add>
<add>
<add> public StandardEndpointAdapter(HandlerProvider<WebSocketHandler> provider) {
<add> Assert.notNull(provider, "provider is required");
<add> this.handler = new WebSocketHandlerInvoker(provider).setLogger(logger);
<add> this.handlerClass= provider.getHandlerType();
<add> }
<add>
<add>
<add> @Override
<add> public void onOpen(final javax.websocket.Session session, EndpointConfig config) {
<add>
<add> session.addMessageHandler(new MessageHandler.Whole<String>() {
<add> @Override
<add> public void onMessage(String message) {
<add> handleTextMessage(session, message);
<add> }
<add> });
<add> if (PartialMessageHandler.class.isAssignableFrom(this.handlerClass)) {
<add> session.addMessageHandler(new MessageHandler.Partial<ByteBuffer>() {
<add> @Override
<add> public void onMessage(ByteBuffer messagePart, boolean isLast) {
<add> handleBinaryMessage(session, messagePart, isLast);
<add> }
<add> });
<add> }
<add> else {
<add> session.addMessageHandler(new MessageHandler.Whole<ByteBuffer>() {
<add> @Override
<add> public void onMessage(ByteBuffer message) {
<add> handleBinaryMessage(session, message, true);
<add> }
<add> });
<add> }
<add>
<add> this.wsSession = new StandardWebSocketSessionAdapter(session);
<add> this.handler.afterConnectionEstablished(this.wsSession);
<add> }
<add>
<add> private void handleTextMessage(javax.websocket.Session session, String payload) {
<add> TextMessage message = new TextMessage(payload);
<add> this.handler.handleTextMessage(message, this.wsSession);
<add> }
<add>
<add> private void handleBinaryMessage(javax.websocket.Session session, ByteBuffer payload, boolean isLast) {
<add> BinaryMessage message = new BinaryMessage(payload, isLast);
<add> this.handler.handleBinaryMessage(message, this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onClose(javax.websocket.Session session, CloseReason reason) {
<add> CloseStatus closeStatus = new CloseStatus(reason.getCloseCode().getCode(), reason.getReasonPhrase());
<add> this.handler.afterConnectionClosed(closeStatus, this.wsSession);
<add> }
<add>
<add> @Override
<add> public void onError(javax.websocket.Session session, Throwable exception) {
<add> this.handler.handleTransportError(exception, this.wsSession);
<add> }
<add>
<add>}
<add><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/StandardWebSocketSessionAdapter.java
<del><path>spring-websocket/src/main/java/org/springframework/websocket/endpoint/StandardWebSocketSession.java
<ide> * limitations under the License.
<ide> */
<ide>
<del>package org.springframework.websocket.endpoint;
<add>package org.springframework.websocket.adapter;
<ide>
<ide> import java.io.IOException;
<ide> import java.net.URI;
<ide> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide> */
<del>public class StandardWebSocketSession implements WebSocketSession {
<add>public class StandardWebSocketSessionAdapter implements WebSocketSession {
<ide>
<del> private static Log logger = LogFactory.getLog(StandardWebSocketSession.class);
<add> private static Log logger = LogFactory.getLog(StandardWebSocketSessionAdapter.class);
<ide>
<ide> private final javax.websocket.Session session;
<ide>
<ide>
<del> public StandardWebSocketSession(javax.websocket.Session session) {
<add> public StandardWebSocketSessionAdapter(javax.websocket.Session session) {
<ide> Assert.notNull(session, "session is required");
<ide> this.session = session;
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/WebSocketHandlerInvoker.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.websocket.adapter;
<add>
<add>import java.util.concurrent.atomic.AtomicInteger;
<add>
<add>import org.apache.commons.logging.Log;
<add>import org.apache.commons.logging.LogFactory;
<add>import org.springframework.util.Assert;
<add>import org.springframework.websocket.BinaryMessage;
<add>import org.springframework.websocket.CloseStatus;
<add>import org.springframework.websocket.HandlerProvider;
<add>import org.springframework.websocket.TextMessage;
<add>import org.springframework.websocket.WebSocketHandler;
<add>import org.springframework.websocket.WebSocketSession;
<add>
<add>/**
<add> * A class for managing and delegating to a {@link WebSocketHandler} instance, applying
<add> * initialization and destruction as necessary at the start and end of the WebSocket
<add> * session, ensuring that any unhandled exceptions from its methods are caught and handled
<add> * by closing the session, and also adding uniform logging.
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 4.0
<add> */
<add>public class WebSocketHandlerInvoker implements WebSocketHandler {
<add>
<add> private Log logger = LogFactory.getLog(WebSocketHandlerInvoker.class);
<add>
<add> private final HandlerProvider<WebSocketHandler> handlerProvider;
<add>
<add> private WebSocketHandler handler;
<add>
<add> private final AtomicInteger sessionCount = new AtomicInteger(0);
<add>
<add>
<add> public WebSocketHandlerInvoker(HandlerProvider<WebSocketHandler> handlerProvider) {
<add> this.handlerProvider = handlerProvider;
<add> }
<add>
<add> public WebSocketHandlerInvoker setLogger(Log logger) {
<add> this.logger = logger;
<add> return this;
<add> }
<add>
<add> @Override
<add> public void afterConnectionEstablished(WebSocketSession session) {
<add> if (logger.isDebugEnabled()) {
<add> logger.debug("Connection established, " + session + ", uri=" + session.getURI());
<add> }
<add> try {
<add> Assert.isTrue(this.sessionCount.compareAndSet(0, 1), "Unexpected new session");
<add>
<add> this.handler = this.handlerProvider.getHandler();
<add> this.handler.afterConnectionEstablished(session);
<add> }
<add> catch (Throwable ex) {
<add> tryCloseWithError(session, ex);
<add> }
<add> }
<add>
<add> public void tryCloseWithError(WebSocketSession session, Throwable ex) {
<add> tryCloseWithError(session, ex, null);
<add> }
<add>
<add> public void tryCloseWithError(WebSocketSession session, Throwable ex, CloseStatus status) {
<add> logger.error("Unhandled error for " + session, ex);
<add> if (session.isOpen()) {
<add> try {
<add> session.close(CloseStatus.SERVER_ERROR);
<add> }
<add> catch (Throwable t) {
<add> destroyHandler();
<add> }
<add> }
<add> }
<add>
<add> private void destroyHandler() {
<add> try {
<add> if (this.handler != null) {
<add> this.handlerProvider.destroy(this.handler);
<add> }
<add> }
<add> catch (Throwable t) {
<add> logger.warn("Error while destroying handler", t);
<add> }
<add> finally {
<add> this.handler = null;
<add> }
<add> }
<add>
<add> @Override
<add> public void handleTextMessage(TextMessage message, WebSocketSession session) {
<add> if (logger.isTraceEnabled()) {
<add> logger.trace("Received text message for " + session + ": " + message);
<add> }
<add> try {
<add> this.handler.handleTextMessage(message, session);
<add> }
<add> catch (Throwable ex) {
<add> tryCloseWithError(session,ex);
<add> }
<add> }
<add>
<add> @Override
<add> public void handleBinaryMessage(BinaryMessage message, WebSocketSession session) {
<add> if (logger.isTraceEnabled()) {
<add> logger.trace("Received binary message for " + session);
<add> }
<add> try {
<add> this.handler.handleBinaryMessage(message, session);
<add> }
<add> catch (Throwable ex) {
<add> tryCloseWithError(session, ex);
<add> }
<add> }
<add>
<add> @Override
<add> public void handleTransportError(Throwable exception, WebSocketSession session) {
<add> if (logger.isDebugEnabled()) {
<add> logger.debug("Transport error for " + session, exception);
<add> }
<add> try {
<add> this.handler.handleTransportError(exception, session);
<add> }
<add> catch (Throwable ex) {
<add> tryCloseWithError(session, ex);
<add> }
<add> }
<add>
<add> @Override
<add> public void afterConnectionClosed(CloseStatus closeStatus, WebSocketSession session) {
<add> if (logger.isDebugEnabled()) {
<add> logger.debug("Connection closed for " + session + ", " + closeStatus);
<add> }
<add> try {
<add> this.handler.afterConnectionClosed(closeStatus, session);
<add> }
<add> catch (Throwable ex) {
<add> logger.error("Unhandled error for " + this, ex);
<add> }
<add> finally {
<add> this.handlerProvider.destroy(this.handler);
<add> }
<add> }
<add>
<add>}
<add><path>spring-websocket/src/main/java/org/springframework/websocket/adapter/package-info.java
<del><path>spring-websocket/src/main/java/org/springframework/websocket/endpoint/package-info.java
<ide> */
<ide>
<ide> /**
<del> * Classes for use with the standard Java WebSocket endpoints from both client and
<del> * server code.
<add> * Adapters for the {@link org.springframework.websocket.WebSocketHandler} and
<add> * {@link org.springframework.websocket.WebSocketSession} contracts.
<ide> */
<del>package org.springframework.websocket.endpoint;
<add>package org.springframework.websocket.adapter;
<ide>
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/client/endpoint/StandardWebSocketClient.java
<ide> import org.springframework.websocket.HandlerProvider;
<ide> import org.springframework.websocket.WebSocketHandler;
<ide> import org.springframework.websocket.WebSocketSession;
<add>import org.springframework.websocket.adapter.StandardWebSocketSessionAdapter;
<add>import org.springframework.websocket.adapter.StandardEndpointAdapter;
<ide> import org.springframework.websocket.client.WebSocketClient;
<ide> import org.springframework.websocket.client.WebSocketConnectFailureException;
<del>import org.springframework.websocket.endpoint.StandardWebSocketSession;
<del>import org.springframework.websocket.endpoint.WebSocketHandlerEndpoint;
<ide> import org.springframework.websocket.support.SimpleHandlerProvider;
<ide>
<ide> /**
<ide> public WebSocketSession doHandshake(HandlerProvider<WebSocketHandler> handler,
<ide> public WebSocketSession doHandshake(HandlerProvider<WebSocketHandler> handler,
<ide> final HttpHeaders httpHeaders, URI uri) throws WebSocketConnectFailureException {
<ide>
<del> Endpoint endpoint = new WebSocketHandlerEndpoint(handler);
<add> Endpoint endpoint = new StandardEndpointAdapter(handler);
<ide>
<ide> ClientEndpointConfig.Builder configBuidler = ClientEndpointConfig.Builder.create();
<ide> if (httpHeaders != null) {
<ide> public void beforeRequest(Map<String, List<String>> headers) {
<ide>
<ide> try {
<ide> Session session = this.webSocketContainer.connectToServer(endpoint, configBuidler.build(), uri);
<del> return new StandardWebSocketSession(session);
<add> return new StandardWebSocketSessionAdapter(session);
<ide> }
<ide> catch (Exception e) {
<ide> throw new WebSocketConnectFailureException("Failed to connect to " + uri, e);
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/endpoint/WebSocketHandlerEndpoint.java
<del>/*
<del> * Copyright 2002-2013 the original author or authors.
<del> *
<del> * Licensed under the Apache License, Version 2.0 (the "License");
<del> * you may not use this file except in compliance with the License.
<del> * You may obtain a copy of the License at
<del> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<del> * Unless required by applicable law or agreed to in writing, software
<del> * distributed under the License is distributed on an "AS IS" BASIS,
<del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del> * See the License for the specific language governing permissions and
<del> * limitations under the License.
<del> */
<del>
<del>package org.springframework.websocket.endpoint;
<del>
<del>import java.util.concurrent.atomic.AtomicInteger;
<del>
<del>import javax.websocket.CloseReason;
<del>import javax.websocket.Endpoint;
<del>import javax.websocket.EndpointConfig;
<del>import javax.websocket.MessageHandler;
<del>
<del>import org.apache.commons.logging.Log;
<del>import org.apache.commons.logging.LogFactory;
<del>import org.springframework.util.Assert;
<del>import org.springframework.websocket.BinaryMessage;
<del>import org.springframework.websocket.CloseStatus;
<del>import org.springframework.websocket.HandlerProvider;
<del>import org.springframework.websocket.PartialMessageHandler;
<del>import org.springframework.websocket.TextMessage;
<del>import org.springframework.websocket.WebSocketHandler;
<del>import org.springframework.websocket.WebSocketSession;
<del>
<del>
<del>/**
<del> * An {@link Endpoint} that delegates to a {@link WebSocketHandler}.
<del> *
<del> * @author Rossen Stoyanchev
<del> * @since 4.0
<del> */
<del>public class WebSocketHandlerEndpoint extends Endpoint {
<del>
<del> private static Log logger = LogFactory.getLog(WebSocketHandlerEndpoint.class);
<del>
<del> private final HandlerProvider<WebSocketHandler> handlerProvider;
<del>
<del> private WebSocketHandler handler;
<del>
<del> private WebSocketSession webSocketSession;
<del>
<del> private final AtomicInteger sessionCount = new AtomicInteger(0);
<del>
<del>
<del> public WebSocketHandlerEndpoint(HandlerProvider<WebSocketHandler> handlerProvider) {
<del> Assert.notNull(handlerProvider, "handlerProvider is required");
<del> this.handlerProvider = handlerProvider;
<del> }
<del>
<del>
<del> @Override
<del> public void onOpen(final javax.websocket.Session session, EndpointConfig config) {
<del>
<del> Assert.isTrue(this.sessionCount.compareAndSet(0, 1), "Unexpected connection");
<del>
<del> if (logger.isDebugEnabled()) {
<del> logger.debug("Connection established, javax.websocket.Session id="
<del> + session.getId() + ", uri=" + session.getRequestURI());
<del> }
<del>
<del> this.webSocketSession = new StandardWebSocketSession(session);
<del> this.handler = handlerProvider.getHandler();
<del>
<del> session.addMessageHandler(new MessageHandler.Whole<String>() {
<del> @Override
<del> public void onMessage(String message) {
<del> handleTextMessage(session, message);
<del> }
<del> });
<del> if (this.handler instanceof PartialMessageHandler) {
<del> session.addMessageHandler(new MessageHandler.Partial<byte[]>() {
<del> @Override
<del> public void onMessage(byte[] messagePart, boolean isLast) {
<del> handleBinaryMessage(session, messagePart, isLast);
<del> }
<del> });
<del> }
<del> else {
<del> session.addMessageHandler(new MessageHandler.Whole<byte[]>() {
<del> @Override
<del> public void onMessage(byte[] message) {
<del> handleBinaryMessage(session, message, true);
<del> }
<del> });
<del> }
<del>
<del> try {
<del> this.handler.afterConnectionEstablished(this.webSocketSession);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<del>
<del> private void tryCloseWithError(Throwable ex) {
<del> logger.error("Unhandled error for " + this.webSocketSession, ex);
<del> if (this.webSocketSession.isOpen()) {
<del> try {
<del> this.webSocketSession.close(CloseStatus.SERVER_ERROR);
<del> }
<del> catch (Throwable t) {
<del> destroyHandler();
<del> }
<del> }
<del> }
<del>
<del> private void destroyHandler() {
<del> try {
<del> if (this.handler != null) {
<del> this.handlerProvider.destroy(this.handler);
<del> }
<del> }
<del> catch (Throwable t) {
<del> logger.warn("Error while destroying handler", t);
<del> }
<del> finally {
<del> this.webSocketSession = null;
<del> this.handler = null;
<del> }
<del> }
<del>
<del> private void handleTextMessage(javax.websocket.Session session, String message) {
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Received message for WebSocket session id=" + session.getId() + ": " + message);
<del> }
<del> try {
<del> TextMessage textMessage = new TextMessage(message);
<del> this.handler.handleTextMessage(textMessage, this.webSocketSession);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<del>
<del> private void handleBinaryMessage(javax.websocket.Session session, byte[] message, boolean isLast) {
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Received binary data for WebSocket session id=" + session.getId());
<del> }
<del> try {
<del> BinaryMessage binaryMessage = new BinaryMessage(message, isLast);
<del> this.handler.handleBinaryMessage(binaryMessage, this.webSocketSession);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<del>
<del> @Override
<del> public void onClose(javax.websocket.Session session, CloseReason reason) {
<del> if (logger.isDebugEnabled()) {
<del> logger.debug("Connection closed, WebSocket session id=" + session.getId() + ", " + reason);
<del> }
<del> try {
<del> CloseStatus closeStatus = new CloseStatus(reason.getCloseCode().getCode(), reason.getReasonPhrase());
<del> this.handler.afterConnectionClosed(closeStatus, this.webSocketSession);
<del> }
<del> catch (Throwable ex) {
<del> logger.error("Unhandled error for " + this.webSocketSession, ex);
<del> }
<del> finally {
<del> this.handlerProvider.destroy(this.handler);
<del> }
<del> }
<del>
<del> @Override
<del> public void onError(javax.websocket.Session session, Throwable exception) {
<del> logger.error("Error for WebSocket session id=" + session.getId(), exception);
<del> try {
<del> this.handler.handleTransportError(exception, this.webSocketSession);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<del>
<del>}
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/server/endpoint/EndpointRegistration.java
<ide> import javax.websocket.server.HandshakeRequest;
<ide> import javax.websocket.server.ServerEndpointConfig;
<ide>
<del>import org.apache.commons.logging.Log;
<del>import org.apache.commons.logging.LogFactory;
<ide> import org.springframework.beans.BeansException;
<ide> import org.springframework.beans.factory.BeanFactory;
<ide> import org.springframework.beans.factory.BeanFactoryAware;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.websocket.HandlerProvider;
<del>import org.springframework.websocket.endpoint.WebSocketHandlerEndpoint;
<ide> import org.springframework.websocket.support.BeanCreatingHandlerProvider;
<ide> import org.springframework.websocket.support.SimpleHandlerProvider;
<ide>
<ide>
<ide> /**
<ide> * An implementation of {@link javax.websocket.server.ServerEndpointConfig} that also
<ide> * holds the target {@link javax.websocket.Endpoint} as a reference or a bean name.
<del> * The target can also be {@link org.springframework.websocket.WebSocketHandler}, in
<del> * which case it will be adapted via {@link WebSocketHandlerEndpoint}.
<ide> *
<ide> * <p>
<ide> * Beans of this type are detected by {@link EndpointExporter} and
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/server/support/AbstractEndpointUpgradeStrategy.java
<ide> import org.springframework.http.server.ServerHttpResponse;
<ide> import org.springframework.websocket.HandlerProvider;
<ide> import org.springframework.websocket.WebSocketHandler;
<del>import org.springframework.websocket.endpoint.WebSocketHandlerEndpoint;
<add>import org.springframework.websocket.adapter.StandardEndpointAdapter;
<ide> import org.springframework.websocket.server.RequestUpgradeStrategy;
<ide>
<ide> /**
<ide> public void upgrade(ServerHttpRequest request, ServerHttpResponse response,
<ide> }
<ide>
<ide> protected Endpoint adaptWebSocketHandler(HandlerProvider<WebSocketHandler> handler) {
<del> return new WebSocketHandlerEndpoint(handler);
<add> return new StandardEndpointAdapter(handler);
<ide> }
<ide>
<ide> protected abstract void upgradeInternal(ServerHttpRequest request, ServerHttpResponse response,
<ide><path>spring-websocket/src/main/java/org/springframework/websocket/server/support/JettyRequestUpgradeStrategy.java
<ide> package org.springframework.websocket.server.support;
<ide>
<ide> import java.io.IOException;
<del>import java.net.URI;
<del>import java.util.concurrent.atomic.AtomicInteger;
<ide>
<ide> import javax.servlet.http.HttpServletRequest;
<ide> import javax.servlet.http.HttpServletResponse;
<ide>
<del>import org.apache.commons.logging.Log;
<del>import org.apache.commons.logging.LogFactory;
<del>import org.eclipse.jetty.websocket.api.Session;
<ide> import org.eclipse.jetty.websocket.api.UpgradeRequest;
<ide> import org.eclipse.jetty.websocket.api.UpgradeResponse;
<del>import org.eclipse.jetty.websocket.api.WebSocketListener;
<ide> import org.eclipse.jetty.websocket.server.HandshakeRFC6455;
<ide> import org.eclipse.jetty.websocket.server.ServletWebSocketRequest;
<ide> import org.eclipse.jetty.websocket.server.WebSocketServerFactory;
<ide> import org.springframework.http.server.ServletServerHttpRequest;
<ide> import org.springframework.http.server.ServletServerHttpResponse;
<ide> import org.springframework.util.Assert;
<del>import org.springframework.util.ObjectUtils;
<del>import org.springframework.websocket.BinaryMessage;
<del>import org.springframework.websocket.CloseStatus;
<ide> import org.springframework.websocket.HandlerProvider;
<del>import org.springframework.websocket.TextMessage;
<ide> import org.springframework.websocket.WebSocketHandler;
<del>import org.springframework.websocket.WebSocketMessage;
<del>import org.springframework.websocket.WebSocketSession;
<add>import org.springframework.websocket.adapter.JettyWebSocketListenerAdapter;
<ide> import org.springframework.websocket.server.RequestUpgradeStrategy;
<ide>
<ide> /**
<ide> * {@link RequestUpgradeStrategy} for use with Jetty. Based on Jetty's internal
<ide> * {@code org.eclipse.jetty.websocket.server.WebSocketHandler} class.
<ide> *
<ide> * @author Phillip Webb
<add> * @since 4.0
<ide> */
<ide> public class JettyRequestUpgradeStrategy implements RequestUpgradeStrategy {
<ide>
<del> private static Log logger = LogFactory.getLog(JettyRequestUpgradeStrategy.class);
<del>
<ide> // FIXME jetty has options, timeouts etc. Do we need a common abstraction
<ide>
<ide> // FIXME need a way for someone to plug their own RequestUpgradeStrategy or override
<ide> // Jetty settings
<ide>
<ide> // FIXME when to call factory.cleanup();
<ide>
<del> private static final String HANDLER_PROVIDER = JettyRequestUpgradeStrategy.class.getName()
<add> private static final String HANDLER_PROVIDER_ATTR_NAME = JettyRequestUpgradeStrategy.class.getName()
<ide> + ".HANDLER_PROVIDER";
<ide>
<ide> private WebSocketServerFactory factory;
<ide> public JettyRequestUpgradeStrategy() {
<ide> this.factory.setCreator(new WebSocketCreator() {
<ide> @Override
<ide> @SuppressWarnings("unchecked")
<del> public Object createWebSocket(UpgradeRequest req, UpgradeResponse resp) {
<del> Assert.isInstanceOf(ServletWebSocketRequest.class, req);
<del> ServletWebSocketRequest servletRequest = (ServletWebSocketRequest) req;
<del> HandlerProvider<WebSocketHandler> handlerProvider = (HandlerProvider<WebSocketHandler>) servletRequest.getServletAttributes().get(
<del> HANDLER_PROVIDER);
<del> return new WebSocketHandlerAdapter(handlerProvider);
<add> public Object createWebSocket(UpgradeRequest request, UpgradeResponse response) {
<add> Assert.isInstanceOf(ServletWebSocketRequest.class, request);
<add> ServletWebSocketRequest servletRequest = (ServletWebSocketRequest) request;
<add> HandlerProvider<WebSocketHandler> handlerProvider =
<add> (HandlerProvider<WebSocketHandler>) servletRequest.getServletAttributes().get(
<add> HANDLER_PROVIDER_ATTR_NAME);
<add> return new JettyWebSocketListenerAdapter(handlerProvider);
<ide> }
<ide> });
<ide> try {
<ide> public String[] getSupportedVersions() {
<ide>
<ide> @Override
<ide> public void upgrade(ServerHttpRequest request, ServerHttpResponse response,
<del> String selectedProtocol, HandlerProvider<WebSocketHandler> handlerProvider)
<del> throws IOException {
<del> Assert.isInstanceOf(ServletServerHttpRequest.class, request);
<del> Assert.isInstanceOf(ServletServerHttpResponse.class, response);
<del> upgrade(((ServletServerHttpRequest) request).getServletRequest(),
<del> ((ServletServerHttpResponse) response).getServletResponse(),
<del> selectedProtocol, handlerProvider);
<del> }
<del>
<del> private void upgrade(HttpServletRequest request, HttpServletResponse response,
<del> String selectedProtocol, final HandlerProvider<WebSocketHandler> handlerProvider)
<del> throws IOException {
<del> request.setAttribute(HANDLER_PROVIDER, handlerProvider);
<del> Assert.state(factory.isUpgradeRequest(request, response), "Not a suitable WebSocket upgrade request");
<del> Assert.state(factory.acceptWebSocket(request, response), "Unable to accept WebSocket");
<del> }
<del>
<del>
<del> /**
<del> * Adapts Spring's {@link WebSocketHandler} to Jetty's {@link WebSocketListener}.
<del> */
<del> private static class WebSocketHandlerAdapter implements WebSocketListener {
<del>
<del> private final HandlerProvider<WebSocketHandler> provider;
<del>
<del> private WebSocketHandler handler;
<del>
<del> private WebSocketSession session;
<del>
<del> private final AtomicInteger sessionCount = new AtomicInteger(0);
<del>
<del>
<del> public WebSocketHandlerAdapter(HandlerProvider<WebSocketHandler> provider) {
<del> Assert.notNull(provider, "Provider must not be null");
<del> Assert.isAssignable(WebSocketHandler.class, provider.getHandlerType());
<del> this.provider = provider;
<del> }
<del>
<del>
<del> @Override
<del> public void onWebSocketConnect(Session session) {
<del>
<del> Assert.isTrue(this.sessionCount.compareAndSet(0, 1), "Unexpected connection");
<del>
<del> this.session = new WebSocketSessionAdapter(session);
<del> if (logger.isDebugEnabled()) {
<del> logger.debug("Connection established, WebSocket session id="
<del> + this.session.getId() + ", uri=" + this.session.getURI());
<del> }
<del> this.handler = this.provider.getHandler();
<del>
<del> try {
<del> this.handler.afterConnectionEstablished(this.session);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<del>
<del> private void tryCloseWithError(Throwable ex) {
<del> logger.error("Unhandled error for " + this.session, ex);
<del> if (this.session.isOpen()) {
<del> try {
<del> this.session.close(CloseStatus.SERVER_ERROR);
<del> }
<del> catch (Throwable t) {
<del> destroyHandler();
<del> }
<del> }
<del> }
<del>
<del> private void destroyHandler() {
<del> try {
<del> if (this.handler != null) {
<del> this.provider.destroy(this.handler);
<del> }
<del> }
<del> catch (Throwable t) {
<del> logger.warn("Error while destroying handler", t);
<del> }
<del> finally {
<del> this.session = null;
<del> this.handler = null;
<del> }
<del> }
<del>
<del> @Override
<del> public void onWebSocketClose(int statusCode, String reason) {
<del> try {
<del> CloseStatus closeStatus = new CloseStatus(statusCode, reason);
<del> if (logger.isDebugEnabled()) {
<del> logger.debug("Connection closed, WebSocket session id="
<del> + this.session.getId() + ", " + closeStatus);
<del> }
<del> this.handler.afterConnectionClosed(closeStatus, this.session);
<del> }
<del> catch (Throwable ex) {
<del> logger.error("Unhandled error for " + this.session, ex);
<del> }
<del> finally {
<del> destroyHandler();
<del> }
<del> }
<add> String selectedProtocol, HandlerProvider<WebSocketHandler> handlerProvider) throws IOException {
<ide>
<del> @Override
<del> public void onWebSocketText(String payload) {
<del> try {
<del> TextMessage message = new TextMessage(payload);
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Received message for WebSocket session id="
<del> + this.session.getId() + ": " + message);
<del> }
<del> this.handler.handleTextMessage(message, this.session);
<del> }
<del> catch(Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<add> Assert.isInstanceOf(ServletServerHttpRequest.class, request);
<add> HttpServletRequest servletRequest = ((ServletServerHttpRequest) request).getServletRequest();
<ide>
<del> @Override
<del> public void onWebSocketBinary(byte[] payload, int offset, int len) {
<del> try {
<del> BinaryMessage message = new BinaryMessage(payload, offset, len);
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Received binary data for WebSocket session id="
<del> + this.session.getId() + ": " + message);
<del> }
<del> this.handler.handleBinaryMessage(message, this.session);
<del> }
<del> catch(Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<add> Assert.isInstanceOf(ServletServerHttpResponse.class, response);
<add> HttpServletResponse servletResponse = ((ServletServerHttpResponse) response).getServletResponse();
<ide>
<del> @Override
<del> public void onWebSocketError(Throwable cause) {
<del> try {
<del> this.handler.handleTransportError(cause, this.session);
<del> }
<del> catch (Throwable ex) {
<del> tryCloseWithError(ex);
<del> }
<del> }
<add> upgrade(servletRequest, servletResponse, selectedProtocol, handlerProvider);
<ide> }
<ide>
<add> private void upgrade(HttpServletRequest request, HttpServletResponse response,
<add> String selectedProtocol, final HandlerProvider<WebSocketHandler> handlerProvider) throws IOException {
<ide>
<del> /**
<del> * Adapts Jetty's {@link Session} to Spring's {@link WebSocketSession}.
<del> */
<del> private static class WebSocketSessionAdapter implements WebSocketSession {
<del>
<del> private Session session;
<del>
<del>
<del> public WebSocketSessionAdapter(Session session) {
<del> this.session = session;
<del> }
<del>
<del>
<del> @Override
<del> public String getId() {
<del> return ObjectUtils.getIdentityHexString(this.session);
<del> }
<del>
<del> @Override
<del> public boolean isOpen() {
<del> return this.session.isOpen();
<del> }
<del>
<del> @Override
<del> public boolean isSecure() {
<del> return this.session.isSecure();
<del> }
<del>
<del> @Override
<del> public URI getURI() {
<del> return this.session.getUpgradeRequest().getRequestURI();
<del> }
<del>
<del> @Override
<del> public void sendMessage(WebSocketMessage message) throws IOException {
<del> if (message instanceof BinaryMessage) {
<del> sendMessage((BinaryMessage) message);
<del> }
<del> else if (message instanceof TextMessage) {
<del> sendMessage((TextMessage) message);
<del> }
<del> else {
<del> throw new IllegalArgumentException("Unsupported message type");
<del> }
<del> }
<del>
<del> private void sendMessage(BinaryMessage message) throws IOException {
<del> this.session.getRemote().sendBytes(message.getPayload());
<del> }
<del>
<del> private void sendMessage(TextMessage message) throws IOException {
<del> this.session.getRemote().sendString(message.getPayload());
<del> }
<del>
<del> @Override
<del> public void close() throws IOException {
<del> this.session.close();
<del> }
<add> Assert.state(this.factory.isUpgradeRequest(request, response), "Not a suitable WebSocket upgrade request");
<add> Assert.state(this.factory.acceptWebSocket(request, response), "Unable to accept WebSocket");
<ide>
<del> @Override
<del> public void close(CloseStatus status) throws IOException {
<del> this.session.close(status.getCode(), status.getReason());
<del> }
<add> request.setAttribute(HANDLER_PROVIDER_ATTR_NAME, handlerProvider);
<ide> }
<ide>
<ide> } | 13 |
Ruby | Ruby | add collectionproxy#<< documentation | bb887b92f8ed119641d68487cef1b5b34b2518a1 | <ide><path>activerecord/lib/active_record/associations/collection_proxy.rb
<ide> def to_ary
<ide> end
<ide> alias_method :to_a, :to_ary
<ide>
<add> # Adds one or more +records+ to the collection by setting their foreign keys
<add> # to the collection‘s primary key. Returns +self+, so several appends may be
<add> # chained together.
<add> #
<add> # class Person < ActiveRecord::Base
<add> # has_many :pets
<add> # end
<add> #
<add> # person.pets.size # => 0
<add> # person.pets << Pet.new(name: 'Fancy-Fancy')
<add> # person.pets << [Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo')]
<add> # person.pets.size # => 3
<add> #
<add> # person.id # => 1
<add> # person.pets
<add> # # => [
<add> # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
<add> # # #<Pet id: 2, name: "Spook", person_id: 1>,
<add> # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
<add> # # ]
<ide> def <<(*records)
<ide> proxy_association.concat(records) && self
<ide> end | 1 |
Ruby | Ruby | turn scope in to a linked list | dc3f25c8a5aa64de9225f11498a389a2d31e880a | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> module Scoping
<ide> # end
<ide> def scope(*args)
<ide> options = args.extract_options!.dup
<del> recover = {}
<add> scope = {}
<ide>
<ide> options[:path] = args.flatten.join('/') if args.any?
<ide> options[:constraints] ||= {}
<ide> def scope(*args)
<ide> end
<ide>
<ide> if value
<del> recover[option] = @scope[option]
<del> @scope[option] = send("merge_#{option}_scope", @scope[option], value)
<add> scope[option] = send("merge_#{option}_scope", @scope[option], value)
<ide> end
<ide> end
<ide>
<add> @scope = @scope.new scope
<ide> yield
<ide> self
<ide> ensure
<del> @scope.merge!(recover)
<add> @scope = @scope.parent
<ide> end
<ide>
<ide> # Scopes routes to a specific controller
<ide> def nested_scope? #:nodoc:
<ide>
<ide> def with_exclusive_scope
<ide> begin
<del> old_name_prefix, old_path = @scope[:as], @scope[:path]
<del> @scope[:as], @scope[:path] = nil, nil
<add> @scope = @scope.new(:as => nil, :path => nil)
<ide>
<ide> with_scope_level(:exclusive) do
<ide> yield
<ide> end
<ide> ensure
<del> @scope[:as], @scope[:path] = old_name_prefix, old_path
<add> @scope = @scope.parent
<ide> end
<ide> end
<ide>
<ide> def with_scope_level(kind)
<del> old, @scope[:scope_level] = @scope[:scope_level], kind
<add> @scope = @scope.new(:scope_level => kind)
<ide> yield
<ide> ensure
<del> @scope[:scope_level] = old
<add> @scope = @scope.parent
<ide> end
<ide>
<ide> def resource_scope(kind, resource) #:nodoc:
<ide> resource.shallow = @scope[:shallow]
<del> old_resource, @scope[:scope_level_resource] = @scope[:scope_level_resource], resource
<add> @scope = @scope.new(:scope_level_resource => resource)
<ide> @nesting.push(resource)
<ide>
<ide> with_scope_level(kind) do
<ide> scope(parent_resource.resource_scope) { yield }
<ide> end
<ide> ensure
<ide> @nesting.pop
<del> @scope[:scope_level_resource] = old_resource
<add> @scope = @scope.parent
<ide> end
<ide>
<ide> def nested_options #:nodoc:
<ide> def canonical_action?(action, flag) #:nodoc:
<ide> end
<ide>
<ide> def shallow_scope(path, options = {}) #:nodoc:
<del> old_name_prefix, old_path = @scope[:as], @scope[:path]
<del> @scope[:as], @scope[:path] = @scope[:shallow_prefix], @scope[:shallow_path]
<add> scope = { :as => @scope[:shallow_prefix],
<add> :path => @scope[:shallow_path] }
<add> @scope = @scope.new scope
<ide>
<ide> scope(path, options) { yield }
<ide> ensure
<del> @scope[:as], @scope[:path] = old_name_prefix, old_path
<add> @scope = @scope.parent
<ide> end
<ide>
<ide> def path_for_action(action, path) #:nodoc:
<ide> def concerns(*args)
<ide> end
<ide> end
<ide>
<add> class Scope # :nodoc:
<add> attr_reader :parent
<add>
<add> def initialize(hash, parent = {})
<add> @hash = hash
<add> @parent = parent
<add> end
<add>
<add> def new(hash)
<add> self.class.new hash, self
<add> end
<add>
<add> def [](key)
<add> @hash.fetch(key) { @parent[key] }
<add> end
<add>
<add> def []=(k,v)
<add> @hash[k] = v
<add> end
<add> end
<add>
<ide> def initialize(set) #:nodoc:
<ide> @set = set
<del> @scope = { :path_names => @set.resources_path_names }
<add> @scope = Scope.new({ :path_names => @set.resources_path_names })
<ide> @concerns = {}
<ide> @nesting = []
<ide> end | 1 |
Text | Text | remove obsolete warning about regular expression | 96c525f2509a9ec1890c5218f6a65b22660b97f7 | <ide><path>guides/source/active_record_validations.md
<ide> If you set `:only_integer` to `true`, then it will use the
<ide> regular expression to validate the attribute's value. Otherwise, it will try to
<ide> convert the value to a number using `Float`.
<ide>
<del>WARNING. Note that the regular expression above allows a trailing newline
<del>character.
<del>
<ide> ```ruby
<ide> class Player < ApplicationRecord
<ide> validates :points, numericality: true | 1 |
PHP | PHP | make testremove meaningful | f2a61e270cdaf1a480f6e64fff22010f2552dd83 | <ide><path>tests/TestCase/ORM/TableRegistryTest.php
<ide> public function testGenericInstances()
<ide> */
<ide> public function testRemove()
<ide> {
<del> Plugin::load('TestPlugin');
<del>
<del> $pluginTable = TableRegistry::get('TestPlugin.Comments');
<del> $cachedTable = TableRegistry::get('Comments');
<add> $first = TableRegistry::get('Comments');
<ide>
<del> $this->assertTrue(TableRegistry::exists('TestPlugin.Comments'));
<del> $this->assertTrue(TableRegistry::exists('Comments'));
<del> $this->assertNotSame($pluginTable, $cachedTable);
<del>
<del> TableRegistry::remove('TestPlugin.Comments');
<del> $this->assertFalse(TableRegistry::exists('TestPlugin.Comments'));
<ide> $this->assertTrue(TableRegistry::exists('Comments'));
<ide>
<ide> TableRegistry::remove('Comments');
<del> $this->assertFalse(TableRegistry::exists('TestPlugin.Comments'));
<ide> $this->assertFalse(TableRegistry::exists('Comments'));
<ide>
<del> $pluginTable = TableRegistry::get('TestPlugin.Comments');
<del> $cachedTable = TableRegistry::get('Comments');
<add> $second = TableRegistry::get('Comments');
<ide>
<del> TableRegistry::remove('Comments');
<del> $this->assertTrue(TableRegistry::exists('TestPlugin.Comments'));
<del> $this->assertFalse(TableRegistry::exists('Comments'));
<add> $this->assertNotSame($first, $second, 'Should be different objects, as the reference to the first was destroyed');
<add> $this->assertTrue(TableRegistry::exists('Comments'));
<ide> }
<ide> } | 1 |
Ruby | Ruby | add support for the --git option | 128f63db53db07a59d7df19748fc95b34a86f83c | <ide><path>Library/Homebrew/cmd/reinstall.rb
<ide> def reinstall_args
<ide> env: :display_install_times,
<ide> description: "Print install times for each formula at the end of the run.",
<ide> }],
<add> [:switch, "-g", "--git", {
<add> description: "Create a Git repository, useful for creating patches to the software.",
<add> }],
<ide> ].each do |options|
<ide> send(*options)
<ide> conflicts "--cask", options[-2]
<ide> def reinstall
<ide> debug: args.debug?,
<ide> quiet: args.quiet?,
<ide> verbose: args.verbose?,
<add> git: args.git?,
<ide> )
<ide> Cleanup.install_formula_clean!(formula)
<ide> end
<ide><path>Library/Homebrew/reinstall.rb
<ide> def reinstall_formula(
<ide> force: false,
<ide> debug: false,
<ide> quiet: false,
<del> verbose: false
<add> verbose: false,
<add> git: false
<ide> )
<ide> if formula.opt_prefix.directory?
<ide> keg = Keg.new(formula.opt_prefix.resolved_path)
<ide> def reinstall_formula(
<ide> build_bottle: tab&.built_bottle?,
<ide> force_bottle: force_bottle,
<ide> build_from_source_formulae: build_from_source_formulae,
<add> git: git,
<ide> interactive: interactive,
<ide> keep_tmp: keep_tmp,
<ide> force: force, | 2 |
Python | Python | add support for grouped choices | 56b3f196057732ec441fe0f5982538431f1b069f | <ide><path>rest_framework/fields.py
<ide> import datetime
<ide> import decimal
<ide> import inspect
<add>import itertools
<ide> import re
<ide> import uuid
<ide>
<ide> class ChoiceField(Field):
<ide> }
<ide>
<ide> def __init__(self, choices, **kwargs):
<del> # Allow either single or paired choices style:
<del> # choices = [1, 2, 3]
<del> # choices = [(1, 'First'), (2, 'Second'), (3, 'Third')]
<del> pairs = [
<del> isinstance(item, (list, tuple)) and len(item) == 2
<del> for item in choices
<del> ]
<del> if all(pairs):
<del> self.choices = OrderedDict([(key, display_value) for key, display_value in choices])
<del> else:
<del> self.choices = OrderedDict([(item, item) for item in choices])
<add> flat_choices = [self.flatten_choice(c) for c in choices]
<add> self.choices = OrderedDict(itertools.chain(*flat_choices))
<ide>
<ide> # Map the string representation of choices to the underlying value.
<ide> # Allows us to deal with eg. integer choices while supporting either
<ide> def __init__(self, choices, **kwargs):
<ide>
<ide> super(ChoiceField, self).__init__(**kwargs)
<ide>
<add> def flatten_choice(self, choice):
<add> """
<add> Convert a choices choice into a flat list of choices.
<add>
<add> Returns a list of choices.
<add> """
<add>
<add> # Allow single, paired or grouped choices style:
<add> # choices = [1, 2, 3]
<add> # choices = [(1, 'First'), (2, 'Second'), (3, 'Third')]
<add> # choices = [('Category', ((1, 'First'), (2, 'Second'))), (3, 'Third')]
<add> if (not isinstance(choice, (list, tuple))):
<add> # single choice
<add> return [(choice, choice)]
<add> else:
<add> key, display_value = choice
<add> if isinstance(display_value, (list, tuple)):
<add> # grouped choices
<add> sub_choices = [self.flatten_choice(c) for c in display_value]
<add> return list(itertools.chain(*sub_choices))
<add> else:
<add> # paired choice
<add> return [(key, display_value)]
<add>
<ide> def to_internal_value(self, data):
<ide> if data == '' and self.allow_blank:
<ide> return ''
<ide><path>tests/test_fields.py
<ide> class TestChoiceFieldWithListChoices(FieldValues):
<ide> field = serializers.ChoiceField(choices=('poor', 'medium', 'good'))
<ide>
<ide>
<add>class TestChoiceFieldWithGroupedChoices(FieldValues):
<add> """
<add> Valid and invalid values for a `Choice` field that uses a grouped list for the
<add> choices, rather than a list of pairs of (`value`, `description`).
<add> """
<add> valid_inputs = {
<add> 'poor': 'poor',
<add> 'medium': 'medium',
<add> 'good': 'good',
<add> }
<add> invalid_inputs = {
<add> 'awful': ['"awful" is not a valid choice.']
<add> }
<add> outputs = {
<add> 'good': 'good'
<add> }
<add> field = serializers.ChoiceField(
<add> choices=[
<add> (
<add> 'Category',
<add> (
<add> ('poor', 'Poor quality'),
<add> ('medium', 'Medium quality'),
<add> ),
<add> ),
<add> ('good', 'Good quality'),
<add> ]
<add> )
<add>
<add>
<add>class TestChoiceFieldWithMixedChoices(FieldValues):
<add> """
<add> Valid and invalid values for a `Choice` field that uses a single paired or
<add> grouped.
<add> """
<add> valid_inputs = {
<add> 'poor': 'poor',
<add> 'medium': 'medium',
<add> 'good': 'good',
<add> }
<add> invalid_inputs = {
<add> 'awful': ['"awful" is not a valid choice.']
<add> }
<add> outputs = {
<add> 'good': 'good'
<add> }
<add> field = serializers.ChoiceField(
<add> choices=[
<add> (
<add> 'Category',
<add> (
<add> ('poor', 'Poor quality'),
<add> ),
<add> ),
<add> 'medium',
<add> ('good', 'Good quality'),
<add> ]
<add> )
<add>
<add>
<ide> class TestMultipleChoiceField(FieldValues):
<ide> """
<ide> Valid and invalid values for `MultipleChoiceField`.
<ide><path>tests/test_validation.py
<ide> def test_max_value_validation_fail(self):
<ide> self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
<ide>
<ide>
<add># regression tests for issue: 1533
<add>
<ide> class TestChoiceFieldChoicesValidate(TestCase):
<ide> CHOICES = [
<ide> (0, 'Small'),
<ide> (1, 'Medium'),
<ide> (2, 'Large'),
<ide> ]
<ide>
<add> SINGLE_CHOICES = [0, 1, 2]
<add>
<ide> CHOICES_NESTED = [
<ide> ('Category', (
<ide> (1, 'First'),
<ide> class TestChoiceFieldChoicesValidate(TestCase):
<ide> (4, 'Fourth'),
<ide> ]
<ide>
<add> MIXED_CHOICES = [
<add> ('Category', (
<add> (1, 'First'),
<add> (2, 'Second'),
<add> )),
<add> 3,
<add> (4, 'Fourth'),
<add> ]
<add>
<ide> def test_choices(self):
<ide> """
<ide> Make sure a value for choices works as expected.
<ide> def test_choices(self):
<ide> except serializers.ValidationError:
<ide> self.fail("Value %s does not validate" % str(value))
<ide>
<add> def test_single_choices(self):
<add> """
<add> Make sure a single value for choices works as expected.
<add> """
<add> f = serializers.ChoiceField(choices=self.SINGLE_CHOICES)
<add> value = self.SINGLE_CHOICES[0]
<add> try:
<add> f.to_internal_value(value)
<add> except serializers.ValidationError:
<add> self.fail("Value %s does not validate" % str(value))
<add>
<add> def test_nested_choices(self):
<add> """
<add> Make sure a nested value for choices works as expected.
<add> """
<add> f = serializers.ChoiceField(choices=self.CHOICES_NESTED)
<add> value = self.CHOICES_NESTED[0][1][0][0]
<add> try:
<add> f.to_internal_value(value)
<add> except serializers.ValidationError:
<add> self.fail("Value %s does not validate" % str(value))
<add>
<add> def test_mixed_choices(self):
<add> """
<add> Make sure mixed values for choices works as expected.
<add> """
<add> f = serializers.ChoiceField(choices=self.MIXED_CHOICES)
<add> value = self.MIXED_CHOICES[1]
<add> try:
<add> f.to_internal_value(value)
<add> except serializers.ValidationError:
<add> self.fail("Value %s does not validate" % str(value))
<add>
<ide>
<ide> class RegexSerializer(serializers.Serializer):
<ide> pin = serializers.CharField( | 3 |
Python | Python | fix ticket #944 | 4a632534604d686ff9ac5a9629ce06f7c895cd1e | <ide><path>numpy/lib/polynomial.py
<ide> def polyint(p, m=1, k=None):
<ide> else:
<ide> truepoly = isinstance(p, poly1d)
<ide> p = NX.asarray(p)
<del> y = NX.zeros(len(p)+1, float)
<add> y = NX.zeros(len(p) + 1, p.dtype)
<ide> y[:-1] = p*1.0/NX.arange(len(p), 0, -1)
<ide> y[-1] = k[0]
<del> val = polyint(y, m-1, k=k[1:])
<add> val = polyint(y, m - 1, k=k[1:])
<ide> if truepoly:
<ide> val = poly1d(val)
<ide> return val
<ide><path>numpy/lib/tests/test_regression.py
<ide> from numpy.testing import *
<ide> import numpy as np
<ide>
<del>rlevel = 1
<ide>
<del>class TestRegression(TestCase):
<del> def test_polyfit_build(self,level=rlevel):
<add>class TestRegression(object):
<add> def test_polyfit_build(self):
<ide> """Ticket #628"""
<ide> ref = [-1.06123820e-06, 5.70886914e-04, -1.13822012e-01,
<ide> 9.95368241e+00, -3.14526520e+02]
<ide> def test_polyfit_build(self,level=rlevel):
<ide> tested = np.polyfit(x, y, 4)
<ide> assert_array_almost_equal(ref, tested)
<ide>
<add> def test_polyint_type(self) :
<add> """Ticket #944"""
<add> msg = "Wrong type, should be complex"
<add> x = np.polyint(np.ones(3, dtype=np.complex))
<add> assert_(np.asarray(x).dtype == np.complex, msg)
<add>
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite() | 2 |
Javascript | Javascript | use const where applicable in uglifyjsplugin | 1d78f990a6af1a5b85e45b345b4f7861b03bb12b | <ide><path>lib/optimize/UglifyJsPlugin.js
<ide> class UglifyJsPlugin {
<ide> }
<ide>
<ide> apply(compiler) {
<del> let options = this.options;
<add> const options = this.options;
<ide> options.test = options.test || /\.js($|\?)/i;
<ide>
<del> let requestShortener = new RequestShortener(compiler.context);
<add> const requestShortener = new RequestShortener(compiler.context);
<ide> compiler.plugin("compilation", (compilation) => {
<ide> if(options.sourceMap) {
<ide> compilation.plugin("build-module", (module) => {
<ide> class UglifyJsPlugin {
<ide> });
<ide> }
<ide> compilation.plugin("optimize-chunk-assets", (chunks, callback) => {
<del> let files = [];
<add> const files = [];
<ide> chunks.forEach((chunk) => files.push.apply(files, chunk.files));
<ide> files.push.apply(files, compilation.additionalChunkAssets);
<ide> const filterdFiles = files.filter(ModuleFilenameHelpers.matchObject.bind(undefined, options));
<ide> filterdFiles.forEach((file) => {
<del> let oldWarnFunction = uglify.AST_Node.warn_function;
<del> let warnings = [];
<add> const oldWarnFunction = uglify.AST_Node.warn_function;
<add> const warnings = [];
<ide> let sourceMap;
<ide> try {
<del> let asset = compilation.assets[file];
<add> const asset = compilation.assets[file];
<ide> if(asset.__UglifyJsPlugin) {
<ide> compilation.assets[file] = asset.__UglifyJsPlugin;
<ide> return;
<ide> class UglifyJsPlugin {
<ide> let inputSourceMap;
<ide> if(options.sourceMap) {
<ide> if(asset.sourceAndMap) {
<del> let sourceAndMap = asset.sourceAndMap();
<add> const sourceAndMap = asset.sourceAndMap();
<ide> inputSourceMap = sourceAndMap.map;
<ide> input = sourceAndMap.source;
<ide> } else {
<ide> class UglifyJsPlugin {
<ide> }
<ide> sourceMap = new SourceMapConsumer(inputSourceMap);
<ide> uglify.AST_Node.warn_function = (warning) => { // eslint-disable-line camelcase
<del> let match = /\[.+:([0-9]+),([0-9]+)\]/.exec(warning);
<del> let line = +match[1];
<del> let column = +match[2];
<del> let original = sourceMap.originalPositionFor({
<add> const match = /\[.+:([0-9]+),([0-9]+)\]/.exec(warning);
<add> const line = +match[1];
<add> const column = +match[2];
<add> const original = sourceMap.originalPositionFor({
<ide> line: line,
<ide> column: column
<ide> });
<ide> class UglifyJsPlugin {
<ide> });
<ide> if(options.compress !== false) {
<ide> ast.figure_out_scope();
<del> let compress = uglify.Compressor(options.compress || {
<add> const compress = uglify.Compressor(options.compress || {
<ide> warnings: false
<ide> }); // eslint-disable-line new-cap
<ide> ast = ast.transform(compress);
<ide> class UglifyJsPlugin {
<ide> uglify.mangle_properties(ast, options.mangle.props);
<ide> }
<ide> }
<del> let output = {};
<add> const output = {};
<ide> output.comments = Object.prototype.hasOwnProperty.call(options, "comments") ? options.comments : /^\**!|@preserve|@license/;
<ide> output.beautify = options.beautify;
<ide> for(let k in options.output) {
<ide> class UglifyJsPlugin {
<ide> }
<ide> } catch(err) {
<ide> if(err.line) {
<del> let original = sourceMap && sourceMap.originalPositionFor({
<add> const original = sourceMap && sourceMap.originalPositionFor({
<ide> line: err.line,
<ide> column: err.col
<ide> }); | 1 |
Javascript | Javascript | add test for async hooks parity for async/await | e993e45dbf99a155583ed4c7e933ceae926f597c | <ide><path>test/async-hooks/hook-checks.js
<ide> exports.checkInvocations = function checkInvocations(activity, hooks, stage) {
<ide> );
<ide>
<ide> // Check that actual invocations for all hooks match the expected invocations
<del> [ 'init', 'before', 'after', 'destroy' ].forEach(checkHook);
<add> [ 'init', 'before', 'after', 'destroy', 'promiseResolve' ].forEach(checkHook);
<ide>
<ide> function checkHook(k) {
<ide> const val = hooks[k];
<ide><path>test/async-hooks/init-hooks.js
<ide> class ActivityCollector {
<ide> onbefore,
<ide> onafter,
<ide> ondestroy,
<add> onpromiseResolve,
<ide> logid = null,
<ide> logtype = null
<ide> } = {}) {
<ide> class ActivityCollector {
<ide> this.onbefore = typeof onbefore === 'function' ? onbefore : noop;
<ide> this.onafter = typeof onafter === 'function' ? onafter : noop;
<ide> this.ondestroy = typeof ondestroy === 'function' ? ondestroy : noop;
<add> this.onpromiseResolve = typeof onpromiseResolve === 'function' ?
<add> onpromiseResolve : noop;
<ide>
<ide> // Create the hook with which we'll collect activity data
<ide> this._asyncHook = async_hooks.createHook({
<ide> init: this._init.bind(this),
<ide> before: this._before.bind(this),
<ide> after: this._after.bind(this),
<del> destroy: this._destroy.bind(this)
<add> destroy: this._destroy.bind(this),
<add> promiseResolve: this._promiseResolve.bind(this)
<ide> });
<ide> }
<ide>
<ide> class ActivityCollector {
<ide> this.ondestroy(uid);
<ide> }
<ide>
<add> _promiseResolve(uid) {
<add> const h = this._getActivity(uid, 'promiseResolve');
<add> this._stamp(h, 'promiseResolve');
<add> this._maybeLog(uid, h && h.type, 'promiseResolve');
<add> this.onpromiseResolve(uid);
<add> }
<add>
<ide> _maybeLog(uid, type, name) {
<ide> if (this._logid &&
<ide> (type == null || this._logtype == null || this._logtype === type)) {
<ide> exports = module.exports = function initHooks({
<ide> onbefore,
<ide> onafter,
<ide> ondestroy,
<add> onpromiseResolve,
<ide> allowNoInit,
<ide> logid,
<ide> logtype
<ide> exports = module.exports = function initHooks({
<ide> onbefore,
<ide> onafter,
<ide> ondestroy,
<add> onpromiseResolve,
<ide> allowNoInit,
<ide> logid,
<ide> logtype
<ide><path>test/async-hooks/test-async-await.js
<add>'use strict';
<add>const common = require('../common');
<add>
<add>// This test ensures async hooks are being properly called
<add>// when using async-await mechanics. This involves:
<add>// 1. Checking that all initialized promises are being resolved
<add>// 2. Checking that for each 'before' corresponding hook 'after' hook is called
<add>
<add>const assert = require('assert');
<add>const initHooks = require('./init-hooks');
<add>
<add>const util = require('util');
<add>
<add>const sleep = util.promisify(setTimeout);
<add>// either 'inited' or 'resolved'
<add>const promisesInitState = new Map();
<add>// either 'before' or 'after' AND asyncId must be present in the other map
<add>const promisesExecutionState = new Map();
<add>
<add>const hooks = initHooks({
<add> oninit,
<add> onbefore,
<add> onafter,
<add> ondestroy: null, // Intentionally not tested, since it will be removed soon
<add> onpromiseResolve
<add>});
<add>hooks.enable();
<add>
<add>function oninit(asyncId, type, triggerAsyncId, resource) {
<add> if (type === 'PROMISE') {
<add> promisesInitState.set(asyncId, 'inited');
<add> }
<add>}
<add>
<add>function onbefore(asyncId) {
<add> if (!promisesInitState.has(asyncId)) {
<add> return;
<add> }
<add> promisesExecutionState.set(asyncId, 'before');
<add>}
<add>
<add>function onafter(asyncId) {
<add> if (!promisesInitState.has(asyncId)) {
<add> return;
<add> }
<add>
<add> assert.strictEqual(promisesExecutionState.get(asyncId), 'before',
<add> 'after hook called for promise without prior call' +
<add> 'to before hook');
<add> assert.strictEqual(promisesInitState.get(asyncId), 'resolved',
<add> 'after hook called for promise without prior call' +
<add> 'to resolve hook');
<add> promisesExecutionState.set(asyncId, 'after');
<add>}
<add>
<add>function onpromiseResolve(asyncId) {
<add> assert(promisesInitState.has(asyncId),
<add> 'resolve hook called for promise without prior call to init hook');
<add>
<add> promisesInitState.set(asyncId, 'resolved');
<add>}
<add>
<add>const timeout = common.platformTimeout(10);
<add>
<add>function checkPromisesInitState() {
<add> for (const initState of promisesInitState.values()) {
<add> assert.strictEqual(initState, 'resolved',
<add> 'promise initialized without being resolved');
<add> }
<add>}
<add>
<add>function checkPromisesExecutionState() {
<add> for (const executionState of promisesExecutionState.values()) {
<add> assert.strictEqual(executionState, 'after',
<add> 'mismatch between before and after hook calls');
<add> }
<add>}
<add>
<add>process.on('beforeExit', common.mustCall(() => {
<add> hooks.disable();
<add> hooks.sanityCheck('PROMISE');
<add>
<add> checkPromisesInitState();
<add> checkPromisesExecutionState();
<add>}));
<add>
<add>async function asyncFunc() {
<add> await sleep(timeout);
<add>}
<add>
<add>asyncFunc(); | 3 |
PHP | PHP | fix handling of head http requests | f624a6849f6d4b32ce6187bffaf6293cad5277c7 | <ide><path>laravel/request.php
<ide> public static function uri()
<ide> */
<ide> public static function method()
<ide> {
<add> if ($_SERVER['REQUEST_METHOD'] == 'HEAD')
<add> {
<add> return 'GET';
<add> }
<add>
<ide> return (static::spoofed()) ? $_POST[Request::spoofer] : $_SERVER['REQUEST_METHOD'];
<ide> }
<ide>
<ide><path>laravel/routing/router.php
<ide> class Router {
<ide> *
<ide> * @var array
<ide> */
<del> public static $routes = array();
<add> public static $routes = array(
<add> 'GET' => array(),
<add> 'POST' => array(),
<add> 'PUT' => array(),
<add> 'DELETE' => array(),
<add> 'HEAD' => array(),
<add> );
<ide>
<ide> /**
<ide> * All of the "fallback" routes that have been registered. | 2 |
Text | Text | add javascript jquery book by duckett | d5177b4bea9f25714e95aa15859cb36df0160e2f | <ide><path>guide/english/book-recommendations/javascript/index.md
<ide> A collection of articles covering the basics (core language and working with a b
<ide> - [Online](https://javascript.info/)
<ide> - [E-book](https://javascript.info/ebook)
<ide>
<add>#### Javascript and Jquery: Interactive Front End Web Development
<add>This full-color book will show you how to make your websites more interactive and your interfaces more interesting and intuitive. Author is Jon Duckett who also penned HTML & CSS: Design and Build Websites. *Does not cover ES6*
<add>
<add>- [Amazon](https://www.amazon.com/JavaScript-JQuery-Interactive-Front-End-Development/dp/1118531647)
<add>
<add>
<ide> #### Other Resources
<ide>
<ide> - A selection of online free resources; including e-books for JavaScript are available at [JS Books](https://jsbooks.revolunet.com/) | 1 |
PHP | PHP | add validate mac address | 69179b843b2b43abd28744a0f25656455870a44e | <ide><path>src/Illuminate/Validation/Concerns/ValidatesAttributes.php
<ide> public function validateIpv6($attribute, $value)
<ide> return filter_var($value, FILTER_VALIDATE_IP, FILTER_FLAG_IPV6) !== false;
<ide> }
<ide>
<add> /**
<add> * Validate that an attribute is a valid MAC address.
<add> *
<add> * @param string $attribute
<add> * @param mixed $value
<add> * @return bool
<add> */
<add> public function validateMacAddress($attribute, $value)
<add> {
<add> return filter_var($value, FILTER_VALIDATE_MAC) !== false;
<add> }
<add>
<ide> /**
<ide> * Validate the attribute is a valid JSON string.
<ide> *
<ide><path>tests/Validation/ValidationValidatorTest.php
<ide> public function testValidateIp()
<ide> $this->assertTrue($v->fails());
<ide> }
<ide>
<add> public function testValidateMacAddress()
<add> {
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => 'foo'], ['mac' => 'mac_address']);
<add> $this->assertFalse($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01-23-45-67-89-ab'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01-23-45-67-89-AB'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01-23-45-67-89-aB'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01:23:45:67:89:ab'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01:23:45:67:89:AB'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01:23:45:67:89:aB'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '01:23:45-67:89:aB'], ['mac' => 'mac_address']);
<add> $this->assertFalse($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => 'xx:23:45:67:89:aB'], ['mac' => 'mac_address']);
<add> $this->assertFalse($v->passes());
<add>
<add> $trans = $this->getIlluminateArrayTranslator();
<add> $v = new Validator($trans, ['mac' => '0123.4567.89ab'], ['mac' => 'mac_address']);
<add> $this->assertTrue($v->passes());
<add> }
<add>
<ide> public function testValidateEmail()
<ide> {
<ide> $trans = $this->getIlluminateArrayTranslator(); | 2 |
Javascript | Javascript | run jshint and jscs on release scripts | b80700c75f2d4920ebdd81b6122fa0c4f65723b1 | <ide><path>Gruntfile.js
<ide> module.exports = function( grunt ) {
<ide> jshint: {
<ide> all: {
<ide> src: [
<del> "src/**/*.js", "Gruntfile.js", "test/**/*.js", "build/tasks/*",
<del> "build/{bower-install,release-notes,release}.js"
<add> "src/**/*.js", "Gruntfile.js", "test/**/*.js", "build/**/*.js"
<ide> ],
<ide> options: {
<ide> jshintrc: true
<ide> module.exports = function( grunt ) {
<ide>
<ide> // Right know, check only test helpers
<ide> test: [ "test/data/testrunner.js", "test/data/testinit.js" ],
<add> release: "build/*.js",
<ide> tasks: "build/tasks/*.js"
<ide> },
<ide> testswarm: {
<ide><path>build/release-notes.js
<ide> http.request({
<ide> port: 80,
<ide> method: "GET",
<ide> path: "/query?status=closed&resolution=fixed&max=400&component=!web&order=component&milestone=" + version
<del>}, function (res) {
<add>}, function( res ) {
<ide> var data = [];
<ide>
<ide> res.on( "data", function( chunk ) {
<ide> http.request({
<ide>
<ide> });
<ide> }).end();
<del> | 2 |
PHP | PHP | cleanup the caster | ae0e42fb7ff008e48abd4baa7427bba3232ab9fe | <ide><path>src/Illuminate/Foundation/Console/IlluminateCaster.php
<ide> use Illuminate\Support\Collection;
<ide> use Illuminate\Foundation\Application;
<ide> use Illuminate\Database\Eloquent\Model;
<del>use Symfony\Component\VarDumper\Cloner\Stub;
<ide> use Symfony\Component\VarDumper\Caster\Caster;
<ide>
<ide> class IlluminateCaster
<ide> class IlluminateCaster
<ide> * Get an array representing the properties of an application.
<ide> *
<ide> * @param \Illuminate\Foundation\Application $app
<del> * @param array $a
<del> * @param Stub $stub
<del> * @param bool $isNested
<del> * @param int $filter
<ide> * @return array
<ide> */
<del> public static function castApplication(Application $app, array $a, Stub $stub, $isNested, $filter = 0)
<add> public static function castApplication(Application $app)
<ide> {
<del> $a = [];
<add> $result = [];
<ide>
<ide> foreach (self::$appProperties as $property) {
<ide> try {
<ide> $val = $app->$property();
<ide> if (!is_null($val)) {
<del> $a[Caster::PREFIX_VIRTUAL . $property] = $val;
<add> $result[Caster::PREFIX_VIRTUAL . $property] = $val;
<ide> }
<ide> } catch (Exception $e) {
<ide> //
<ide> }
<ide> }
<ide>
<del> return $a;
<add> return $result;
<ide> }
<ide>
<ide> /**
<ide> * Get an array representing the properties of a collection.
<ide> *
<ide> * @param \Illuminate\Support\Collection $value
<del> * @param array $a
<del> * @param Stub $stub
<del> * @param bool $isNested
<del> * @param int $filter
<ide> * @return array
<ide> */
<del> public static function castCollection(Collection $coll, array $a, Stub $stub, $isNested, $filter = 0)
<add> public static function castCollection(Collection $collection)
<ide> {
<ide> return [
<ide> Caster::PREFIX_VIRTUAL.'all' => $coll->all(),
<ide> public static function castCollection(Collection $coll, array $a, Stub $stub, $i
<ide> * Get an array representing the properties of a model.
<ide> *
<ide> * @param \Illuminate\Database\Eloquent\Model $model
<del> * @param array $a
<del> * @param Stub $stub
<del> * @param bool $isNested
<del> * @param int $filter
<ide> * @return array
<ide> */
<del> public static function castModel(Model $model, array $a, Stub $stub, $isNested, $filter = 0)
<add> public static function castModel(Model $model)
<ide> {
<ide> $attributes = array_merge($model->getAttributes(), $model->getRelations());
<ide> $visible = array_flip($model->getVisible() ?: array_diff(array_keys($attributes), $model->getHidden()));
<ide> $attributes = array_intersect_key($attributes, $visible);
<ide>
<del> $a = [];
<add> $result = [];
<add>
<ide> foreach ($attributes as $key => $value) {
<del> $a[(isset($visible[$key]) ? Caster::PREFIX_VIRTUAL : Caster::PREFIX_PROTECTED).$key] = $value;
<add> $result[(isset($visible[$key]) ? Caster::PREFIX_VIRTUAL : Caster::PREFIX_PROTECTED).$key] = $value;
<ide> }
<ide>
<del> return $a;
<add> return $result;
<ide> }
<ide> } | 1 |
Python | Python | install typing package under python 2.7 | 7416379d1bb1bc1ece5ce9f621222318234de77d | <ide><path>setup.py
<ide> def run(self):
<ide> if PY2_pre_279:
<ide> install_requires.append('backports.ssl_match_hostname')
<ide>
<add>if PY2:
<add> install_requires.append('typing')
<add>
<ide> needs_pytest = {'pytest', 'test', 'ptr'}.intersection(sys.argv)
<ide> pytest_runner = ['pytest-runner'] if needs_pytest else []
<ide> | 1 |
Go | Go | use device id as specified by caller | 7b2b15d3e9f9b7ad898a36bbe5ceb42c9ca58d47 | <ide><path>daemon/graphdriver/devmapper/deviceset.go
<ide> func (devices *DeviceSet) initMetaData() error {
<ide> return nil
<ide> }
<ide>
<add>// incNextDeviceId advances devices.NextDeviceId to the next candidate
<add>// thin-device id. Callers are expected to hold whatever lock protects
<add>// NextDeviceId; this helper does no locking of its own.
<add>func (devices *DeviceSet) incNextDeviceId() {
<add>	// Ids are 24bit, so wrap around
<add>	devices.NextDeviceId = (devices.NextDeviceId + 1) & 0xffffff
<add>}
<add>
<add>// createDevice allocates a new thin device in the pool, starting from the
<add>// id requested in *deviceId and retrying with successive candidates
<add>// whenever the pool reports that the id is already taken. On success the
<add>// chosen id is left in *deviceId and NextDeviceId is advanced past it;
<add>// any other failure is returned unchanged.
<add>func (devices *DeviceSet) createDevice(deviceId *int) error {
<add>	for {
<add>		err := devicemapper.CreateDevice(devices.getPoolDevName(), *deviceId)
<add>		if err == nil {
<add>			break
<add>		}
<add>		if !devicemapper.DeviceIdExists(err) {
<add>			log.Debugf("Error creating device: %s", err)
<add>			return err
<add>		}
<add>		// The requested id collided with an existing device;
<add>		// move on to the next candidate and retry.
<add>		devices.incNextDeviceId()
<add>		*deviceId = devices.NextDeviceId
<add>	}
<add>	devices.incNextDeviceId()
<add>	return nil
<add>}
<add>
<add>// createSnapDevice creates a thin snapshot of baseInfo's device, starting
<add>// from the id requested in *deviceId and retrying with successive
<add>// candidates whenever the pool reports that the id already exists. On
<add>// success the chosen id is left in *deviceId and NextDeviceId is advanced
<add>// past it; any other failure is returned unchanged.
<add>func (devices *DeviceSet) createSnapDevice(baseInfo *DevInfo, deviceId *int) error {
<add>	log.Debugf("[deviceset] createSnapDevice() DeviceId=%d", *deviceId)
<add>	// Defer a closure rather than the Debugf call itself: a deferred
<add>	// call's arguments are evaluated at defer time, so deferring
<add>	// log.Debugf(..., *deviceId) directly would log the stale initial
<add>	// id whenever the retry loop below switched to a new one.
<add>	defer func() {
<add>		log.Debugf("[deviceset] createSnapDevice() END DeviceId=%d", *deviceId)
<add>	}()
<add>
<add>	for {
<add>		if err := devicemapper.CreateSnapDevice(devices.getPoolDevName(), *deviceId, baseInfo.Name(), baseInfo.DeviceId); err != nil {
<add>			if devicemapper.DeviceIdExists(err) {
<add>				// Device Id already exists. Try a new one.
<add>				devices.incNextDeviceId()
<add>				*deviceId = devices.NextDeviceId
<add>				continue
<add>			}
<add>			log.Debugf("Error creating snap device: %s", err)
<add>			return err
<add>		}
<add>		break
<add>	}
<add>	devices.incNextDeviceId()
<add>	return nil
<add>}
<add>
<ide> func (devices *DeviceSet) loadMetadata(hash string) *DevInfo {
<ide> info := &DevInfo{Hash: hash, devices: devices}
<ide>
<ide> func (devices *DeviceSet) setupBaseImage() error {
<ide>
<ide> log.Debugf("Initializing base device-mapper thin volume")
<ide>
<del> id := devices.NextDeviceId
<del>
<ide> // Create initial device
<del> if err := devicemapper.CreateDevice(devices.getPoolDevName(), &id); err != nil {
<add> deviceId := devices.NextDeviceId
<add> if err := devices.createDevice(&deviceId); err != nil {
<ide> return err
<ide> }
<ide>
<del> // Ids are 24bit, so wrap around
<del> devices.NextDeviceId = (id + 1) & 0xffffff
<del>
<del> log.Debugf("Registering base device (id %v) with FS size %v", id, devices.baseFsSize)
<del> info, err := devices.registerDevice(id, "", devices.baseFsSize)
<add> log.Debugf("Registering base device (id %v) with FS size %v", deviceId, devices.baseFsSize)
<add> info, err := devices.registerDevice(deviceId, "", devices.baseFsSize)
<ide> if err != nil {
<del> _ = devicemapper.DeleteDevice(devices.getPoolDevName(), id)
<add> _ = devicemapper.DeleteDevice(devices.getPoolDevName(), deviceId)
<ide> return err
<ide> }
<ide>
<ide> func (devices *DeviceSet) initDevmapper(doInit bool) error {
<ide> }
<ide>
<ide> func (devices *DeviceSet) AddDevice(hash, baseHash string) error {
<add> log.Debugf("[deviceset] AddDevice() hash=%s basehash=%s", hash, baseHash)
<add> defer log.Debugf("[deviceset] AddDevice END")
<add>
<ide> baseInfo, err := devices.lookupDevice(baseHash)
<ide> if err != nil {
<ide> return err
<ide> func (devices *DeviceSet) AddDevice(hash, baseHash string) error {
<ide> }
<ide>
<ide> deviceId := devices.NextDeviceId
<del>
<del> if err := devicemapper.CreateSnapDevice(devices.getPoolDevName(), &deviceId, baseInfo.Name(), baseInfo.DeviceId); err != nil {
<add> if err := devices.createSnapDevice(baseInfo, &deviceId); err != nil {
<ide> log.Debugf("Error creating snap device: %s", err)
<ide> return err
<ide> }
<ide>
<del> // Ids are 24bit, so wrap around
<del> devices.NextDeviceId = (deviceId + 1) & 0xffffff
<del>
<ide> if _, err := devices.registerDevice(deviceId, hash, baseInfo.Size); err != nil {
<ide> devicemapper.DeleteDevice(devices.getPoolDevName(), deviceId)
<ide> log.Debugf("Error registering device: %s", err)
<ide><path>pkg/devicemapper/devmapper.go
<ide> var (
<ide> ErrGetLoopbackBackingFile = errors.New("Unable to get loopback backing file")
<ide> ErrLoopbackSetCapacity = errors.New("Unable set loopback capacity")
<ide> ErrBusy = errors.New("Device is Busy")
<add> ErrDeviceIdExists = errors.New("Device Id Exists")
<ide>
<ide> dmSawBusy bool
<ide> dmSawExist bool
<ide> type (
<ide> AddNodeType int
<ide> )
<ide>
<add>// Returns whether error conveys the information about device Id already
<add>// exist or not. This will be true if device creation or snap creation
<add>// operation fails if device or snap device already exists in pool.
<add>// Current implementation is little crude as it scans the error string
<add>// for exact pattern match. Replacing it with more robust implementation
<add>// is desirable.
<add>func DeviceIdExists(err error) bool {
<add> return fmt.Sprint(err) == fmt.Sprint(ErrDeviceIdExists)
<add>}
<add>
<ide> func (t *Task) destroy() {
<ide> if t != nil {
<ide> DmTaskDestroy(t.unmanaged)
<ide> func ResumeDevice(name string) error {
<ide> return nil
<ide> }
<ide>
<del>func CreateDevice(poolName string, deviceId *int) error {
<del> log.Debugf("[devmapper] CreateDevice(poolName=%v, deviceId=%v)", poolName, *deviceId)
<del>
<del> for {
<del> task, err := TaskCreateNamed(DeviceTargetMsg, poolName)
<del> if task == nil {
<del> return err
<del> }
<add>func CreateDevice(poolName string, deviceId int) error {
<add> log.Debugf("[devmapper] CreateDevice(poolName=%v, deviceId=%v)", poolName, deviceId)
<add> task, err := TaskCreateNamed(DeviceTargetMsg, poolName)
<add> if task == nil {
<add> return err
<add> }
<ide>
<del> if err := task.SetSector(0); err != nil {
<del> return fmt.Errorf("Can't set sector %s", err)
<del> }
<add> if err := task.SetSector(0); err != nil {
<add> return fmt.Errorf("Can't set sector %s", err)
<add> }
<ide>
<del> if err := task.SetMessage(fmt.Sprintf("create_thin %d", *deviceId)); err != nil {
<del> return fmt.Errorf("Can't set message %s", err)
<del> }
<add> if err := task.SetMessage(fmt.Sprintf("create_thin %d", deviceId)); err != nil {
<add> return fmt.Errorf("Can't set message %s", err)
<add> }
<ide>
<del> dmSawExist = false // reset before the task is run
<del> if err := task.Run(); err != nil {
<del> if dmSawExist {
<del> // Already exists, try next id
<del> *deviceId++
<del> continue
<del> }
<add> dmSawExist = false // reset before the task is run
<add> if err := task.Run(); err != nil {
<add> // Caller wants to know about ErrDeviceIdExists so that it can try with a different device id.
<add> if dmSawExist {
<add> return ErrDeviceIdExists
<add> } else {
<ide> return fmt.Errorf("Error running CreateDevice %s", err)
<ide> }
<del> break
<ide> }
<ide> return nil
<ide> }
<ide> func ActivateDevice(poolName string, name string, deviceId int, size uint64) err
<ide> return nil
<ide> }
<ide>
<del>func CreateSnapDevice(poolName string, deviceId *int, baseName string, baseDeviceId int) error {
<add>func CreateSnapDevice(poolName string, deviceId int, baseName string, baseDeviceId int) error {
<ide> devinfo, _ := GetInfo(baseName)
<ide> doSuspend := devinfo != nil && devinfo.Exists != 0
<ide>
<ide> func CreateSnapDevice(poolName string, deviceId *int, baseName string, baseDevic
<ide> }
<ide> }
<ide>
<del> for {
<del> task, err := TaskCreateNamed(DeviceTargetMsg, poolName)
<del> if task == nil {
<del> if doSuspend {
<del> ResumeDevice(baseName)
<del> }
<del> return err
<add> task, err := TaskCreateNamed(DeviceTargetMsg, poolName)
<add> if task == nil {
<add> if doSuspend {
<add> ResumeDevice(baseName)
<ide> }
<add> return err
<add> }
<ide>
<del> if err := task.SetSector(0); err != nil {
<del> if doSuspend {
<del> ResumeDevice(baseName)
<del> }
<del> return fmt.Errorf("Can't set sector %s", err)
<add> if err := task.SetSector(0); err != nil {
<add> if doSuspend {
<add> ResumeDevice(baseName)
<ide> }
<add> return fmt.Errorf("Can't set sector %s", err)
<add> }
<ide>
<del> if err := task.SetMessage(fmt.Sprintf("create_snap %d %d", *deviceId, baseDeviceId)); err != nil {
<del> if doSuspend {
<del> ResumeDevice(baseName)
<del> }
<del> return fmt.Errorf("Can't set message %s", err)
<add> if err := task.SetMessage(fmt.Sprintf("create_snap %d %d", deviceId, baseDeviceId)); err != nil {
<add> if doSuspend {
<add> ResumeDevice(baseName)
<ide> }
<add> return fmt.Errorf("Can't set message %s", err)
<add> }
<ide>
<del> dmSawExist = false // reset before the task is run
<del> if err := task.Run(); err != nil {
<del> if dmSawExist {
<del> // Already exists, try next id
<del> *deviceId++
<del> continue
<del> }
<del>
<del> if doSuspend {
<del> ResumeDevice(baseName)
<del> }
<add> dmSawExist = false // reset before the task is run
<add> if err := task.Run(); err != nil {
<add> if doSuspend {
<add> ResumeDevice(baseName)
<add> }
<add> // Caller wants to know about ErrDeviceIdExists so that it can try with a different device id.
<add> if dmSawExist {
<add> return ErrDeviceIdExists
<add> } else {
<ide> return fmt.Errorf("Error running DeviceCreate (createSnapDevice) %s", err)
<ide> }
<del>
<del> break
<ide> }
<ide>
<ide> if doSuspend { | 2 |
Javascript | Javascript | remove file from tree | 4960478a4794429a47b7de6942abcfb71a6adda9 | <ide><path>gulpfile.js
<del>var gulp = require( 'gulp' );
<del>var pump = require( 'pump' );
<del>var connect = require( 'gulp-connect' );
<del>
<del>
<del>gulp.task('runserver', function() {
<del>
<del> connect.server( { root: '.', port: 8888 } );
<del>
<del>});
<del>
<del> gulp.task( 'default', [ 'runserver' ] ); | 1 |
Javascript | Javascript | fix some errors in debugger tests | aea568b04a26cb7d725df57e5c593a1e0d5147b3 | <ide><path>lib/_debugger.js
<ide> function Client() {
<ide> this._reqCallbacks = [];
<ide> var socket = this;
<ide>
<del> this.currentFrame = 0;
<add> this.currentFrame = NO_FRAME;
<ide> this.currentSourceLine = -1;
<ide> this.currentSource = null;
<ide> this.handles = {};
<ide><path>test/simple/test-debugger-client.js
<ide> addTest(function (client, done) {
<ide>
<ide> addTest(function (client, done) {
<ide> console.error("requesting scripts");
<del> client.reqScripts(function (s) {
<del> console.error("got %d scripts", s.length);
<add> client.reqScripts(function () {
<add> console.error("got %d scripts", Object.keys(client.scripts).length);
<add>
<ide> var foundMainScript = false;
<del> for (var i = 0; i < s.length; i++) {
<del> if (s[i].name === 'node.js') {
<add> for (var k in client.scripts) {
<add> var script = client.scripts[k];
<add> if (script && script.name === 'node.js') {
<ide> foundMainScript = true;
<ide> break;
<ide> } | 2 |
Javascript | Javascript | fix multifile tests | 51bb887a28a5f71aab543108d6207df34df19ee5 | <ide><path>cypress/integration/learn/challenges/multifile.js
<ide> describe('Challenge with multifile editor', () => {
<ide> });
<ide>
<ide> it('renders the file tab buttons', () => {
<del> cy.get(selectors.monacoTabs).should('exist');
<ide> cy.get(selectors.monacoTabs).contains('index.html');
<ide> cy.get(selectors.monacoTabs).contains('styles.css');
<ide> });
<ide>
<del> it('checks for correct text at different widths', () => {
<del> cy.viewport(768, 660)
<del> .get(selectors.testButton)
<del> .contains('Check Your Code (Ctrl + Enter)');
<add> it.only('checks for correct text at different widths', () => {
<add> cy.viewport(768, 660);
<add> cy.get(selectors.testButton).contains('Check Your Code (Ctrl + Enter)');
<ide>
<del> cy.viewport(767, 660).get(selectors.testButton).contains('Check Your Code');
<add> cy.viewport(767, 660);
<add> cy.get(selectors.testButton)
<add> .should('not.contain.text', '(Ctrl + Enter)')
<add> .contains('Check Your Code');
<ide> });
<ide> }); | 1 |
Go | Go | use defer to unlock mutex | 7ce810c7717ec171892370c13bd3da6290cb8940 | <ide><path>pkg/ioutils/bytespipe.go
<ide> func NewBytesPipe() *BytesPipe {
<ide> // It can allocate new []byte slices in a process of writing.
<ide> func (bp *BytesPipe) Write(p []byte) (int, error) {
<ide> bp.mu.Lock()
<add> defer bp.mu.Unlock()
<ide>
<ide> written := 0
<ide> loop0:
<ide> for {
<ide> if bp.closeErr != nil {
<del> bp.mu.Unlock()
<ide> return written, ErrClosed
<ide> }
<ide>
<ide> loop0:
<ide> // errBufferFull is an error we expect to get if the buffer is full
<ide> if err != nil && err != errBufferFull {
<ide> bp.wait.Broadcast()
<del> bp.mu.Unlock()
<ide> return written, err
<ide> }
<ide>
<ide> loop0:
<ide> bp.buf = append(bp.buf, getBuffer(nextCap))
<ide> }
<ide> bp.wait.Broadcast()
<del> bp.mu.Unlock()
<ide> return written, nil
<ide> }
<ide>
<ide> func (bp *BytesPipe) Close() error {
<ide> // Data could be read only once.
<ide> func (bp *BytesPipe) Read(p []byte) (n int, err error) {
<ide> bp.mu.Lock()
<add> defer bp.mu.Unlock()
<ide> if bp.bufLen == 0 {
<ide> if bp.closeErr != nil {
<del> err := bp.closeErr
<del> bp.mu.Unlock()
<del> return 0, err
<add> return 0, bp.closeErr
<ide> }
<ide> bp.wait.Wait()
<ide> if bp.bufLen == 0 && bp.closeErr != nil {
<del> err := bp.closeErr
<del> bp.mu.Unlock()
<del> return 0, err
<add> return 0, bp.closeErr
<ide> }
<ide> }
<ide>
<ide> func (bp *BytesPipe) Read(p []byte) (n int, err error) {
<ide> }
<ide>
<ide> bp.wait.Broadcast()
<del> bp.mu.Unlock()
<ide> return
<ide> }
<ide> | 1 |
Text | Text | fix header examples and add codesandbox for part 6 | c0ac9d38d2f8fd3f0b03d685e5a1fe070a07e132 | <ide><path>docs/tutorials/fundamentals/part-5-ui-and-react.md
<ide> const Header = () => {
<ide> // highlight-next-line
<ide> const dispatch = useDispatch()
<ide>
<del> const handleChange = e => setText(e.target.value.trim())
<add> const handleChange = e => setText(e.target.value
<ide>
<ide> const handleKeyDown = e => {
<del> const text = e.target.value.trim()
<add> const trimmedText = e.target.value.trim()
<ide> // If the user pressed the Enter key:
<del> if (e.which === 13) {
<add> if (e.which === 13 && trimmedText) {
<ide> // highlight-start
<ide> // Dispatch the "todo added" action with this text
<del> dispatch({ type: 'todos/todoAdded', payload: text })
<add> dispatch({ type: 'todos/todoAdded', payload: trimmedText })
<ide> // highlight-end
<ide> // And clear out the text input
<ide> setText('')
<ide><path>docs/tutorials/fundamentals/part-6-async-logic.md
<ide> Both of the middleware in that last section were very specific and only do one t
<ide>
<ide> Here's what that middleware might look like:
<ide>
<del>```js title="asyncFunctionMiddleware.js"
<add>```js title="Example async function middleware"
<ide> const asyncFunctionMiddleware = storeAPI => next => action => {
<ide> // If the "action" is actually a function instead...
<ide> if (typeof action === 'function') {
<ide> const Header = () => {
<ide> const [text, setText] = useState('')
<ide> const dispatch = useDispatch()
<ide>
<del> const handleChange = e => setText(e.target.value.trim())
<add> const handleChange = e => setText(e.target.value)
<ide>
<ide> const handleKeyDown = e => {
<ide> // If the user pressed the Enter key:
<del> if (e.which === 13 && text) {
<add> const trimmedText = text.trim()
<add> if (e.which === 13 && trimmedText) {
<ide> // highlight-start
<ide> // Create the thunk function with the text the user wrote
<del> const saveNewTodoThunk = saveNewTodo(text)
<add> const saveNewTodoThunk = saveNewTodo(trimmedText)
<ide> // Then dispatch the thunk function itself
<ide> dispatch(saveNewTodoThunk)
<ide> // highlight-end
<ide> component, we can skip creating the temporary variable. Instead, we can call `sa
<ide> ```js title="src/features/header/Header.js"
<ide> const handleKeyDown = e => {
<ide> // If the user pressed the Enter key:
<del> if (e.which === 13 && text) {
<add> const trimmedText = text.trim()
<add> if (e.which === 13 && trimmedText) {
<ide> // highlight-start
<ide> // Create the thunk function and immediately dispatch it
<del> dispatch(saveNewTodo(text))
<add> dispatch(saveNewTodo(trimmedText))
<ide> // highlight-end
<ide> setText('')
<ide> }
<ide> In the process, we saw how Redux middleware are used to let us make async calls
<ide>
<ide> Here's what the current app looks like:
<ide>
<del>**FIXME CodeSandbox here**
<add><iframe
<add> class="codesandbox"
<add> src="https://codesandbox.io/embed/github/reduxjs/redux-fundamentals-example-app/tree/checkpoint-6-asyncThunks/?fontsize=14&hidenavigation=1&theme=dark"
<add> title="redux-essentials-example-app"
<add> allow="geolocation; microphone; camera; midi; vr; accelerometer; gyroscope; payment; ambient-light-sensor; encrypted-media; usb"
<add> sandbox="allow-modals allow-forms allow-popups allow-scripts allow-same-origin"
<add>></iframe>
<ide>
<ide> :::tip
<ide> | 2 |
Python | Python | remove unused function | 0b1dcbac1488e62379c2da326d666b39221e84e9 | <ide><path>spacy/_ml.py
<ide> def backward(d_output, sgd=None):
<ide> return vectors, backward
<ide>
<ide>
<del>def fine_tune(embedding, combine=None):
<del> if combine is not None:
<del> raise NotImplementedError(
<del> "fine_tune currently only supports addition. Set combine=None")
<del> def fine_tune_fwd(docs_tokvecs, drop=0.):
<del> docs, tokvecs = docs_tokvecs
<del>
<del> lengths = model.ops.asarray([len(doc) for doc in docs], dtype='i')
<del>
<del> vecs, bp_vecs = embedding.begin_update(docs, drop=drop)
<del> flat_tokvecs = embedding.ops.flatten(tokvecs)
<del> flat_vecs = embedding.ops.flatten(vecs)
<del> output = embedding.ops.unflatten(
<del> (model.mix[0] * flat_tokvecs + model.mix[1] * flat_vecs), lengths)
<del>
<del> def fine_tune_bwd(d_output, sgd=None):
<del> flat_grad = model.ops.flatten(d_output)
<del> model.d_mix[0] += flat_tokvecs.dot(flat_grad.T).sum()
<del> model.d_mix[1] += flat_vecs.dot(flat_grad.T).sum()
<del>
<del> bp_vecs([d_o * model.mix[1] for d_o in d_output], sgd=sgd)
<del> if sgd is not None:
<del> sgd(model._mem.weights, model._mem.gradient, key=model.id)
<del> return [d_o * model.mix[0] for d_o in d_output]
<del> return output, fine_tune_bwd
<del>
<del> def fine_tune_predict(docs_tokvecs):
<del> docs, tokvecs = docs_tokvecs
<del> vecs = embedding(docs)
<del> return [model.mix[0]*tv+model.mix[1]*v
<del> for tv, v in zip(tokvecs, vecs)]
<del>
<del> model = wrap(fine_tune_fwd, embedding)
<del> model.mix = model._mem.add((model.id, 'mix'), (2,))
<del> model.mix.fill(0.5)
<del> model.d_mix = model._mem.add_gradient((model.id, 'd_mix'), (model.id, 'mix'))
<del> model.predict = fine_tune_predict
<del> return model
<del>
<del>
<ide> @layerize
<ide> def flatten(seqs, drop=0.):
<ide> if isinstance(seqs[0], numpy.ndarray): | 1 |
Python | Python | remove match_mlperf from expected cache keys | fefe47ee1f557ed13fc2fbcd7d5e0b6c11e5121b | <ide><path>official/recommendation/data_preprocessing.py
<ide>
<ide> _EXPECTED_CACHE_KEYS = (
<ide> rconst.TRAIN_USER_KEY, rconst.TRAIN_ITEM_KEY, rconst.EVAL_USER_KEY,
<del> rconst.EVAL_ITEM_KEY, rconst.USER_MAP, rconst.ITEM_MAP, "match_mlperf")
<add> rconst.EVAL_ITEM_KEY, rconst.USER_MAP, rconst.ITEM_MAP)
<ide>
<ide>
<ide> def _filter_index_sort(raw_rating_path, cache_path): | 1 |
Text | Text | trim training 101 | fb5dbe30b5cb662113d77903cb64552c57aa6ef9 | <ide><path>website/docs/usage/101/_training.md
<ide> it's learning the right things, you don't only need **training data** – you'll
<ide> also need **evaluation data**. If you only test the model with the data it was
<ide> trained on, you'll have no idea how well it's generalizing. If you want to train
<ide> a model from scratch, you usually need at least a few hundred examples for both
<del>training and evaluation. A good rule of thumb is that you should have 10
<del>samples for each significant figure of accuracy you report.
<del>If you only have 100 samples and your model predicts 92 of them correctly, you
<del>would report accuracy of 0.9 rather than 0.92.
<add>training and evaluation. | 1 |
Javascript | Javascript | remove ie9 testing checks | a8d8c9ad00d34d1b1f461212942def6b0c528933 | <ide><path>packages/ember-glimmer/tests/integration/helpers/custom-helper-test.js
<ide> moduleFor('Helpers test: custom helpers', class extends RenderingTest {
<ide> }
<ide> });
<ide>
<del>// these feature detects prevent errors in these tests
<del>// on platforms (*cough* IE9 *cough*) that do not
<del>// property support `Object.freeze`
<del>let pushingIntoFrozenArrayThrows = (() => {
<del> let array = [];
<del> Object.freeze(array);
<del>
<del> try {
<del> array.push('foo');
<del>
<del> return false;
<del> } catch (e) {
<del> return true;
<del> }
<del>})();
<del>
<del>let assigningExistingFrozenPropertyThrows = (() => {
<del> let obj = { foo: 'asdf' };
<del> Object.freeze(obj);
<del>
<del> try {
<del> obj.foo = 'derp';
<del>
<del> return false;
<del> } catch (e) {
<del> return true;
<del> }
<del>})();
<del>
<del>let addingPropertyToFrozenObjectThrows = (() => {
<del> let obj = { foo: 'asdf' };
<del> Object.freeze(obj);
<del>
<del> try {
<del> obj.bar = 'derp';
<del>
<del> return false;
<del> } catch (e) {
<del> return true;
<del> }
<del>})();
<del>
<del>if (!EmberDev.runningProdBuild && (
<del> pushingIntoFrozenArrayThrows ||
<del> assigningExistingFrozenPropertyThrows ||
<del> addingPropertyToFrozenObjectThrows
<del>)) {
<add>if (!EmberDev.runningProdBuild) {
<ide> class HelperMutatingArgsTests extends RenderingTest {
<ide> buildCompute() {
<ide> return (params, hash) => {
<del> if (pushingIntoFrozenArrayThrows) {
<del> this.assert.throws(() => {
<del> params.push('foo');
<add> this.assert.throws(() => {
<add> params.push('foo');
<ide>
<del> // cannot assert error message as it varies by platform
<del> });
<del> }
<add> // cannot assert error message as it varies by platform
<add> });
<ide>
<del> if (assigningExistingFrozenPropertyThrows) {
<del> this.assert.throws(() => {
<del> hash.foo = 'bar';
<add> this.assert.throws(() => {
<add> hash.foo = 'bar';
<ide>
<del> // cannot assert error message as it varies by platform
<del> });
<del> }
<add> // cannot assert error message as it varies by platform
<add> });
<ide>
<del> if (addingPropertyToFrozenObjectThrows) {
<del> this.assert.throws(() => {
<del> hash.someUnusedHashProperty = 'bar';
<add> this.assert.throws(() => {
<add> hash.someUnusedHashProperty = 'bar';
<ide>
<del> // cannot assert error message as it varies by platform
<del> });
<del> }
<add> // cannot assert error message as it varies by platform
<add> });
<ide> };
<ide> }
<ide> | 1 |
Javascript | Javascript | expand helper function | 6f6e7103d174be95cc62e87f28bcc753d6045062 | <ide><path>packages/ember-metal/lib/property_get.js
<ide> if (Ember.config.overrideAccessors) {
<ide> get = Ember.get;
<ide> }
<ide>
<del>function firstKey(path) {
<del> return path.match(FIRST_KEY)[0];
<del>}
<del>
<ide> /**
<ide> @private
<ide>
<ide> var normalizeTuple = Ember.normalizeTuple = function(target, path) {
<ide> if (hasThis) path = path.slice(5);
<ide>
<ide> if (target === Ember.lookup) {
<del> key = firstKey(path);
<add> key = path.match(FIRST_KEY)[0];
<ide> target = get(target, key);
<ide> path = path.slice(key.length+1);
<ide> } | 1 |
Text | Text | add example for beforeexit event | ed24b8f10ac34a1191c0d884b3074f15f0ba6e70 | <ide><path>doc/api/process.md
<ide> termination, such as calling [`process.exit()`][] or uncaught exceptions.
<ide> The `'beforeExit'` should *not* be used as an alternative to the `'exit'` event
<ide> unless the intention is to schedule additional work.
<ide>
<add>```js
<add>process.on('beforeExit', (code) => {
<add> console.log('Process beforeExit event with code: ', code);
<add>});
<add>
<add>process.on('exit', (code) => {
<add> console.log('Process exit event with code: ', code);
<add>});
<add>
<add>console.log('This message is displayed first.');
<add>
<add>// Prints:
<add>// This message is displayed first.
<add>// Process beforeExit event with code: 0
<add>// Process exit event with code: 0
<add>```
<add>
<ide> ### Event: 'disconnect'
<ide> <!-- YAML
<ide> added: v0.7.7 | 1 |
Ruby | Ruby | build the reverse_order on its proper method | 6c311e0b7538e8c55797aa889fdf66780ab283a4 | <ide><path>activerecord/lib/active_record/relation/merger.rb
<ide> def merge_multi_values
<ide> def merge_single_values
<ide> relation.from_value = values[:from] unless relation.from_value
<ide> relation.lock_value = values[:lock] unless relation.lock_value
<del> relation.reverse_order_value = values[:reverse_order]
<ide>
<ide> unless values[:create_with].blank?
<ide> relation.create_with_value = (relation.create_with_value || {}).merge(values[:create_with])
<ide><path>activerecord/lib/active_record/relation/query_methods.rb
<ide> def reverse_order
<ide> end
<ide>
<ide> def reverse_order! # :nodoc:
<del> self.reverse_order_value = !reverse_order_value
<add> orders = order_values.uniq
<add> orders.reject!(&:blank?)
<add> self.order_values = reverse_sql_order(orders)
<ide> self
<ide> end
<ide>
<ide> def symbol_unscoping(scope)
<ide>
<ide> case scope
<ide> when :order
<del> self.reverse_order_value = false
<ide> result = []
<ide> else
<ide> result = [] unless single_val_method
<ide> def array_of_strings?(o)
<ide> def build_order(arel)
<ide> orders = order_values.uniq
<ide> orders.reject!(&:blank?)
<del> orders = reverse_sql_order(orders) if reverse_order_value
<ide>
<ide> arel.order(*orders) unless orders.empty?
<ide> end
<ide><path>activerecord/test/cases/relation/mutation_test.rb
<ide> def relation
<ide> end
<ide>
<ide> test 'reverse_order!' do
<del> assert relation.reverse_order!.equal?(relation)
<del> assert relation.reverse_order_value
<add> relation = Post.order('title ASC, comments_count DESC')
<add>
<add> relation.reverse_order!
<add>
<add> assert_equal 'title DESC', relation.order_values.first
<add> assert_equal 'comments_count ASC', relation.order_values.last
<add>
<add>
<ide> relation.reverse_order!
<del> assert !relation.reverse_order_value
<add>
<add> assert_equal 'title ASC', relation.order_values.first
<add> assert_equal 'comments_count DESC', relation.order_values.last
<ide> end
<ide>
<ide> test 'create_with!' do
<ide><path>activerecord/test/cases/relations_test.rb
<ide> def test_automatically_added_reorder_references
<ide> assert_equal [], scope.references_values
<ide> end
<ide>
<add> def test_order_with_reorder_nil_removes_the_order
<add> relation = Post.order(:title).reorder(nil)
<add>
<add> assert_nil relation.order_values.first
<add> end
<add>
<add> def test_reverse_order_with_reorder_nil_removes_the_order
<add> relation = Post.order(:title).reverse_order.reorder(nil)
<add>
<add> assert_nil relation.order_values.first
<add> end
<add>
<ide> def test_presence
<ide> topics = Topic.all
<ide> | 4 |
Python | Python | fix indentation to follow pep8 | 9ef15659f5003121605ba5096405276dcdf54254 | <ide><path>numpy/random/setup.py
<ide> def generate_libraries(ext, build_dir):
<ide> defs = [('_LARGE_FILES', None)]
<ide> else:
<ide> defs = [('_FILE_OFFSET_BITS', '64'),
<del> ('_LARGEFILE_SOURCE', '1'),
<del> ('_LARGEFILE64_SOURCE', '1')]
<add> ('_LARGEFILE_SOURCE', '1'),
<add> ('_LARGEFILE64_SOURCE', '1')]
<ide> if needs_mingw_ftime_workaround():
<ide> defs.append(("NPY_NEEDS_MINGW_TIME_WORKAROUND", None))
<ide> | 1 |
Javascript | Javascript | add validatebuffer to improve error | bb4891d8d465c951e0ab1c10346251f61f60e8c4 | <ide><path>lib/fs.js
<ide> function read(fd, buffer, offsetOrOptions, length, position, callback) {
<ide> }
<ide> ({
<ide> offset = 0,
<del> length = buffer.byteLength - offset,
<add> length = buffer?.byteLength - offset,
<ide> position = null,
<ide> } = params ?? kEmptyObject);
<ide> }
<ide><path>test/parallel/test-fs-read.js
<ide> assert.throws(
<ide>
<ide> assert.throws(
<ide> () => fs.read(fd, { buffer: null }, common.mustNotCall()),
<del> /TypeError: Cannot read properties of null \(reading 'byteLength'\)/,
<add> { code: 'ERR_INVALID_ARG_TYPE' },
<ide> 'throws when options.buffer is null'
<ide> );
<ide> | 2 |
PHP | PHP | refactor the config class for more simplicity | d9b7b7dc52b7f700e50bb4f06258011dc8f7232b | <ide><path>system/config.php
<ide> public static function get($key, $default = null)
<ide> {
<ide> list($module, $file, $key) = static::parse($key);
<ide>
<del> if ( ! static::load($module, $file))
<del> {
<del> return is_callable($default) ? call_user_func($default) : $default;
<del> }
<add> static::load($module, $file);
<ide>
<ide> if (is_null($key)) return static::$items[$module][$file];
<ide>
<ide> public static function set($key, $value)
<ide> {
<ide> list($module, $file, $key) = static::parse($key);
<ide>
<del> if ( ! static::load($module, $file))
<del> {
<del> throw new \Exception("Error setting configuration option. Option [$key] is not defined.");
<del> }
<add> static::load($module, $file);
<ide>
<ide> (is_null($key)) ? static::$items[$module][$file] = $value : Arr::set(static::$items[$module][$file], $key, $value);
<ide> }
<ide>
<ide> /**
<del> * Parse a configuration key into its module, file, and key parts.
<add> * Parse a configuration key into its module, file, and key segments.
<ide> *
<ide> * @param string $key
<ide> * @return array
<ide> private static function parse($key)
<ide> *
<ide> * @param string $file
<ide> * @param string $module
<del> * @return bool
<add> * @return void
<ide> */
<ide> private static function load($module, $file)
<ide> {
<del> if (isset(static::$items[$module]) and array_key_exists($file, static::$items[$module])) return true;
<add> if (isset(static::$items[$module][$file])) return true;
<ide>
<ide> $path = ($module === 'application') ? CONFIG_PATH : MODULE_PATH.$module.'/config/';
<ide>
<ide> private static function load($module, $file)
<ide> $config = array_merge($config, require $path);
<ide> }
<ide>
<del> if (count($config) > 0) static::$items[$module][$file] = $config;
<del>
<del> return isset(static::$items[$module][$file]);
<add> static::$items[$module][$file] = $config;
<ide> }
<ide>
<ide> }
<ide>\ No newline at end of file | 1 |
Ruby | Ruby | convert `polymorphicarrayvalue` to poro queries | 8170bcd99ad3ee4ac4ddf9e28a7c2a5fb93f1b0c | <ide><path>activerecord/lib/active_record/relation/predicate_builder.rb
<ide> def initialize(table)
<ide> register_handler(RangeHandler::RangeWithBinds, RangeHandler.new)
<ide> register_handler(Relation, RelationHandler.new)
<ide> register_handler(Array, ArrayHandler.new(self))
<del> register_handler(PolymorphicArrayValue, PolymorphicArrayHandler.new(self))
<ide> end
<ide>
<ide> def build_from_hash(attributes)
<ide> def create_binds_for_hash(attributes)
<ide> if associated_table.polymorphic_association?
<ide> case value.is_a?(Array) ? value.first : value
<ide> when Base, Relation
<del> binds.concat(value.bound_attributes) if value.is_a?(Relation)
<ide> value = [value] unless value.is_a?(Array)
<ide> klass = PolymorphicArrayValue
<ide> end
<ide> end
<ide>
<ide> if klass
<del> result[column_name] = klass.new(associated_table, value)
<add> result[column_name] = klass.new(associated_table, value).queries.map do |query|
<add> attrs, bvs = create_binds_for_hash(query)
<add> binds.concat(bvs)
<add> attrs
<add> end
<ide> else
<ide> queries = AssociationQueryValue.new(associated_table, value).queries
<ide> attrs, bvs = create_binds_for_hash(queries)
<ide><path>activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
<ide> def call(attribute, value)
<ide> nils, values = values.partition(&:nil?)
<ide>
<ide> return attribute.in([]) if values.empty? && nils.empty?
<add> return queries_predicates(values) if nils.empty? && values.all? { |v| v.is_a?(Hash) }
<ide>
<ide> ranges, values = values.partition { |v| v.is_a?(Range) }
<ide>
<ide> def call(attribute, value)
<ide>
<ide> array_predicates = ranges.map { |range| predicate_builder.build(attribute, range) }
<ide> array_predicates.unshift(values_predicate)
<del> array_predicates.inject { |composite, predicate| composite.or(predicate) }
<add> array_predicates.inject(&:or)
<ide> end
<ide>
<ide> # TODO Change this to private once we've dropped Ruby 2.2 support.
<ide> def self.or(other)
<ide> other
<ide> end
<ide> end
<add>
<add> private
<add> def queries_predicates(queries)
<add> if queries.size > 1
<add> queries.map do |query|
<add> Arel::Nodes::And.new(predicate_builder.build_from_hash(query))
<add> end.inject(&:or)
<add> else
<add> predicate_builder.build_from_hash(queries.first)
<add> end
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_handler.rb
<ide> module ActiveRecord
<ide> class PredicateBuilder
<del> class PolymorphicArrayHandler # :nodoc:
<del> def initialize(predicate_builder)
<del> @predicate_builder = predicate_builder
<del> end
<del>
<del> def call(attribute, value)
<del> predicates = value.queries.map { |query| predicate_builder.build_from_hash(query) }
<del>
<del> if predicates.size > 1
<del> type_and_ids_predicates = predicates.map { |type_predicate, id_predicate| Arel::Nodes::Grouping.new(type_predicate.and(id_predicate)) }
<del> type_and_ids_predicates.inject(&:or)
<del> else
<del> predicates.first
<del> end
<del> end
<del>
<del> # TODO Change this to private once we've dropped Ruby 2.2 support.
<del> # Workaround for Ruby 2.2 "private attribute?" warning.
<del> protected
<del>
<del> attr_reader :predicate_builder
<del> end
<del>
<ide> class PolymorphicArrayValue # :nodoc:
<ide> attr_reader :associated_table, :values
<ide>
<ide> def queries
<ide> type_to_ids_mapping.map do |type, ids|
<ide> {
<ide> associated_table.association_foreign_type.to_s => type,
<del> associated_table.association_foreign_key.to_s => ids
<add> associated_table.association_foreign_key.to_s => ids.size > 1 ? ids : ids.first
<ide> }
<ide> end
<ide> end | 3 |
Go | Go | remove an unit test from integrations test | 2ec1146679598837cd8bab62dc672bcda2a9610c | <ide><path>integration/commands_test.go
<ide> func TestCmdLogs(t *testing.T) {
<ide> }
<ide> }
<ide>
<del>// Expected behaviour: using / as a bind mount source should throw an error
<del>func TestRunErrorBindMountRootSource(t *testing.T) {
<del>
<del> cli := docker.NewDockerCli(nil, nil, ioutil.Discard, testDaemonProto, testDaemonAddr)
<del> defer cleanup(globalEngine, t)
<del>
<del> c := make(chan struct{})
<del> go func() {
<del> defer close(c)
<del> if err := cli.CmdRun("-v", "/:/tmp", unitTestImageID, "echo 'should fail'"); err == nil {
<del> t.Fatal("should have failed to run when using / as a source for the bind mount")
<del> }
<del> }()
<del>
<del> setTimeout(t, "CmdRun timed out", 5*time.Second, func() {
<del> <-c
<del> })
<del>}
<del>
<ide> // Expected behaviour: error out when attempting to bind mount non-existing source paths
<ide> func TestRunErrorBindNonExistingSource(t *testing.T) {
<ide>
<ide> func TestRunErrorBindNonExistingSource(t *testing.T) {
<ide> c := make(chan struct{})
<ide> go func() {
<ide> defer close(c)
<add> // This check is made at runtime, can't be "unit tested"
<ide> if err := cli.CmdRun("-v", "/i/dont/exist:/tmp", unitTestImageID, "echo 'should fail'"); err == nil {
<ide> t.Fatal("should have failed to run when using /i/dont/exist as a source for the bind mount")
<ide> } | 1 |
Javascript | Javascript | improve time scale for zoom and pan | 38373300eef9fe31204e8421cae1263a5f3b94d0 | <ide><path>src/scales/scale.category.js
<ide> module.exports = function(Chart) {
<ide> // Implement this so that
<ide> determineDataLimits: function() {
<ide> this.minIndex = 0;
<del> this.maxIndex = this.chart.data.labels.length;
<add> this.maxIndex = this.chart.data.labels.length - 1;
<ide> var findIndex;
<ide>
<ide> if (this.options.ticks.min !== undefined) {
<ide> module.exports = function(Chart) {
<ide>
<ide> buildTicks: function(index) {
<ide> // If we are viewing some subset of labels, slice the original array
<del> this.ticks = (this.minIndex === 0 && this.maxIndex === this.chart.data.labels.length) ? this.chart.data.labels : this.chart.data.labels.slice(this.minIndex, this.maxIndex + 1);
<add> this.ticks = (this.minIndex === 0 && this.maxIndex === this.chart.data.labels.length - 1) ? this.chart.data.labels : this.chart.data.labels.slice(this.minIndex, this.maxIndex + 1);
<ide> },
<ide>
<ide> getLabelForIndex: function(index, datasetIndex) {
<ide> module.exports = function(Chart) {
<ide> // Used to get data value locations. Value can either be an index or a numerical value
<ide> getPixelForValue: function(value, index, datasetIndex, includeOffset) {
<ide> // 1 is added because we need the length but we have the indexes
<del> var offsetAmt = Math.max((this.ticks.length - ((this.options.gridLines.offsetGridLines) ? 0 : 1)), 1);
<add> var offsetAmt = Math.max((this.maxIndex + 1 - this.minIndex - ((this.options.gridLines.offsetGridLines) ? 0 : 1)), 1);
<ide>
<ide> if (this.isHorizontal()) {
<ide> var innerWidth = this.width - (this.paddingLeft + this.paddingRight);
<ide> module.exports = function(Chart) {
<ide> getPixelForTick: function(index, includeOffset) {
<ide> return this.getPixelForValue(this.ticks[index], index + this.minIndex, null, includeOffset);
<ide> },
<del> getValueForPixel: function(pixel)
<del> {
<add> getValueForPixel: function(pixel) {
<ide> var value
<ide> ; var offsetAmt = Math.max((this.ticks.length - ((this.options.gridLines.offsetGridLines) ? 0 : 1)), 1);
<ide> var horz = this.isHorizontal();
<ide><path>src/scales/scale.time.js
<ide> module.exports = function(Chart) {
<ide> unitDefinition = time.units[unitDefinitionIndex];
<ide>
<ide> this.tickUnit = unitDefinition.name;
<del> this.leadingUnitBuffer = this.firstTick.diff(this.firstTick.clone().startOf(this.tickUnit), this.tickUnit, true);
<del> this.scaleSizeInUnits = this.lastTick.diff(this.firstTick, this.tickUnit, true) + (this.leadingUnitBuffer > 0 ? 2 : 0);
<add> var leadingUnitBuffer = this.firstTick.diff(this.firstTick.clone().startOf(this.tickUnit), this.tickUnit, true);
<add> var trailingUnitBuffer = this.lastTick.clone().add(1, this.tickUnit).startOf(this.tickUnit).diff(this.lastTick, this.tickUnit, true);
<add> this.scaleSizeInUnits = this.lastTick.diff(this.firstTick, this.tickUnit, true) + leadingUnitBuffer + trailingUnitBuffer;
<ide> this.displayFormat = this.options.time.displayFormats[unitDefinition.name];
<ide> }
<ide> }
<ide> module.exports = function(Chart) {
<ide>
<ide> // Only round the last tick if we have no hard maximum
<ide> if (!this.options.time.max) {
<del> this.lastTick.endOf(this.tickUnit);
<add> var roundedEnd = this.lastTick.clone().startOf(this.tickUnit);
<add> if (roundedEnd.diff(this.lastTick, this.tickUnit, true) !== 0) {
<add> // Do not use end of because we need this to be in the next time unit
<add> this.lastTick.add(1, this.tickUnit).startOf(this.tickUnit);
<add> }
<ide> }
<ide>
<ide> this.smallestLabelSeparation = this.width;
<ide> module.exports = function(Chart) {
<ide> this.ticks.push(this.firstTick.clone());
<ide>
<ide> // For every unit in between the first and last moment, create a moment and add it to the ticks tick
<del> for (var i = 1; i < this.scaleSizeInUnits; ++i) {
<add> for (var i = 1; i <= this.scaleSizeInUnits; ++i) {
<ide> var newTick = roundedStart.clone().add(i, this.tickUnit);
<ide>
<ide> // Are we greater than the max time
<ide> module.exports = function(Chart) {
<ide> }
<ide>
<ide> // Always show the right tick
<del> if (this.ticks[this.ticks.length - 1].diff(this.lastTick, this.tickUnit) !== 0 || this.scaleSizeInUnits === 0) {
<del> // this is a weird case. If the <max> option is the same as the end option, we can't just diff the times because the tick was created from the roundedStart
<del> // but the last tick was not rounded.
<add> var diff = this.ticks[this.ticks.length - 1].diff(this.lastTick, this.tickUnit);
<add> if (diff !== 0 || this.scaleSizeInUnits === 0) {
<add> // this is a weird case. If the <max> option is the same as the end option, we can't just diff the times because the tick was created from the roundedStart
<add> // but the last tick was not rounded.
<ide> if (this.options.time.max) {
<ide> this.ticks.push(this.lastTick.clone());
<ide> this.scaleSizeInUnits = this.lastTick.diff(this.ticks[0], this.tickUnit, true);
<ide> } else {
<del> this.scaleSizeInUnits = Math.ceil(this.scaleSizeInUnits / this.unitScale) * this.unitScale;
<del> this.ticks.push(this.firstTick.clone().add(this.scaleSizeInUnits, this.tickUnit));
<del> this.lastTick = this.ticks[this.ticks.length - 1].clone();
<add> this.ticks.push(this.lastTick.clone());
<add> this.scaleSizeInUnits = this.lastTick.diff(this.firstTick, this.tickUnit, true);
<ide> }
<ide> }
<add>
<ide> this.ctx.restore();
<ide> },
<ide> // Get tooltip label
<ide> module.exports = function(Chart) {
<ide> this.ticks = this.ticks.map(this.tickFormatFunction, this);
<ide> },
<ide> getPixelForValue: function(value, index, datasetIndex, includeOffset) {
<del> var labelMoment = this.getLabelMoment(datasetIndex, index);
<add> var labelMoment = value && value.isValid && value.isValid() ? value : this.getLabelMoment(datasetIndex, index);
<ide>
<ide> if (labelMoment) {
<ide> var offset = labelMoment.diff(this.firstTick, this.tickUnit, true);
<ide>
<del> var decimal = offset / (this.scaleSizeInUnits - (this.leadingUnitBuffer > 0 ? 1 : 0));
<add> var decimal = offset / this.scaleSizeInUnits;
<ide>
<ide> if (this.isHorizontal()) {
<ide> var innerWidth = this.width - (this.paddingLeft + this.paddingRight);
<ide> module.exports = function(Chart) {
<ide> getValueForPixel: function(pixel) {
<ide> var innerDimension = this.isHorizontal() ? this.width - (this.paddingLeft + this.paddingRight) : this.height - (this.paddingTop + this.paddingBottom);
<ide> var offset = (pixel - (this.isHorizontal() ? this.left + this.paddingLeft : this.top + this.paddingTop)) / innerDimension;
<del> offset *= (this.scaleSizeInUnits - (this.leadingUnitBuffer > 0 ? 1 : 0));
<add> offset *= this.scaleSizeInUnits;
<ide> return this.firstTick.clone().add(moment.duration(offset, this.tickUnit).asSeconds(), 'seconds');
<ide> },
<ide> parseTime: function(label) {
<ide><path>test/scale.time.tests.js
<ide> describe('Time scale tests', function() {
<ide> var result = false;
<ide>
<ide> var diff = actual.diff(expected.value, expected.unit, true);
<del> result = Math.abs(diff) < 0.5;
<add> result = Math.abs(diff) < (expected.threshold !== undefined ? expected.threshold : 0.5);
<ide>
<ide> return {
<ide> pass: result
<ide> describe('Time scale tests', function() {
<ide> scale.update(400, 50);
<ide>
<ide> // Counts down because the lines are drawn top to bottom
<del> expect(scale.ticks).toEqual([ 'Dec 28, 2014', 'Jan 11, 2015' ]);
<add> expect(scale.ticks).toEqual([ 'Dec 28, 2014', 'Jan 4, 2015', 'Jan 11, 2015' ]);
<ide> });
<ide>
<ide> it('should build ticks using date objects', function() {
<ide> describe('Time scale tests', function() {
<ide> scale.update(400, 50);
<ide>
<ide> // Counts down because the lines are drawn top to bottom
<del> expect(scale.ticks).toEqual([ 'Dec 28, 2014', 'Jan 11, 2015' ]);
<add> expect(scale.ticks).toEqual([ 'Dec 28, 2014', 'Jan 4, 2015', 'Jan 11, 2015' ]);
<ide> });
<ide>
<ide> it('should build ticks when the data is xy points', function() {
<ide> describe('Time scale tests', function() {
<ide> var xScale = chartInstance.scales.xScale0;
<ide>
<ide> // Counts down because the lines are drawn top to bottom
<del> expect(xScale.ticks[0]).toEqualOneOf(['Nov 19, 1981', 'Nov 20, 1981']); // handle time zone changes
<del> expect(xScale.ticks[1]).toEqualOneOf(['Nov 19, 1981', 'Nov 20, 1981']); // handle time zone changes
<add> expect(xScale.ticks[0]).toEqualOneOf(['Nov 19, 1981', 'Nov 20, 1981', 'Nov 21, 1981']); // handle time zone changes
<add> expect(xScale.ticks[1]).toEqualOneOf(['Nov 19, 1981', 'Nov 20, 1981', 'Nov 21, 1981']); // handle time zone changes
<ide> });
<ide>
<ide> it('should build ticks using the config unit', function() {
<ide> describe('Time scale tests', function() {
<ide> var xScale = chartInstance.scales.xScale0;
<ide>
<ide> expect(xScale.getPixelForValue('', 0, 0)).toBeCloseToPixel(78);
<del> expect(xScale.getPixelForValue('', 6, 0)).toBeCloseToPixel(466);
<add> expect(xScale.getPixelForValue('', 6, 0)).toBeCloseToPixel(452);
<ide>
<ide> expect(xScale.getValueForPixel(78)).toBeCloseToTime({
<ide> value: moment(chartInstance.data.labels[0]),
<del> unit: 'hour'
<add> unit: 'hour',
<add> threshold: 0.75
<ide> });
<del> expect(xScale.getValueForPixel(466)).toBeCloseToTime({
<add> expect(xScale.getValueForPixel(452)).toBeCloseToTime({
<ide> value: moment(chartInstance.data.labels[6]),
<ide> unit: 'hour'
<ide> }); | 3 |
Python | Python | add drf settings + output format + testcases | f208d8d2bbe2f418caa51199070f703fba544d49 | <ide><path>rest_framework/fields.py
<ide> from django.forms import widgets
<ide> from django.utils.encoding import is_protected_type
<ide> from django.utils.translation import ugettext_lazy as _
<del>from rest_framework.compat import parse_date, parse_datetime
<add>
<ide> from rest_framework.compat import timezone
<ide> from rest_framework.compat import BytesIO
<ide> from rest_framework.compat import six
<ide> from rest_framework.compat import smart_text
<del>from rest_framework.compat import parse_time
<add>from rest_framework.settings import api_settings
<ide> from rest_framework.utils.dates import get_readable_date_format
<ide>
<ide>
<ide> class DateField(WritableField):
<ide> empty = None
<ide>
<ide> def __init__(self, *args, **kwargs):
<del> self.format = kwargs.pop('format', settings.DATE_INPUT_FORMATS)
<add> self.input_formats = kwargs.pop('input_formats', api_settings.DATE_INPUT_FORMATS)
<add> self.output_format = kwargs.pop('output_format', api_settings.DATE_OUTPUT_FORMAT)
<ide> super(DateField, self).__init__(*args, **kwargs)
<ide>
<ide> def from_native(self, value):
<ide> def from_native(self, value):
<ide> if isinstance(value, datetime.date):
<ide> return value
<ide>
<del> for format in self.format:
<add> for format in self.input_formats:
<ide> try:
<ide> parsed = datetime.datetime.strptime(value, format)
<ide> except (ValueError, TypeError):
<ide> pass
<ide> else:
<ide> return parsed.date()
<ide>
<del> date_input_formats = '; '.join(self.format)
<add> date_input_formats = '; '.join(self.input_formats)
<ide> msg = self.error_messages['invalid'] % get_readable_date_format(date_input_formats)
<ide> raise ValidationError(msg)
<ide>
<add> def to_native(self, value):
<add> if self.output_format is not None:
<add> return value.strftime(self.output_format)
<add> return value.isoformat()
<add>
<ide>
<ide> class DateTimeField(WritableField):
<ide> type_name = 'DateTimeField'
<ide> class DateTimeField(WritableField):
<ide> empty = None
<ide>
<ide> def __init__(self, *args, **kwargs):
<del> self.format = kwargs.pop('format', settings.DATETIME_INPUT_FORMATS)
<add> self.input_formats = kwargs.pop('input_formats', api_settings.DATETIME_INPUT_FORMATS)
<add> self.output_format = kwargs.pop('output_format', api_settings.DATETIME_OUTPUT_FORMAT)
<ide> super(DateTimeField, self).__init__(*args, **kwargs)
<ide>
<ide> def from_native(self, value):
<ide> def from_native(self, value):
<ide> value = timezone.make_aware(value, default_timezone)
<ide> return value
<ide>
<del> for format in self.format:
<add> for format in self.input_formats:
<ide> try:
<ide> parsed = datetime.datetime.strptime(value, format)
<ide> except (ValueError, TypeError):
<ide> pass
<ide> else:
<ide> return parsed
<ide>
<del> datetime_input_formats = '; '.join(self.format)
<add> datetime_input_formats = '; '.join(self.input_formats)
<ide> msg = self.error_messages['invalid'] % get_readable_date_format(datetime_input_formats)
<ide> raise ValidationError(msg)
<ide>
<add> def to_native(self, value):
<add> if self.output_format is not None:
<add> return value.strftime(self.output_format)
<add> return value.isoformat()
<add>
<ide>
<ide> class TimeField(WritableField):
<ide> type_name = 'TimeField'
<ide> class TimeField(WritableField):
<ide> empty = None
<ide>
<ide> def __init__(self, *args, **kwargs):
<del> self.format = kwargs.pop('format', settings.TIME_INPUT_FORMATS)
<add> self.input_formats = kwargs.pop('input_formats', api_settings.TIME_INPUT_FORMATS)
<add> self.output_format = kwargs.pop('output_format', api_settings.TIME_OUTPUT_FORMAT)
<ide> super(TimeField, self).__init__(*args, **kwargs)
<ide>
<ide> def from_native(self, value):
<ide> def from_native(self, value):
<ide> if isinstance(value, datetime.time):
<ide> return value
<ide>
<del> for format in self.format:
<add> for format in self.input_formats:
<ide> try:
<ide> parsed = datetime.datetime.strptime(value, format)
<ide> except (ValueError, TypeError):
<ide> pass
<ide> else:
<ide> return parsed.time()
<ide>
<del> time_input_formats = '; '.join(self.format)
<add> time_input_formats = '; '.join(self.input_formats)
<ide> msg = self.error_messages['invalid'] % get_readable_date_format(time_input_formats)
<ide> raise ValidationError(msg)
<ide>
<add> def to_native(self, value):
<add> if self.output_format is not None:
<add> return value.strftime(self.output_format)
<add> return value.isoformat()
<add>
<ide>
<ide> class IntegerField(WritableField):
<ide> type_name = 'IntegerField'
<ide><path>rest_framework/settings.py
<ide> 'URL_FORMAT_OVERRIDE': 'format',
<ide>
<ide> 'FORMAT_SUFFIX_KWARG': 'format',
<add>
<add> # Input and output formats
<add> 'DATE_INPUT_FORMATS': (
<add> '%Y-%m-%d', # '1984-07-31'
<add> ),
<add> 'DATE_OUTPUT_FORMAT': None,
<add>
<add> 'DATETIME_INPUT_FORMATS': (
<add> '%Y-%m-%d', # '1984-07-31'
<add> '%Y-%m-%d %H:%M', # '1984-07-31 04:31'
<add> '%Y-%m-%d %H:%M:%S', # '1984-07-31 04:31:59'
<add> '%Y-%m-%d %H:%M:%S.%f', # '1984-07-31 04:31:59.000200'
<add> ),
<add> 'DATETIME_OUTPUT_FORMAT': None,
<add>
<add> 'TIME_INPUT_FORMATS': (
<add> '%H:%M', # '04:31'
<add> '%H:%M:%S', # '04:31:59'
<add> '%H:%M:%S.%f', # '04:31:59.000200'
<add> ),
<add> 'TIME_OUTPUT_FORMAT': None,
<ide> }
<ide>
<ide>
<ide><path>rest_framework/tests/fields.py
<ide> from __future__ import unicode_literals
<ide> import datetime
<ide>
<del>import django
<ide> from django.db import models
<ide> from django.test import TestCase
<ide> from django.core import validators
<del>from django.utils import unittest
<ide>
<ide> from rest_framework import serializers
<ide>
<ide> class CharPrimaryKeyModel(models.Model):
<ide> id = models.CharField(max_length=20, primary_key=True)
<ide>
<ide>
<del>class DateObject(object):
<del> def __init__(self, date):
<del> self.date = date
<del>
<del>
<del>class DateTimeObject(object):
<del> def __init__(self, date_time):
<del> self.date_time = date_time
<del>
<del>
<del>class TimeObject(object):
<del> def __init__(self, time):
<del> self.time = time
<del>
<del>
<ide> class TimestampedModelSerializer(serializers.ModelSerializer):
<ide> class Meta:
<ide> model = TimestampedModel
<ide> class Meta:
<ide> model = CharPrimaryKeyModel
<ide>
<ide>
<del>class DateObjectSerializer(serializers.Serializer):
<del> date = serializers.DateField()
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.date = attrs['date']
<del> return instance
<del> return DateObject(**attrs)
<del>
<del>
<del>class DateObjectCustomFormatSerializer(serializers.Serializer):
<del> date = serializers.DateField(format=("%Y", "%Y -- %m"))
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.date = attrs['date']
<del> return instance
<del> return DateObject(**attrs)
<del>
<del>
<del>class DateTimeObjectSerializer(serializers.Serializer):
<del> date_time = serializers.DateTimeField()
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.date_time = attrs['date_time']
<del> return instance
<del> return DateTimeObject(**attrs)
<del>
<del>
<del>class DateTimeObjectCustomFormatSerializer(serializers.Serializer):
<del> date_time = serializers.DateTimeField(format=("%Y", "%Y %H:%M"))
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.date_time = attrs['date_time']
<del> return instance
<del> return DateTimeObject(**attrs)
<del>
<del>
<del>class TimeObjectSerializer(serializers.Serializer):
<del> time = serializers.TimeField()
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.time = attrs['time']
<del> return instance
<del> return TimeObject(**attrs)
<del>
<del>
<del>class TimeObjectCustomFormatSerializer(serializers.Serializer):
<del> time = serializers.TimeField(format=("%H -- %M", "%H%M%S"))
<del>
<del> def restore_object(self, attrs, instance=None):
<del> if instance is not None:
<del> instance.time = attrs['time']
<del> return instance
<del> return TimeObject(**attrs)
<del>
<del>
<ide> class TimeFieldModel(models.Model):
<ide> clock = models.TimeField()
<ide>
<ide> def test_non_auto_pk_fields_not_read_only(self):
<ide> PK fields other than AutoField fields should not be read_only by default.
<ide> """
<ide> serializer = CharPrimaryKeyModelSerializer()
<del> self.assertEqual(serializer.fields['id'].read_only, False)
<add> self.assertEqual(serializer.fields['id'].read_only, False)
<ide>
<ide>
<ide> class DateFieldTest(TestCase):
<del> def test_valid_default_date_input_formats(self):
<del> serializer = DateObjectSerializer(data={'date': '1984-07-31'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '07/31/1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '07/31/84'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': 'Jul 31 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': 'Jul 31, 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '31 Jul 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '31 Jul 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': 'July 31 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': 'July 31, 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '31 July 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateObjectSerializer(data={'date': '31 July, 1984'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> def test_valid_custom_date_input_formats(self):
<del> serializer = DateObjectCustomFormatSerializer(data={'date': '1984'})
<del> self.assertTrue(serializer.is_valid())
<add> """
<add> Tests for the DateField from_native() and to_native() behavior
<add> """
<ide>
<del> serializer = DateObjectCustomFormatSerializer(data={'date': '1984 -- 07'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> def test_wrong_default_date_input_format(self):
<del> serializer = DateObjectSerializer(data={'date': 'something wrong'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'date': ['Date has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD; MM/DD/YYYY; MM/DD/YY; [Jan through Dec] DD YYYY; '
<del> '[Jan through Dec] DD, YYYY; DD [Jan through Dec] YYYY; '
<del> 'DD [Jan through Dec], YYYY; [January through December] DD YYYY; '
<del> '[January through December] DD, YYYY; DD [January through December] YYYY; '
<del> 'DD [January through December], YYYY']})
<del>
<del> def test_wrong_custom_date_input_format(self):
<del> serializer = DateObjectCustomFormatSerializer(data={'date': '07/31/1984'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'date': ['Date has wrong format. Use one of these formats instead: YYYY; YYYY -- MM']})
<del>
<del> def test_from_native(self):
<add> def test_from_native_string(self):
<add> """
<add> Make sure from_native() accepts default iso input formats.
<add> """
<ide> f = serializers.DateField()
<del> result = f.from_native('1984-07-31')
<add> result_1 = f.from_native('1984-07-31')
<ide>
<del> self.assertEqual(datetime.date(1984, 7, 31), result)
<add> self.assertEqual(datetime.date(1984, 7, 31), result_1)
<ide>
<ide> def test_from_native_datetime_date(self):
<ide> """
<ide> Make sure from_native() accepts a datetime.date instance.
<ide> """
<ide> f = serializers.DateField()
<del> result = f.from_native(datetime.date(1984, 7, 31))
<add> result_1 = f.from_native(datetime.date(1984, 7, 31))
<add>
<add> self.assertEqual(result_1, datetime.date(1984, 7, 31))
<add>
<add> def test_from_native_custom_format(self):
<add> """
<add> Make sure from_native() accepts custom input formats.
<add> """
<add> f = serializers.DateField(input_formats=['%Y -- %d'])
<add> result = f.from_native('1984 -- 31')
<ide>
<del> self.assertEqual(result, datetime.date(1984, 7, 31))
<add> self.assertEqual(datetime.date(1984, 1, 31), result)
<add>
<add> def test_from_native_invalid_default_on_custom_format(self):
<add> """
<add> Make sure from_native() does not accept default formats if a custom format is set
<add> """
<add> f = serializers.DateField(input_formats=['%Y -- %d'])
<add>
<add> try:
<add> f.from_native('1984-07-31')
<add> except validators.ValidationError as e:
<add> self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY -- DD"])
<add> else:
<add> self.fail("ValidationError was not properly raised")
<ide>
<ide> def test_from_native_empty(self):
<add> """
<add> Make sure from_native() returns None on empty param.
<add> """
<ide> f = serializers.DateField()
<ide> result = f.from_native('')
<ide>
<ide> self.assertEqual(result, None)
<ide>
<ide> def test_from_native_invalid_date(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid date.
<add> """
<ide> f = serializers.DateField()
<ide>
<ide> try:
<del> f.from_native('1984-42-31')
<add> f.from_native('1984-13-31')
<ide> except validators.ValidationError as e:
<del> self.assertEqual(e.messages, ['Date has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD; MM/DD/YYYY; MM/DD/YY; [Jan through Dec] DD YYYY; '
<del> '[Jan through Dec] DD, YYYY; DD [Jan through Dec] YYYY; '
<del> 'DD [Jan through Dec], YYYY; [January through December] DD YYYY; '
<del> '[January through December] DD, YYYY; DD [January through December] YYYY; '
<del> 'DD [January through December], YYYY'])
<add> self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY-MM-DD"])
<ide> else:
<ide> self.fail("ValidationError was not properly raised")
<ide>
<add> def test_from_native_invalid_format(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid format.
<add> """
<add> f = serializers.DateField()
<ide>
<del>class DateTimeFieldTest(TestCase):
<del> def test_valid_default_date_time_input_formats(self):
<del> serializer = DateTimeObjectSerializer(data={'date_time': '1984-07-31 04:31:59'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '1984-07-31 04:31'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '1984-07-31'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/1984 04:31:59'})
<del> self.assertTrue(serializer.is_valid())
<del>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/1984 04:31'})
<del> self.assertTrue(serializer.is_valid())
<add> try:
<add> f.from_native('1984 -- 31')
<add> except validators.ValidationError as e:
<add> self.assertEqual(e.messages, ["Date has wrong format. Use one of these formats instead: YYYY-MM-DD"])
<add> else:
<add> self.fail("ValidationError was not properly raised")
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/1984'})
<del> self.assertTrue(serializer.is_valid())
<add> def test_to_native(self):
<add> """
<add> Make sure to_native() returns isoformat as default.
<add> """
<add> f = serializers.DateField()
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/84 04:31:59'})
<del> self.assertTrue(serializer.is_valid())
<add> result_1 = f.to_native(datetime.date(1984, 7, 31))
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/84 04:31'})
<del> self.assertTrue(serializer.is_valid())
<add> self.assertEqual('1984-07-31', result_1)
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/84'})
<del> self.assertTrue(serializer.is_valid())
<add> def test_to_native_custom_format(self):
<add> """
<add> Make sure to_native() returns correct custom format.
<add> """
<add> f = serializers.DateField(output_format="%Y - %m.%d")
<ide>
<del> @unittest.skipUnless(django.VERSION >= (1, 4), "django < 1.4 don't have microseconds in default settings")
<del> def test_valid_default_date_time_input_formats_for_django_gte_1_4(self):
<del> serializer = DateTimeObjectSerializer(data={'date_time': '1984-07-31 04:31:59.123456'})
<del> self.assertTrue(serializer.is_valid())
<add> result_1 = f.to_native(datetime.date(1984, 7, 31))
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/1984 04:31:59.123456'})
<del> self.assertTrue(serializer.is_valid())
<add> self.assertEqual('1984 - 07.31', result_1)
<ide>
<del> serializer = DateTimeObjectSerializer(data={'date_time': '07/31/84 04:31:59.123456'})
<del> self.assertTrue(serializer.is_valid())
<ide>
<del> def test_valid_custom_date_time_input_formats(self):
<del> serializer = DateTimeObjectCustomFormatSerializer(data={'date_time': '1984'})
<del> self.assertTrue(serializer.is_valid())
<add>class DateTimeFieldTest(TestCase):
<add> """
<add> Tests for the DateTimeField from_native() and to_native() behavior
<add> """
<ide>
<del> serializer = DateTimeObjectCustomFormatSerializer(data={'date_time': '1984 04:31'})
<del> self.assertTrue(serializer.is_valid())
<add> def test_from_native_string(self):
<add> """
<add> Make sure from_native() accepts default iso input formats.
<add> """
<add> f = serializers.DateTimeField()
<add> result_1 = f.from_native('1984-07-31')
<add> result_2 = f.from_native('1984-07-31 04:31')
<add> result_3 = f.from_native('1984-07-31 04:31:59')
<add> result_4 = f.from_native('1984-07-31 04:31:59.000200')
<ide>
<del> @unittest.skipUnless(django.VERSION >= (1, 4), "django < 1.4 don't have microseconds in default settings")
<del> def test_wrong_default_date_time_input_format_for_django_gte_1_4(self):
<del> serializer = DateTimeObjectSerializer(data={'date_time': 'something wrong'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'date_time': ['Datetime has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD HH:MM:SS; YYYY-MM-DD HH:MM:SS.uuuuuu; YYYY-MM-DD HH:MM; '
<del> 'YYYY-MM-DD; MM/DD/YYYY HH:MM:SS; MM/DD/YYYY HH:MM:SS.uuuuuu; '
<del> 'MM/DD/YYYY HH:MM; MM/DD/YYYY; MM/DD/YY HH:MM:SS; '
<del> 'MM/DD/YY HH:MM:SS.uuuuuu; MM/DD/YY HH:MM; MM/DD/YY']})
<add> self.assertEqual(datetime.datetime(1984, 7, 31), result_1)
<add> self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31), result_2)
<add> self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59), result_3)
<add> self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31, 59, 200), result_4)
<ide>
<del> @unittest.skipUnless(django.VERSION < (1, 4), "django >= 1.4 have microseconds in default settings")
<del> def test_wrong_default_date_time_input_format_for_django_lt_1_4(self):
<del> serializer = DateTimeObjectSerializer(data={'date_time': 'something wrong'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'date_time': ['Datetime has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD HH:MM:SS; YYYY-MM-DD HH:MM; YYYY-MM-DD; '
<del> 'MM/DD/YYYY HH:MM:SS; MM/DD/YYYY HH:MM; MM/DD/YYYY; '
<del> 'MM/DD/YY HH:MM:SS; MM/DD/YY HH:MM; MM/DD/YY']})
<add> def test_from_native_datetime_datetime(self):
<add> """
<add> Make sure from_native() accepts a datetime.datetime instance.
<add> """
<add> f = serializers.DateTimeField()
<add> result_1 = f.from_native(datetime.datetime(1984, 7, 31))
<add> result_2 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31))
<add> result_3 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31, 59))
<add> result_4 = f.from_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200))
<ide>
<del> def test_wrong_custom_date_time_input_format(self):
<del> serializer = DateTimeObjectCustomFormatSerializer(data={'date_time': '07/31/84 04:31'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'date_time': ['Datetime has wrong format. Use one of these formats instead: YYYY; YYYY HH:MM']})
<add> self.assertEqual(result_1, datetime.datetime(1984, 7, 31))
<add> self.assertEqual(result_2, datetime.datetime(1984, 7, 31, 4, 31))
<add> self.assertEqual(result_3, datetime.datetime(1984, 7, 31, 4, 31, 59))
<add> self.assertEqual(result_4, datetime.datetime(1984, 7, 31, 4, 31, 59, 200))
<ide>
<del> def test_from_native(self):
<del> f = serializers.DateTimeField()
<del> result = f.from_native('1984-07-31 04:31')
<add> def test_from_native_custom_format(self):
<add> """
<add> Make sure from_native() accepts custom input formats.
<add> """
<add> f = serializers.DateTimeField(input_formats=['%Y -- %H:%M'])
<add> result = f.from_native('1984 -- 04:59')
<ide>
<del> self.assertEqual(datetime.datetime(1984, 7, 31, 4, 31), result)
<add> self.assertEqual(datetime.datetime(1984, 1, 1, 4, 59), result)
<ide>
<del> def test_from_native_datetime_datetime(self):
<add> def test_from_native_invalid_default_on_custom_format(self):
<ide> """
<del> Make sure from_native() accepts a datetime.date instance.
<add> Make sure from_native() does not accept default formats if a custom format is set
<ide> """
<del> f = serializers.DateTimeField()
<del> result = f.from_native(datetime.datetime(1984, 7, 31))
<add> f = serializers.DateTimeField(input_formats=['%Y -- %H:%M'])
<ide>
<del> self.assertEqual(result, datetime.datetime(1984, 7, 31))
<add> try:
<add> f.from_native('1984-07-31 04:31:59')
<add> except validators.ValidationError as e:
<add> self.assertEqual(e.messages, ["Datetime has wrong format. Use one of these formats instead: YYYY -- HH:MM"])
<add> else:
<add> self.fail("ValidationError was not properly raised")
<ide>
<ide> def test_from_native_empty(self):
<add> """
<add> Make sure from_native() returns None on empty param.
<add> """
<ide> f = serializers.DateTimeField()
<ide> result = f.from_native('')
<ide>
<ide> self.assertEqual(result, None)
<ide>
<del> @unittest.skipUnless(django.VERSION >= (1, 4), "django < 1.4 don't have microseconds in default settings")
<del> def test_from_native_invalid_datetime_for_django_gte_1_4(self):
<add> def test_from_native_invalid_datetime(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid datetime.
<add> """
<ide> f = serializers.DateTimeField()
<ide>
<ide> try:
<del> f.from_native('1984-42-31 04:31')
<add> f.from_native('04:61:59')
<ide> except validators.ValidationError as e:
<del> self.assertEqual(e.messages, ['Datetime has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD HH:MM:SS; YYYY-MM-DD HH:MM:SS.uuuuuu; YYYY-MM-DD HH:MM; '
<del> 'YYYY-MM-DD; MM/DD/YYYY HH:MM:SS; MM/DD/YYYY HH:MM:SS.uuuuuu; '
<del> 'MM/DD/YYYY HH:MM; MM/DD/YYYY; MM/DD/YY HH:MM:SS; '
<del> 'MM/DD/YY HH:MM:SS.uuuuuu; MM/DD/YY HH:MM; MM/DD/YY'])
<add> self.assertEqual(e.messages, ["Datetime has wrong format. Use one of these formats instead: "
<add> "YYYY-MM-DD; YYYY-MM-DD HH:MM; YYYY-MM-DD HH:MM:SS; "
<add> "YYYY-MM-DD HH:MM:SS.uuuuuu"])
<ide> else:
<ide> self.fail("ValidationError was not properly raised")
<ide>
<del> @unittest.skipUnless(django.VERSION < (1, 4), "django >= 1.4 have microseconds in default settings")
<del> def test_from_native_invalid_datetime_for_django_lt_1_4(self):
<add> def test_from_native_invalid_format(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid format.
<add> """
<ide> f = serializers.DateTimeField()
<ide>
<ide> try:
<del> f.from_native('1984-42-31 04:31')
<add> f.from_native('04 -- 31')
<ide> except validators.ValidationError as e:
<del> self.assertEqual(e.messages, ['Datetime has wrong format. Use one of these formats instead: '
<del> 'YYYY-MM-DD HH:MM:SS; YYYY-MM-DD HH:MM; YYYY-MM-DD; '
<del> 'MM/DD/YYYY HH:MM:SS; MM/DD/YYYY HH:MM; MM/DD/YYYY; '
<del> 'MM/DD/YY HH:MM:SS; MM/DD/YY HH:MM; MM/DD/YY'])
<add> self.assertEqual(e.messages, ["Datetime has wrong format. Use one of these formats instead: "
<add> "YYYY-MM-DD; YYYY-MM-DD HH:MM; YYYY-MM-DD HH:MM:SS; "
<add> "YYYY-MM-DD HH:MM:SS.uuuuuu"])
<ide> else:
<ide> self.fail("ValidationError was not properly raised")
<ide>
<add> def test_to_native(self):
<add> """
<add> Make sure to_native() returns isoformat as default.
<add> """
<add> f = serializers.DateTimeField()
<ide>
<del>class TimeFieldTest(TestCase):
<del> def test_valid_default_time_input_formats(self):
<del> serializer = TimeObjectSerializer(data={'time': '04:31'})
<del> self.assertTrue(serializer.is_valid())
<add> result_1 = f.to_native(datetime.datetime(1984, 7, 31))
<add> result_2 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31))
<add> result_3 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59))
<add> result_4 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200))
<ide>
<del> serializer = TimeObjectSerializer(data={'time': '04:31:59'})
<del> self.assertTrue(serializer.is_valid())
<add> self.assertEqual('1984-07-31T00:00:00', result_1)
<add> self.assertEqual('1984-07-31T04:31:00', result_2)
<add> self.assertEqual('1984-07-31T04:31:59', result_3)
<add> self.assertEqual('1984-07-31T04:31:59.000200', result_4)
<ide>
<del> def test_valid_custom_time_input_formats(self):
<del> serializer = TimeObjectCustomFormatSerializer(data={'time': '04 -- 31'})
<del> self.assertTrue(serializer.is_valid())
<add> def test_to_native_custom_format(self):
<add> """
<add> Make sure to_native() returns correct custom format.
<add> """
<add> f = serializers.DateTimeField(output_format="%Y - %H:%M")
<ide>
<del> serializer = TimeObjectCustomFormatSerializer(data={'time': '043159'})
<del> self.assertTrue(serializer.is_valid())
<add> result_1 = f.to_native(datetime.datetime(1984, 7, 31))
<add> result_2 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31))
<add> result_3 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59))
<add> result_4 = f.to_native(datetime.datetime(1984, 7, 31, 4, 31, 59, 200))
<ide>
<del> def test_wrong_default_time_input_format(self):
<del> serializer = TimeObjectSerializer(data={'time': 'something wrong'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'time': ['Time has wrong format. Use one of these formats instead: HH:MM:SS; HH:MM']})
<add> self.assertEqual('1984 - 00:00', result_1)
<add> self.assertEqual('1984 - 04:31', result_2)
<add> self.assertEqual('1984 - 04:31', result_3)
<add> self.assertEqual('1984 - 04:31', result_4)
<ide>
<del> def test_wrong_custom_time_input_format(self):
<del> serializer = TimeObjectCustomFormatSerializer(data={'time': '04:31'})
<del> self.assertFalse(serializer.is_valid())
<del> self.assertEquals(serializer.errors, {'time': ['Time has wrong format. Use one of these formats instead: HH -- MM; HHMMSS']})
<ide>
<del> def test_from_native(self):
<add>class TimeFieldTest(TestCase):
<add> """
<add> Tests for the TimeField from_native() and to_native() behavior
<add> """
<add>
<add> def test_from_native_string(self):
<add> """
<add> Make sure from_native() accepts default iso input formats.
<add> """
<ide> f = serializers.TimeField()
<del> result = f.from_native('12:34:56')
<add> result_1 = f.from_native('04:31')
<add> result_2 = f.from_native('04:31:59')
<add> result_3 = f.from_native('04:31:59.000200')
<ide>
<del> self.assertEqual(datetime.time(12, 34, 56), result)
<add> self.assertEqual(datetime.time(4, 31), result_1)
<add> self.assertEqual(datetime.time(4, 31, 59), result_2)
<add> self.assertEqual(datetime.time(4, 31, 59, 200), result_3)
<ide>
<ide> def test_from_native_datetime_time(self):
<ide> """
<ide> Make sure from_native() accepts a datetime.time instance.
<ide> """
<ide> f = serializers.TimeField()
<del> result = f.from_native(datetime.time(12, 34, 56))
<add> result_1 = f.from_native(datetime.time(4, 31))
<add> result_2 = f.from_native(datetime.time(4, 31, 59))
<add> result_3 = f.from_native(datetime.time(4, 31, 59, 200))
<add>
<add> self.assertEqual(result_1, datetime.time(4, 31))
<add> self.assertEqual(result_2, datetime.time(4, 31, 59))
<add> self.assertEqual(result_3, datetime.time(4, 31, 59, 200))
<add>
<add> def test_from_native_custom_format(self):
<add> """
<add> Make sure from_native() accepts custom input formats.
<add> """
<add> f = serializers.TimeField(input_formats=['%H -- %M'])
<add> result = f.from_native('04 -- 31')
<add>
<add> self.assertEqual(datetime.time(4, 31), result)
<add>
<add> def test_from_native_invalid_default_on_custom_format(self):
<add> """
<add> Make sure from_native() don't accept default formats if custom format is preset
<add> """
<add> f = serializers.TimeField(input_formats=['%H -- %M'])
<ide>
<del> self.assertEqual(result, datetime.time(12, 34, 56))
<add> try:
<add> f.from_native('04:31:59')
<add> except validators.ValidationError as e:
<add> self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: HH -- MM"])
<add> else:
<add> self.fail("ValidationError was not properly raised")
<ide>
<ide> def test_from_native_empty(self):
<add> """
<add> Make sure from_native() returns None on empty param.
<add> """
<ide> f = serializers.TimeField()
<ide> result = f.from_native('')
<ide>
<ide> self.assertEqual(result, None)
<ide>
<ide> def test_from_native_invalid_time(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid time.
<add> """
<ide> f = serializers.TimeField()
<ide>
<ide> try:
<del> f.from_native('12:69:12')
<add> f.from_native('04:61:59')
<ide> except validators.ValidationError as e:
<del> self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: HH:MM:SS; HH:MM"])
<add> self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: "
<add> "HH:MM; HH:MM:SS; HH:MM:SS.uuuuuu"])
<ide> else:
<ide> self.fail("ValidationError was not properly raised")
<add>
<add> def test_from_native_invalid_format(self):
<add> """
<add> Make sure from_native() raises a ValidationError on passing an invalid format.
<add> """
<add> f = serializers.TimeField()
<add>
<add> try:
<add> f.from_native('04 -- 31')
<add> except validators.ValidationError as e:
<add> self.assertEqual(e.messages, ["Time has wrong format. Use one of these formats instead: "
<add> "HH:MM; HH:MM:SS; HH:MM:SS.uuuuuu"])
<add> else:
<add> self.fail("ValidationError was not properly raised")
<add>
<add> def test_to_native(self):
<add> """
<add> Make sure to_native() returns isoformat as default.
<add> """
<add> f = serializers.TimeField()
<add> result_1 = f.to_native(datetime.time(4, 31))
<add> result_2 = f.to_native(datetime.time(4, 31, 59))
<add> result_3 = f.to_native(datetime.time(4, 31, 59, 200))
<add>
<add> self.assertEqual('04:31:00', result_1)
<add> self.assertEqual('04:31:59', result_2)
<add> self.assertEqual('04:31:59.000200', result_3)
<add>
<add> def test_to_native_custom_format(self):
<add> """
<add> Make sure to_native() returns correct custom format.
<add> """
<add> f = serializers.TimeField(output_format="%H - %S [%f]")
<add> result_1 = f.to_native(datetime.time(4, 31))
<add> result_2 = f.to_native(datetime.time(4, 31, 59))
<add> result_3 = f.to_native(datetime.time(4, 31, 59, 200))
<add>
<add> self.assertEqual('04 - 00 [000000]', result_1)
<add> self.assertEqual('04 - 59 [000000]', result_2)
<add> self.assertEqual('04 - 59 [000200]', result_3)
<ide>\ No newline at end of file
<ide><path>rest_framework/tests/filterset.py
<ide> def setUp(self):
<ide>
<ide> self.objects = FilterableItem.objects
<ide> self.data = [
<del> {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date}
<del> for obj in self.objects.all()
<add> {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date.isoformat()}
<add> for obj in self.objects.all()
<ide> ]
<ide>
<ide> @unittest.skipUnless(django_filters, 'django-filters not installed')
<ide> def test_get_filtered_fields_root_view(self):
<ide> request = factory.get('/?date=%s' % search_date) # search_date str: '2012-09-22'
<ide> response = view(request).render()
<ide> self.assertEqual(response.status_code, status.HTTP_200_OK)
<del> expected_data = [f for f in self.data if f['date'] == search_date]
<add> expected_data = [f for f in self.data if datetime.datetime.strptime(f['date'], '%Y-%m-%d').date() == search_date]
<ide> self.assertEqual(response.data, expected_data)
<ide>
<ide> @unittest.skipUnless(django_filters, 'django-filters not installed')
<ide> def test_get_filtered_class_root_view(self):
<ide> request = factory.get('/?date=%s' % search_date) # search_date str: '2012-10-02'
<ide> response = view(request).render()
<ide> self.assertEqual(response.status_code, status.HTTP_200_OK)
<del> expected_data = [f for f in self.data if f['date'] > search_date]
<add> expected_data = [f for f in self.data if datetime.datetime.strptime(f['date'], '%Y-%m-%d').date() > search_date]
<ide> self.assertEqual(response.data, expected_data)
<ide>
<ide> # Tests that the text filter set with 'icontains' in the filter class works.
<ide> def test_get_filtered_class_root_view(self):
<ide> request = factory.get('/?decimal=%s&date=%s' % (search_decimal, search_date))
<ide> response = view(request).render()
<ide> self.assertEqual(response.status_code, status.HTTP_200_OK)
<del> expected_data = [f for f in self.data if f['date'] > search_date and
<del> f['decimal'] < search_decimal]
<add> expected_data = [f for f in self.data if
<add> datetime.datetime.strptime(f['date'], '%Y-%m-%d').date() > search_date and
<add> f['decimal'] < search_decimal]
<ide> self.assertEqual(response.data, expected_data)
<ide>
<ide> @unittest.skipUnless(django_filters, 'django-filters not installed')
<ide><path>rest_framework/tests/pagination.py
<ide> def setUp(self):
<ide>
<ide> self.objects = FilterableItem.objects
<ide> self.data = [
<del> {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date}
<del> for obj in self.objects.all()
<add> {'id': obj.id, 'text': obj.text, 'decimal': obj.decimal, 'date': obj.date.isoformat()}
<add> for obj in self.objects.all()
<ide> ]
<ide> self.view = FilterFieldsRootView.as_view()
<ide>
<ide><path>rest_framework/tests/serializer.py
<ide> def setUp(self):
<ide> self.expected = {
<ide> 'email': '[email protected]',
<ide> 'content': 'Happy new year!',
<del> 'created': datetime.datetime(2012, 1, 1),
<add> 'created': '2012-01-01T00:00:00',
<ide> 'sub_comment': 'And Merry Christmas!'
<ide> }
<ide> self.person_data = {'name': 'dwight', 'age': 35} | 6 |
Javascript | Javascript | check sizzle or jquery | 872d18c10e8ad1583b3a40671527355aa930aea7 | <ide><path>test/unit/selector.js
<ide> test("pseudo - position", function() {
<ide> t( "Check element position", "div#nothiddendiv:first > div:first", ["nothiddendivchild"] );
<ide> });
<ide>
<del>if ( Sizzle.selectors.filters.visibility ) {
<add>if ( (window.Sizzle || jQuery.find).selectors.filters.visibility ) {
<ide> test("pseudo - visibility", function() {
<ide> expect(11);
<ide> | 1 |
Text | Text | fix some inconsistent use of hostname | 3833d69936ada7e1e1a68dc73dd31d31423fbbaf | <ide><path>doc/api/async_hooks.md
<ide> of propagating what resource is responsible for the new resource's existence.
<ide> been initialized. This can contain useful information that can vary based on
<ide> the value of `type`. For instance, for the `GETADDRINFOREQWRAP` resource type,
<ide> `resource` provides the hostname used when looking up the IP address for the
<del>hostname in `net.Server.listen()`. The API for accessing this information is
<add>host in `net.Server.listen()`. The API for accessing this information is
<ide> currently not considered public, but using the Embedder API, users can provide
<ide> and document their own resource objects. For example, such a resource object
<ide> could contain the SQL query being executed.
<ide><path>doc/api/http.md
<ide> proxy.listen(1337, '127.0.0.1', () => {
<ide> // make a request to a tunneling proxy
<ide> const options = {
<ide> port: 1337,
<del> hostname: '127.0.0.1',
<add> host: '127.0.0.1',
<ide> method: 'CONNECT',
<ide> path: 'www.google.com:80'
<ide> };
<ide> event is emitted with a callback containing an object with a status code.
<ide> const http = require('http');
<ide>
<ide> const options = {
<del> hostname: '127.0.0.1',
<add> host: '127.0.0.1',
<ide> port: 8080,
<ide> path: '/length_request'
<ide> };
<ide> srv.listen(1337, '127.0.0.1', () => {
<ide> // make a request
<ide> const options = {
<ide> port: 1337,
<del> hostname: '127.0.0.1',
<add> host: '127.0.0.1',
<ide> headers: {
<ide> 'Connection': 'Upgrade',
<ide> 'Upgrade': 'websocket'
<ide> changes:
<ide> * `host` {string} A domain name or IP address of the server to issue the
<ide> request to. **Default:** `'localhost'`.
<ide> * `hostname` {string} Alias for `host`. To support [`url.parse()`][],
<del> `hostname` is preferred over `host`.
<del> * `family` {number} IP address family to use when resolving `host` and
<add> `hostname` will be used if both `host` and `hostname` are specified.
<add> * `family` {number} IP address family to use when resolving `host` or
<ide> `hostname`. Valid values are `4` or `6`. When unspecified, both IP v4 and
<ide> v6 will be used.
<ide> * `port` {number} Port of remote server. **Default:** `80`.
<ide> * `localAddress` {string} Local interface to bind for network connections.
<del> * `socketPath` {string} Unix Domain Socket (use one of `host:port` or
<del> `socketPath`).
<add> * `socketPath` {string} Unix Domain Socket (cannot be used if one of `host`
<add> or `port` is specified, those specify a TCP Socket).
<ide> * `method` {string} A string specifying the HTTP request method. **Default:**
<ide> `'GET'`.
<ide> * `path` {string} Request path. Should include query string if any.
<ide><path>doc/api/tls.md
<ide> decrease overall server throughput.
<ide> added: v0.8.4
<ide> -->
<ide>
<del>* `hostname` {string} The hostname to verify the certificate against
<add>* `hostname` {string} The host name or IP address to verify the certificate
<add> against.
<ide> * `cert` {Object} An object representing the peer's certificate. The returned
<ide> object has some properties corresponding to the fields of the certificate.
<ide> * Returns: {Error|undefined}
<ide>
<ide> Verifies the certificate `cert` is issued to `hostname`.
<ide>
<del>Returns {Error} object, populating it with the reason, host, and cert on
<add>Returns {Error} object, populating it with `reason`, `host`, and `cert` on
<ide> failure. On success, returns {undefined}.
<ide>
<ide> This function can be overwritten by providing alternative function as part of | 3 |
Java | Java | convert networkingmodule to support web workers | 9a3f11d3e701e36dfae1cb55864f7efebc171935 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/core/DeviceEventManagerModule.java
<ide> import javax.annotation.Nullable;
<ide>
<ide> import com.facebook.react.bridge.ReactApplicationContext;
<del>import com.facebook.react.bridge.ReactContext;
<ide> import com.facebook.react.bridge.ReactContextBaseJavaModule;
<ide> import com.facebook.react.bridge.ReactMethod;
<ide> import com.facebook.react.bridge.JavaScriptModule;
<add>import com.facebook.react.bridge.SupportsWebWorkers;
<ide> import com.facebook.react.bridge.UiThreadUtil;
<ide>
<ide> /**
<ide> * Native module that handles device hardware events like hardware back presses.
<ide> */
<ide> public class DeviceEventManagerModule extends ReactContextBaseJavaModule {
<ide>
<del> public static interface RCTDeviceEventEmitter extends JavaScriptModule {
<add> @SupportsWebWorkers
<add> public interface RCTDeviceEventEmitter extends JavaScriptModule {
<ide> void emit(String eventName, @Nullable Object data);
<ide> }
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/network/NetworkingModule.java
<ide> import java.util.concurrent.TimeUnit;
<ide>
<ide> import com.facebook.react.bridge.Arguments;
<add>import com.facebook.react.bridge.ExecutorToken;
<ide> import com.facebook.react.bridge.GuardedAsyncTask;
<ide> import com.facebook.react.bridge.ReactApplicationContext;
<ide> import com.facebook.react.bridge.ReactContextBaseJavaModule;
<ide> public void onCatalystInstanceDestroy() {
<ide>
<ide> @ReactMethod
<ide> /**
<del> * @param timeout value of 0 results in no timeout
<del> */
<add> * @param timeout value of 0 results in no timeout
<add> */
<ide> public void sendRequest(
<add> final ExecutorToken executorToken,
<ide> String method,
<ide> String url,
<ide> final int requestId,
<ide> public void sendRequest(
<ide>
<ide> Headers requestHeaders = extractHeaders(headers, data);
<ide> if (requestHeaders == null) {
<del> onRequestError(requestId, "Unrecognized headers format");
<add> onRequestError(executorToken, requestId, "Unrecognized headers format");
<ide> return;
<ide> }
<ide> String contentType = requestHeaders.get(CONTENT_TYPE_HEADER_NAME);
<ide> public void sendRequest(
<ide> requestBuilder.method(method, RequestBodyUtil.getEmptyBody(method));
<ide> } else if (data.hasKey(REQUEST_BODY_KEY_STRING)) {
<ide> if (contentType == null) {
<del> onRequestError(requestId, "Payload is set but no content-type header specified");
<add> onRequestError(
<add> executorToken,
<add> requestId,
<add> "Payload is set but no content-type header specified");
<ide> return;
<ide> }
<ide> String body = data.getString(REQUEST_BODY_KEY_STRING);
<ide> MediaType contentMediaType = MediaType.parse(contentType);
<ide> if (RequestBodyUtil.isGzipEncoding(contentEncoding)) {
<ide> RequestBody requestBody = RequestBodyUtil.createGzip(contentMediaType, body);
<ide> if (requestBody == null) {
<del> onRequestError(requestId, "Failed to gzip request body");
<add> onRequestError(executorToken, requestId, "Failed to gzip request body");
<ide> return;
<ide> }
<ide> requestBuilder.method(method, requestBody);
<ide> public void sendRequest(
<ide> }
<ide> } else if (data.hasKey(REQUEST_BODY_KEY_URI)) {
<ide> if (contentType == null) {
<del> onRequestError(requestId, "Payload is set but no content-type header specified");
<add> onRequestError(
<add> executorToken,
<add> requestId,
<add> "Payload is set but no content-type header specified");
<ide> return;
<ide> }
<ide> String uri = data.getString(REQUEST_BODY_KEY_URI);
<ide> InputStream fileInputStream =
<ide> RequestBodyUtil.getFileInputStream(getReactApplicationContext(), uri);
<ide> if (fileInputStream == null) {
<del> onRequestError(requestId, "Could not retrieve file for uri " + uri);
<add> onRequestError(executorToken, requestId, "Could not retrieve file for uri " + uri);
<ide> return;
<ide> }
<ide> requestBuilder.method(
<ide> public void sendRequest(
<ide> contentType = "multipart/form-data";
<ide> }
<ide> ReadableArray parts = data.getArray(REQUEST_BODY_KEY_FORMDATA);
<del> MultipartBuilder multipartBuilder = constructMultipartBody(parts, contentType, requestId);
<add> MultipartBuilder multipartBuilder =
<add> constructMultipartBody(executorToken, parts, contentType, requestId);
<ide> if (multipartBuilder == null) {
<ide> return;
<ide> }
<ide> public void onFailure(Request request, IOException e) {
<ide> if (mShuttingDown) {
<ide> return;
<ide> }
<del> onRequestError(requestId, e.getMessage());
<add> onRequestError(executorToken, requestId, e.getMessage());
<ide> }
<ide>
<ide> @Override
<ide> public void onResponse(Response response) throws IOException {
<ide> }
<ide>
<ide> // Before we touch the body send headers to JS
<del> onResponseReceived(requestId, response);
<add> onResponseReceived(executorToken, requestId, response);
<ide>
<ide> ResponseBody responseBody = response.body();
<ide> try {
<ide> if (useIncrementalUpdates) {
<del> readWithProgress(requestId, responseBody);
<del> onRequestSuccess(requestId);
<add> readWithProgress(executorToken, requestId, responseBody);
<add> onRequestSuccess(executorToken, requestId);
<ide> } else {
<del> onDataReceived(requestId, responseBody.string());
<del> onRequestSuccess(requestId);
<add> onDataReceived(executorToken, requestId, responseBody.string());
<add> onRequestSuccess(executorToken, requestId);
<ide> }
<ide> } catch (IOException e) {
<del> onRequestError(requestId, e.getMessage());
<add> onRequestError(executorToken, requestId, e.getMessage());
<ide> }
<ide> }
<ide> });
<ide> }
<ide>
<del> private void readWithProgress(int requestId, ResponseBody responseBody) throws IOException {
<add> private void readWithProgress(
<add> ExecutorToken executorToken,
<add> int requestId,
<add> ResponseBody responseBody) throws IOException {
<ide> Reader reader = responseBody.charStream();
<ide> try {
<ide> StringBuilder sb = new StringBuilder(getBufferSize(responseBody));
<ide> private void readWithProgress(int requestId, ResponseBody responseBody) throws I
<ide> sb.append(buffer, 0, read);
<ide> long now = System.nanoTime();
<ide> if (shouldDispatch(now, last)) {
<del> onDataReceived(requestId, sb.toString());
<add> onDataReceived(executorToken, requestId, sb.toString());
<ide> sb.setLength(0);
<ide> last = now;
<ide> }
<ide> }
<ide>
<ide> if (sb.length() > 0) {
<del> onDataReceived(requestId, sb.toString());
<add> onDataReceived(executorToken, requestId, sb.toString());
<ide> }
<ide> } finally {
<ide> reader.close();
<ide> private static int getBufferSize(ResponseBody responseBody) throws IOException {
<ide> }
<ide> }
<ide>
<del> private void onDataReceived(int requestId, String data) {
<add> private void onDataReceived(ExecutorToken ExecutorToken, int requestId, String data) {
<ide> WritableArray args = Arguments.createArray();
<ide> args.pushInt(requestId);
<ide> args.pushString(data);
<ide>
<del> getEventEmitter().emit("didReceiveNetworkData", args);
<add> getEventEmitter(ExecutorToken).emit("didReceiveNetworkData", args);
<ide> }
<ide>
<del> private void onRequestError(int requestId, String error) {
<add> private void onRequestError(ExecutorToken ExecutorToken, int requestId, String error) {
<ide> WritableArray args = Arguments.createArray();
<ide> args.pushInt(requestId);
<ide> args.pushString(error);
<ide>
<del> getEventEmitter().emit("didCompleteNetworkResponse", args);
<add> getEventEmitter(ExecutorToken).emit("didCompleteNetworkResponse", args);
<ide> }
<ide>
<del> private void onRequestSuccess(int requestId) {
<add> private void onRequestSuccess(ExecutorToken ExecutorToken, int requestId) {
<ide> WritableArray args = Arguments.createArray();
<ide> args.pushInt(requestId);
<ide> args.pushNull();
<ide>
<del> getEventEmitter().emit("didCompleteNetworkResponse", args);
<add> getEventEmitter(ExecutorToken).emit("didCompleteNetworkResponse", args);
<ide> }
<ide>
<del> private void onResponseReceived(int requestId, Response response) {
<add> private void onResponseReceived(
<add> ExecutorToken ExecutorToken,
<add> int requestId,
<add> Response response) {
<ide> WritableMap headers = translateHeaders(response.headers());
<ide>
<ide> WritableArray args = Arguments.createArray();
<ide> private void onResponseReceived(int requestId, Response response) {
<ide> args.pushMap(headers);
<ide> args.pushString(response.request().urlString());
<ide>
<del> getEventEmitter().emit("didReceiveNetworkResponse", args);
<add> getEventEmitter(ExecutorToken).emit("didReceiveNetworkResponse", args);
<ide> }
<ide>
<ide> private static WritableMap translateHeaders(Headers headers) {
<ide> private static WritableMap translateHeaders(Headers headers) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public void abortRequest(final int requestId) {
<add> public void abortRequest(ExecutorToken executorToken, final int requestId) {
<ide> // We have to use AsyncTask since this might trigger a NetworkOnMainThreadException, this is an
<ide> // open issue on OkHttp: https://github.com/square/okhttp/issues/869
<ide> new GuardedAsyncTask<Void, Void>(getReactApplicationContext()) {
<ide> protected void doInBackgroundGuarded(Void... params) {
<ide> }
<ide>
<ide> @ReactMethod
<del> public void clearCookies(com.facebook.react.bridge.Callback callback) {
<add> public void clearCookies(
<add> ExecutorToken executorToken,
<add> com.facebook.react.bridge.Callback callback) {
<ide> mCookieHandler.clearCookies(callback);
<ide> }
<ide>
<del> private @Nullable MultipartBuilder constructMultipartBody(
<add> @Override
<add> public boolean supportsWebWorkers() {
<add> return true;
<add> }
<add>
<add> private
<add> @Nullable
<add> MultipartBuilder constructMultipartBody(
<add> ExecutorToken ExecutorToken,
<ide> ReadableArray body,
<ide> String contentType,
<ide> int requestId) {
<ide> public void clearCookies(com.facebook.react.bridge.Callback callback) {
<ide> ReadableArray headersArray = bodyPart.getArray("headers");
<ide> Headers headers = extractHeaders(headersArray, null);
<ide> if (headers == null) {
<del> onRequestError(requestId, "Missing or invalid header format for FormData part.");
<add> onRequestError(
<add> ExecutorToken,
<add> requestId,
<add> "Missing or invalid header format for FormData part.");
<ide> return null;
<ide> }
<ide> MediaType partContentType = null;
<ide> public void clearCookies(com.facebook.react.bridge.Callback callback) {
<ide> multipartBuilder.addPart(headers, RequestBody.create(partContentType, bodyValue));
<ide> } else if (bodyPart.hasKey(REQUEST_BODY_KEY_URI)) {
<ide> if (partContentType == null) {
<del> onRequestError(requestId, "Binary FormData part needs a content-type header.");
<add> onRequestError(
<add> ExecutorToken,
<add> requestId,
<add> "Binary FormData part needs a content-type header.");
<ide> return null;
<ide> }
<ide> String fileContentUriStr = bodyPart.getString(REQUEST_BODY_KEY_URI);
<ide> InputStream fileInputStream =
<ide> RequestBodyUtil.getFileInputStream(getReactApplicationContext(), fileContentUriStr);
<ide> if (fileInputStream == null) {
<del> onRequestError(requestId, "Could not retrieve file for uri " + fileContentUriStr);
<add> onRequestError(
<add> ExecutorToken,
<add> requestId,
<add> "Could not retrieve file for uri " + fileContentUriStr);
<ide> return null;
<ide> }
<ide> multipartBuilder.addPart(headers, RequestBodyUtil.create(partContentType, fileInputStream));
<ide> } else {
<del> onRequestError(requestId, "Unrecognized FormData part.");
<add> onRequestError(ExecutorToken, requestId, "Unrecognized FormData part.");
<ide> }
<ide> }
<ide> return multipartBuilder;
<ide> public void clearCookies(com.facebook.react.bridge.Callback callback) {
<ide> /**
<ide> * Extracts the headers from the Array. If the format is invalid, this method will return null.
<ide> */
<del> private @Nullable Headers extractHeaders(
<add> private
<add> @Nullable
<add> Headers extractHeaders(
<ide> @Nullable ReadableArray headersArray,
<ide> @Nullable ReadableMap requestData) {
<ide> if (headersArray == null) {
<ide> public void clearCookies(com.facebook.react.bridge.Callback callback) {
<ide> return headersBuilder.build();
<ide> }
<ide>
<del> private DeviceEventManagerModule.RCTDeviceEventEmitter getEventEmitter() {
<add> private DeviceEventManagerModule.RCTDeviceEventEmitter getEventEmitter(ExecutorToken ExecutorToken) {
<ide> return getReactApplicationContext()
<del> .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class);
<add> .getJSModule(ExecutorToken, DeviceEventManagerModule.RCTDeviceEventEmitter.class);
<ide> }
<ide> }
<ide><path>ReactAndroid/src/test/java/com/facebook/react/modules/network/NetworkingModuleTest.java
<ide> import java.util.List;
<ide>
<ide> import com.facebook.react.bridge.Arguments;
<add>import com.facebook.react.bridge.ExecutorToken;
<ide> import com.facebook.react.bridge.ReactApplicationContext;
<ide> import com.facebook.react.bridge.ReactContext;
<ide> import com.facebook.react.bridge.JavaOnlyArray;
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide> NetworkingModule networkingModule = new NetworkingModule(null, "", httpClient);
<ide>
<ide> networkingModule.sendRequest(
<del> "GET",
<del> "http://somedomain/foo",
<del> 0,
<del> JavaOnlyArray.of(),
<del> null,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "GET",
<add> "http://somedomain/foo",
<add> 0,
<add> JavaOnlyArray.of(),
<add> null,
<add> true,
<add> 0);
<ide>
<ide> ArgumentCaptor<Request> argumentCaptor = ArgumentCaptor.forClass(Request.class);
<ide> verify(httpClient).newCall(argumentCaptor.capture());
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide> public void testFailGetWithInvalidHeadersStruct() throws Exception {
<ide> RCTDeviceEventEmitter emitter = mock(RCTDeviceEventEmitter.class);
<ide> ReactApplicationContext context = mock(ReactApplicationContext.class);
<del> when(context.getJSModule(any(Class.class))).thenReturn(emitter);
<add> when(context.getJSModule(any(ExecutorToken.class), any(Class.class))).thenReturn(emitter);
<ide>
<ide> OkHttpClient httpClient = mock(OkHttpClient.class);
<ide> NetworkingModule networkingModule = new NetworkingModule(context, "", httpClient);
<ide> public void testFailGetWithInvalidHeadersStruct() throws Exception {
<ide> mockEvents();
<ide>
<ide> networkingModule.sendRequest(
<del> "GET",
<del> "http://somedoman/foo",
<del> 0,
<del> JavaOnlyArray.from(invalidHeaders),
<del> null,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "GET",
<add> "http://somedoman/foo",
<add> 0,
<add> JavaOnlyArray.from(invalidHeaders),
<add> null,
<add> true,
<add> 0);
<ide>
<ide> verifyErrorEmit(emitter, 0);
<ide> }
<ide> public void testFailGetWithInvalidHeadersStruct() throws Exception {
<ide> public void testFailPostWithoutContentType() throws Exception {
<ide> RCTDeviceEventEmitter emitter = mock(RCTDeviceEventEmitter.class);
<ide> ReactApplicationContext context = mock(ReactApplicationContext.class);
<del> when(context.getJSModule(any(Class.class))).thenReturn(emitter);
<add> when(context.getJSModule(any(ExecutorToken.class), any(Class.class))).thenReturn(emitter);
<ide>
<ide> OkHttpClient httpClient = mock(OkHttpClient.class);
<ide> NetworkingModule networkingModule = new NetworkingModule(context, "", httpClient);
<ide> public void testFailPostWithoutContentType() throws Exception {
<ide> mockEvents();
<ide>
<ide> networkingModule.sendRequest(
<del> "POST",
<del> "http://somedomain/bar",
<del> 0,
<del> JavaOnlyArray.of(),
<del> body,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "POST",
<add> "http://somedomain/bar",
<add> 0,
<add> JavaOnlyArray.of(),
<add> body,
<add> true,
<add> 0);
<ide>
<ide> verifyErrorEmit(emitter, 0);
<ide> }
<ide> public WritableMap answer(InvocationOnMock invocation) throws Throwable {
<ide> }
<ide>
<ide> @Test
<del> public void testSuccessfullPostRequest() throws Exception {
<add> public void testSuccessfulPostRequest() throws Exception {
<ide> OkHttpClient httpClient = mock(OkHttpClient.class);
<ide> when(httpClient.newCall(any(Request.class))).thenAnswer(new Answer<Object>() {
<ide> @Override
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide> body.putString("string", "This is request body");
<ide>
<ide> networkingModule.sendRequest(
<del> "POST",
<del> "http://somedomain/bar",
<del> 0,
<del> JavaOnlyArray.of(JavaOnlyArray.of("Content-Type", "text/plain")),
<del> body,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "POST",
<add> "http://somedomain/bar",
<add> 0,
<add> JavaOnlyArray.of(JavaOnlyArray.of("Content-Type", "text/plain")),
<add> body,
<add> true,
<add> 0);
<ide>
<ide> ArgumentCaptor<Request> argumentCaptor = ArgumentCaptor.forClass(Request.class);
<ide> verify(httpClient).newCall(argumentCaptor.capture());
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide> JavaOnlyArray.of("User-Agent", "React test agent/1.0"));
<ide>
<ide> networkingModule.sendRequest(
<del> "GET",
<del> "http://someurl/baz",
<del> 0,
<del> JavaOnlyArray.from(headers),
<del> null,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "GET",
<add> "http://someurl/baz",
<add> 0,
<add> JavaOnlyArray.from(headers),
<add> null,
<add> true,
<add> 0);
<ide> ArgumentCaptor<Request> argumentCaptor = ArgumentCaptor.forClass(Request.class);
<ide> verify(httpClient).newCall(argumentCaptor.capture());
<ide> Headers requestHeaders = argumentCaptor.getValue().headers();
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide>
<ide> NetworkingModule networkingModule = new NetworkingModule(null, "", httpClient);
<ide> networkingModule.sendRequest(
<del> "POST",
<del> "http://someurl/uploadFoo",
<del> 0,
<del> new JavaOnlyArray(),
<del> body,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "POST",
<add> "http://someurl/uploadFoo",
<add> 0,
<add> new JavaOnlyArray(),
<add> body,
<add> true,
<add> 0);
<ide>
<ide> // verify url, method, headers
<ide> ArgumentCaptor<Request> argumentCaptor = ArgumentCaptor.forClass(Request.class);
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide>
<ide> NetworkingModule networkingModule = new NetworkingModule(null, "", httpClient);
<ide> networkingModule.sendRequest(
<del> "POST",
<del> "http://someurl/uploadFoo",
<del> 0,
<del> JavaOnlyArray.from(headers),
<del> body,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "POST",
<add> "http://someurl/uploadFoo",
<add> 0,
<add> JavaOnlyArray.from(headers),
<add> body,
<add> true,
<add> 0);
<ide>
<ide> // verify url, method, headers
<ide> ArgumentCaptor<Request> argumentCaptor = ArgumentCaptor.forClass(Request.class);
<ide> public Object answer(InvocationOnMock invocation) throws Throwable {
<ide>
<ide> NetworkingModule networkingModule = new NetworkingModule(null, "", httpClient);
<ide> networkingModule.sendRequest(
<del> "POST",
<del> "http://someurl/uploadFoo",
<del> 0,
<del> JavaOnlyArray.from(headers),
<del> body,
<del> true,
<del> 0);
<add> mock(ExecutorToken.class),
<add> "POST",
<add> "http://someurl/uploadFoo",
<add> 0,
<add> JavaOnlyArray.from(headers),
<add> body,
<add> true,
<add> 0);
<ide>
<ide> // verify RequestBodyPart for image
<ide> PowerMockito.verifyStatic(times(1)); | 3 |
Text | Text | replace line items with chapters [ci skip] | f0c161cc05f4964ce3316c14e2dc2b6c445c5cd6 | <ide><path>guides/source/association_basics.md
<ide> end
<ide> You can use the `includes` method to specify second-order associations that should be eager-loaded when this association is used. For example, consider these models:
<ide>
<ide> ```ruby
<del>class LineItem < ApplicationRecord
<add>class Chapter < ApplicationRecord
<ide> belongs_to :book
<ide> end
<ide>
<ide> class Book < ApplicationRecord
<ide> belongs_to :author
<del> has_many :line_items
<add> has_many :chapters
<ide> end
<ide>
<ide> class Author < ApplicationRecord
<ide> has_many :books
<ide> end
<ide> ```
<ide>
<del>If you frequently retrieve authors directly from line items (`@line_item.book.author`), then you can make your code somewhat more efficient by including authors in the association from line items to books:
<add>If you frequently retrieve authors directly from chapters (`@chapter.book.author`), then you can make your code somewhat more efficient by including authors in the association from chapters to books:
<ide>
<ide> ```ruby
<del>class LineItem < ApplicationRecord
<add>class Chapter < ApplicationRecord
<ide> belongs_to :book, -> { includes :author }
<ide> end
<ide>
<ide> class Book < ApplicationRecord
<ide> belongs_to :author
<del> has_many :line_items
<add> has_many :chapters
<ide> end
<ide>
<ide> class Author < ApplicationRecord
<ide> The `group` method supplies an attribute name to group the result set by, using
<ide>
<ide> ```ruby
<ide> class Author < ApplicationRecord
<del> has_many :line_items, -> { group 'books.id' },
<del> through: :books
<add> has_many :chapters, -> { group 'books.id' },
<add> through: :books
<ide> end
<ide> ```
<ide>
<ide> end
<ide>
<ide> class Book < ApplicationRecord
<ide> belongs_to :author
<del> has_many :line_items
<add> has_many :chapters
<ide> end
<ide>
<del>class LineItem < ApplicationRecord
<add>class Chapter < ApplicationRecord
<ide> belongs_to :book
<ide> end
<ide> ```
<ide>
<del>If you frequently retrieve line items directly from authors (`@author.books.line_items`), then you can make your code somewhat more efficient by including line items in the association from authors to books:
<add>If you frequently retrieve chapters directly from authors (`@author.books.chapters`), then you can make your code somewhat more efficient by including chapters in the association from authors to books:
<ide>
<ide> ```ruby
<ide> class Author < ApplicationRecord
<del> has_many :books, -> { includes :line_items }
<add> has_many :books, -> { includes :chapters }
<ide> end
<ide>
<ide> class Book < ApplicationRecord
<ide> belongs_to :author
<del> has_many :line_items
<add> has_many :chapters
<ide> end
<ide>
<del>class LineItem < ApplicationRecord
<add>class Chapter < ApplicationRecord
<ide> belongs_to :book
<ide> end
<ide> ``` | 1 |
Javascript | Javascript | add missing semicolon in firstpersoncontrols | 189969453917a350ff2e6af4eaf256642bb84e41 | <ide><path>examples/js/controls/FirstPersonControls.js
<ide> THREE.FirstPersonControls = function ( object, domElement ) {
<ide> window.removeEventListener( 'keydown', _onKeyDown, false );
<ide> window.removeEventListener( 'keyup', _onKeyUp, false );
<ide>
<del> }
<add> };
<ide>
<ide> var _onMouseMove = bind( this, this.onMouseMove );
<ide> var _onMouseDown = bind( this, this.onMouseDown ); | 1 |
Ruby | Ruby | fix x11 proxy constant lookup under 1.9+ | 9561b4bc8e0fee449af60f8957c7ea510e41b561 | <ide><path>Library/Homebrew/requirements.rb
<ide> def self.inherited(mod)
<ide> class Proxy < self
<ide> PACKAGES = [:libpng, :freetype, :fontconfig]
<ide>
<del> def self.for(name, *tags)
<del> constant = name.capitalize
<del>
<del> if const_defined?(constant)
<del> klass = const_get(constant)
<del> else
<del> klass = Class.new(self) do
<del> def initialize(name, *tags) super end
<add> class << self
<add> def defines_const?(const)
<add> if ::RUBY_VERSION >= "1.9"
<add> const_defined?(const, false)
<add> else
<add> const_defined?(const)
<ide> end
<add> end
<ide>
<del> const_set(constant, klass)
<add> def for(name, *tags)
<add> constant = name.capitalize
<add>
<add> if defines_const?(constant)
<add> klass = const_get(constant)
<add> else
<add> klass = Class.new(self) do
<add> def initialize(name, *tags) super end
<add> end
<add>
<add> const_set(constant, klass)
<add> end
<add> klass.new(name, *tags)
<ide> end
<del> klass.new(name, *tags)
<ide> end
<ide> end
<ide> end | 1 |
Ruby | Ruby | extract constant io_default_buffer_size | 09d7889ed8f6eee000c523a7ada42b850ec64e04 | <ide><path>Library/Homebrew/utils/popen.rb
<ide> # frozen_string_literal: true
<ide>
<ide> module Utils
<add> IO_DEFAULT_BUFFER_SIZE = 4096
<add> private_constant :IO_DEFAULT_BUFFER_SIZE
<add>
<ide> def self.popen_read(*args, **options, &block)
<ide> popen(args, "rb", options, &block)
<ide> end
<ide> def self.popen_write(*args, **options)
<ide>
<ide> # Before we yield to the block, capture as much output as we can
<ide> loop do
<del> output += pipe.read_nonblock(4096)
<add> output += pipe.read_nonblock(IO_DEFAULT_BUFFER_SIZE)
<ide> rescue IO::WaitReadable, EOFError
<ide> break
<ide> end | 1 |
Text | Text | remove braces which shouldn't be there | a80b1621b034b6bcd920805681db22e1f6c5a282 | <ide><path>doc/api/http2.md
<ide> added: v8.4.0
<ide>
<ide> Shortcut for `http2stream.rstStream()` using error code `0x00` (No Error).
<ide>
<del>#### http2stream.rstWithProtocolError() {
<add>#### http2stream.rstWithProtocolError()
<ide> <!-- YAML
<ide> added: v8.4.0
<ide> -->
<ide> added: v8.4.0
<ide>
<ide> Shortcut for `http2stream.rstStream()` using error code `0x01` (Protocol Error).
<ide>
<del>#### http2stream.rstWithCancel() {
<add>#### http2stream.rstWithCancel()
<ide> <!-- YAML
<ide> added: v8.4.0
<ide> -->
<ide> added: v8.4.0
<ide>
<ide> Shortcut for `http2stream.rstStream()` using error code `0x08` (Cancel).
<ide>
<del>#### http2stream.rstWithRefuse() {
<add>#### http2stream.rstWithRefuse()
<ide> <!-- YAML
<ide> added: v8.4.0
<ide> -->
<ide> added: v8.4.0
<ide>
<ide> Shortcut for `http2stream.rstStream()` using error code `0x07` (Refused Stream).
<ide>
<del>#### http2stream.rstWithInternalError() {
<add>#### http2stream.rstWithInternalError()
<ide> <!-- YAML
<ide> added: v8.4.0
<ide> --> | 1 |
Go | Go | send the images in correct order | 3cbf5670c5de9948dedf439defb0d772531e717d | <ide><path>registry.go
<ide> func (graph *Graph) pushPrimitive(stdout io.Writer, remote, tag, imgId, registry
<ide> return nil
<ide> }
<ide>
<del>// Push a repository to the registry.
<del>// Remote has the format '<user>/<repo>
<del>func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Repository, authConfig *auth.AuthConfig) error {
<del> client := graph.getHttpClient()
<del>
<del> checksums, err := graph.Checksums(stdout, localRepo)
<del> if err != nil {
<del> return err
<del> }
<del>
<del> for tag, id := range originRepo {
<del> if exists, err := graph.getRemoteImageJson(id); err != nil {
<del> return nil, err
<del> } else if !exists {
<del> filteredRepo[tag] = id
<del>
<del> }
<del> }
<del> return filteredRepo, nil
<del>}
<del>
<ide> // Retrieve the checksum of an image
<ide> // Priority:
<ide> // - Check on the stored checksums
<ide> func (graph *Graph) getChecksum(imageId string) (string, error) {
<ide> type ImgListJson struct {
<ide> Id string `json:"id"`
<ide> Checksum string `json:"checksum,omitempty"`
<add> tag string
<ide> }
<ide>
<ide> // Push a repository to the registry.
<ide> func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Re
<ide>
<ide> fmt.Fprintf(stdout, "Processing checksums\n")
<ide> imageSet := make(map[string]struct{})
<del> for _, id := range localRepo {
<add>
<add> for tag, id := range localRepo {
<ide> img, err := graph.Get(id)
<ide> if err != nil {
<ide> return err
<ide> func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Re
<ide> if err != nil {
<ide> return err
<ide> }
<del> imgList = append(imgList, &ImgListJson{
<add> imgList = append([]*ImgListJson{{
<ide> Id: img.Id,
<ide> Checksum: checksum,
<del> })
<add> tag: tag,
<add> }}, imgList...)
<ide> return nil
<ide> })
<ide> }
<ide> func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Re
<ide> for _, registry := range endpoints {
<ide> fmt.Fprintf(stdout, "Pushing repository %s to %s (%d tags)\r\n", remote, registry, len(localRepo))
<ide> // For each image within the repo, push them
<del> for tag, imgId := range localRepo {
<del> if err := graph.pushPrimitive(stdout, remote, tag, imgId, registry, token); err != nil {
<add> for _, elem := range imgList {
<add> if err := graph.pushPrimitive(stdout, remote, elem.tag, elem.Id, registry, token); err != nil {
<ide> // FIXME: Continue on error?
<ide> return err
<ide> } | 1 |
Javascript | Javascript | improve output for more common case | e880673950810a06c25b3cb9d763cc7513927285 | <ide><path>packages/ember-handlebars/lib/ext.js
<ide> Ember.Handlebars.Compiler.prototype.mustache = function(mustache) {
<ide> // Update the mustache node to include a hash value indicating whether the original node
<ide> // was escaped. This will allow us to properly escape values when the underlying value
<ide> // changes and we need to re-render the value.
<del> if(mustache.escaped) {
<add> if(!mustache.escaped) {
<ide> mustache.hash = mustache.hash || new Handlebars.AST.HashNode([]);
<del> mustache.hash.pairs.push(["escaped", new Handlebars.AST.StringNode("true")]);
<add> mustache.hash.pairs.push(["unescaped", new Handlebars.AST.StringNode("true")]);
<ide> }
<ide> mustache = new Handlebars.AST.MustacheNode([id].concat([mustache.id]), mustache.hash, !mustache.escaped);
<ide> return Handlebars.Compiler.prototype.mustache.call(this, mustache);
<ide><path>packages/ember-handlebars/lib/helpers/binding.js
<ide> var bind = function(property, options, preserveContext, shouldDisplay, valueNorm
<ide> path: path,
<ide> pathRoot: pathRoot,
<ide> previousContext: currentContext,
<del> isEscaped: options.hash.escaped,
<add> isEscaped: !options.hash.unescaped,
<ide> templateData: options.data
<ide> });
<ide> | 2 |
Go | Go | remove redundant variable | 1f7beb85949c4c31b3b5874510531021d5a2b45b | <ide><path>daemon/events/testutils/testutils.go
<ide> func Scan(text string) (*events.Message, error) {
<ide> attrs[kv[0]] = kv[1]
<ide> }
<ide>
<del> tu := time.Unix(t, tn)
<ide> return &events.Message{
<ide> Time: t,
<del> TimeNano: tu.UnixNano(),
<add> TimeNano: time.Unix(t, tn).UnixNano(),
<ide> Type: md["eventType"],
<ide> Action: md["action"],
<ide> Actor: events.Actor{ | 1 |
Mixed | Python | make get_reference public | 563a20a04098937db1e2c353b2ee6a3059e92ebf | <ide><path>docs/api-guide/schemas.md
<ide> Computes the component's name from the serializer.
<ide>
<ide> You may see warnings if your API has duplicate component names. If so you can override `get_component_name()` or pass the `component_name` `__init__()` kwarg (see below) to provide different names.
<ide>
<add>#### `get_reference()`
<add>
<add>Returns a reference to the serializer component. This may be useful if you override `get_schema()`.
<add>
<add>
<ide> #### `map_serializer()`
<ide>
<ide> Maps serializers to their OpenAPI representations.
<ide><path>rest_framework/schemas/openapi.py
<ide> def get_response_serializer(self, path, method):
<ide> """
<ide> return self.get_serializer(path, method)
<ide>
<del> def _get_reference(self, serializer):
<add> def get_reference(self, serializer):
<ide> return {'$ref': '#/components/schemas/{}'.format(self.get_component_name(serializer))}
<ide>
<ide> def get_request_body(self, path, method):
<ide> def get_request_body(self, path, method):
<ide> if not isinstance(serializer, serializers.Serializer):
<ide> item_schema = {}
<ide> else:
<del> item_schema = self._get_reference(serializer)
<add> item_schema = self.get_reference(serializer)
<ide>
<ide> return {
<ide> 'content': {
<ide> def get_responses(self, path, method):
<ide> if not isinstance(serializer, serializers.Serializer):
<ide> item_schema = {}
<ide> else:
<del> item_schema = self._get_reference(serializer)
<add> item_schema = self.get_reference(serializer)
<ide>
<ide> if is_list_view(path, method, self.view):
<ide> response_schema = {
<ide> def _allows_filters(self, path, method):
<ide> RemovedInDRF314Warning, stacklevel=2
<ide> )
<ide> return self.allows_filters(path, method)
<add>
<add> def _get_reference(self, serializer):
<add> warnings.warn(
<add> "Method `_get_reference()` has been renamed to `get_reference()`. "
<add> "The old name will be removed in DRF v3.14.",
<add> RemovedInDRF314Warning, stacklevel=2
<add> )
<add> return self.get_reference(serializer) | 2 |
Ruby | Ruby | finalize metadata handling and uninstall logic | 7f2e4f583a5789a86712322818612c439307b90d | <ide><path>Library/Homebrew/cask/lib/hbc/cli/upgrade.rb
<ide> def run
<ide>
<ide> old_cask_installer = Installer.new(old_cask, binaries: binaries?, verbose: verbose?, force: force?, upgrade: true)
<ide>
<del> old_cask_installer.uninstall
<del>
<del> begin
<del> odebug "Installing new version of Cask #{old_cask}"
<del>
<del> new_cask = CaskLoader.load(old_cask.to_s)
<add> new_cask = CaskLoader.load(old_cask.to_s)
<ide>
<add> new_cask_installer =
<ide> Installer.new(new_cask, binaries: binaries?,
<ide> verbose: verbose?,
<ide> force: force?,
<ide> skip_cask_deps: skip_cask_deps?,
<ide> require_sha: require_sha?,
<del> upgrade: true).install
<add> upgrade: true)
<add>
<add> begin
<add> # purge artifacts BUT keep metadata aside
<add> old_cask_installer.start_upgrade
<add>
<add> # install BUT do not yet save metadata
<ide>
<add> new_cask_installer.install
<add>
<add> # if successful, remove old metadata and install new
<ide> old_cask_installer.finalize_upgrade
<del> rescue CaskUnavailableError => e
<del> opoo e.message
<del> rescue CaskAlreadyInstalledError => e
<add> rescue CaskError => e
<ide> opoo e.message
<add> old_cask_installer.revert_upgrade
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/cask/lib/hbc/installer.rb
<ide> def stage
<ide> def install
<ide> odebug "Hbc::Installer#install"
<ide>
<del> if @cask.installed? && !force? && !@reinstall && !@upgrade
<del> raise CaskAlreadyInstalledError, @cask
<add> if @cask.installed? && !force? && !@reinstall
<add> raise CaskAlreadyInstalledError, @cask unless @upgrade
<ide> end
<ide>
<ide> check_conflicts
<ide> def uninstall
<ide> oh1 "Uninstalling Cask #{@cask}"
<ide> disable_accessibility_access
<ide> uninstall_artifacts
<del> return if @upgrade
<del>
<ide> purge_versioned_files
<ide> purge_caskroom_path if force?
<ide> end
<ide>
<del> def finalize_upgrade
<add> def start_upgrade
<ide> return unless @upgrade
<add> oh1 "Starting upgrade for Cask #{@cask}"
<ide>
<del> purge_versioned_files
<del> purge_caskroom_path if force?
<add> disable_accessibility_access
<add> uninstall_artifacts
<add> end
<add>
<add> def revert_upgrade
<add> return unless @upgrade
<add> opoo "Reverting upgrade for Cask #{@cask}"
<add> reinstall
<add> end
<add>
<add> def finalize_upgrade
<add> return unless @upgrade
<add> purge_versioned_files(upgrade: true)
<add> oh1 "Cask #{@cask} was successfully upgraded!"
<ide> end
<ide>
<ide> def uninstall_artifacts
<ide> def gain_permissions_remove(path)
<ide> Utils.gain_permissions_remove(path, command: @command)
<ide> end
<ide>
<del> def purge_versioned_files
<add> def purge_versioned_files(upgrade: false)
<ide> odebug "Purging files for version #{@cask.version} of Cask #{@cask}"
<ide>
<ide> # versioned staged distribution
<ide> def purge_versioned_files
<ide> end
<ide> end
<ide> @cask.metadata_versioned_path.rmdir_if_possible
<del> @cask.metadata_master_container_path.rmdir_if_possible
<add> @cask.metadata_master_container_path.rmdir_if_possible unless upgrade
<ide>
<ide> # toplevel staged distribution
<del> @cask.caskroom_path.rmdir_if_possible
<add> @cask.caskroom_path.rmdir_if_possible unless upgrade
<ide> end
<ide>
<ide> def purge_caskroom_path | 2 |
Ruby | Ruby | optimize the performance of #delegate | 1bac04e854b42fc0e47162e251105434d356d2b4 | <ide><path>activesupport/lib/active_support/core_ext/module/delegation.rb
<ide> def delegate(*methods)
<ide> file, line = caller.first.split(':', 2)
<ide> line = line.to_i
<ide>
<del> if allow_nil
<del> methods.each do |method|
<add> methods.each do |method|
<add> method = method.to_s
<add>
<add> # Attribute writer methods only accept one argument. Makes sure []=
<add> # methods still accept two arguments.
<add> definition = (method =~ /[^\]]=$/) ? "arg" : "*args, &block"
<add>
<add> if allow_nil
<ide> module_eval(<<-EOS, file, line - 2)
<del> def #{method_prefix}#{method}(*args, &block) # def customer_name(*args, &block)
<add> def #{method_prefix}#{method}(#{definition}) # def customer_name(*args, &block)
<ide> if #{to} || #{to}.respond_to?(:#{method}) # if client || client.respond_to?(:name)
<del> #{to}.__send__(:#{method}, *args, &block) # client.__send__(:name, *args, &block)
<add> #{to}.#{method}(#{definition}) # client.name(*args, &block)
<ide> end # end
<ide> end # end
<ide> EOS
<del> end
<del> else
<del> methods.each do |method|
<add> else
<ide> exception = %(raise "#{self}##{method_prefix}#{method} delegated to #{to}.#{method}, but #{to} is nil: \#{self.inspect}")
<ide>
<ide> module_eval(<<-EOS, file, line - 1)
<del> def #{method_prefix}#{method}(*args, &block) # def customer_name(*args, &block)
<del> #{to}.__send__(:#{method}, *args, &block) # client.__send__(:name, *args, &block)
<add> def #{method_prefix}#{method}(#{definition}) # def customer_name(*args, &block)
<add> #{to}.#{method}(#{definition}) # client.name(*args, &block)
<ide> rescue NoMethodError # rescue NoMethodError
<ide> if #{to}.nil? # if client.nil?
<ide> #{exception} # # add helpful message to the exception
<ide><path>activesupport/test/core_ext/module_test.rb
<ide> class Someone < Struct.new(:name, :place)
<ide> delegate :name, :to => :client, :prefix => false
<ide> end
<ide>
<add>class ParameterSet
<add> delegate :[], :[]=, :to => :@params
<add>
<add> def initialize
<add> @params = {:foo => "bar"}
<add> end
<add>end
<add>
<ide> class Name
<ide> delegate :upcase, :to => :@full_name
<ide>
<ide> def test_delegation_to_assignment_method
<ide> assert_equal "Fred", @david.place.name
<ide> end
<ide>
<add> def test_delegation_to_index_get_method
<add> @params = ParameterSet.new
<add> assert_equal "bar", @params[:foo]
<add> end
<add>
<add> def test_delegation_to_index_set_method
<add> @params = ParameterSet.new
<add> @params[:foo] = "baz"
<add> assert_equal "baz", @params[:foo]
<add> end
<add>
<ide> def test_delegation_down_hierarchy
<ide> assert_equal "CHICAGO", @david.upcase
<ide> end | 2 |
Javascript | Javascript | implement template scopes as classes | 87190229e133deab859fbc935e8241f8c667936d | <ide><path>packages/ember-htmlbars/lib/env.js
<ide> import merge from 'ember-metal/merge';
<ide> import subexpr from 'ember-htmlbars/hooks/subexpr';
<ide> import concat from 'ember-htmlbars/hooks/concat';
<ide> import linkRenderNode from 'ember-htmlbars/hooks/link-render-node';
<del>import createFreshScope from 'ember-htmlbars/hooks/create-fresh-scope';
<add>import createFreshScope, { createChildScope } from 'ember-htmlbars/hooks/create-fresh-scope';
<ide> import bindShadowScope from 'ember-htmlbars/hooks/bind-shadow-scope';
<ide> import bindSelf from 'ember-htmlbars/hooks/bind-self';
<ide> import bindScope from 'ember-htmlbars/hooks/bind-scope';
<ide> import bindLocal from 'ember-htmlbars/hooks/bind-local';
<add>import bindBlock from 'ember-htmlbars/hooks/bind-block';
<ide> import updateSelf from 'ember-htmlbars/hooks/update-self';
<ide> import getRoot from 'ember-htmlbars/hooks/get-root';
<ide> import getChild from 'ember-htmlbars/hooks/get-child';
<add>import getBlock from 'ember-htmlbars/hooks/get-block';
<ide> import getValue from 'ember-htmlbars/hooks/get-value';
<ide> import getCellOrValue from 'ember-htmlbars/hooks/get-cell-or-value';
<ide> import cleanupRenderNode from 'ember-htmlbars/hooks/cleanup-render-node';
<ide> emberHooks.keywords = keywords;
<ide> merge(emberHooks, {
<ide> linkRenderNode,
<ide> createFreshScope,
<add> createChildScope,
<ide> bindShadowScope,
<ide> bindSelf,
<ide> bindScope,
<ide> bindLocal,
<add> bindBlock,
<ide> updateSelf,
<add> getBlock,
<ide> getRoot,
<ide> getChild,
<ide> getValue,
<ide> import partial from 'ember-htmlbars/keywords/partial';
<ide> import input from 'ember-htmlbars/keywords/input';
<ide> import textarea from 'ember-htmlbars/keywords/textarea';
<ide> import collection from 'ember-htmlbars/keywords/collection';
<add>import yieldKeyword from 'ember-htmlbars/keywords/yield';
<ide> import legacyYield from 'ember-htmlbars/keywords/legacy-yield';
<ide> import mut, { privateMut } from 'ember-htmlbars/keywords/mut';
<ide> import each from 'ember-htmlbars/keywords/each';
<ide> registerKeyword('component', componentKeyword);
<ide> registerKeyword('partial', partial);
<ide> registerKeyword('input', input);
<ide> registerKeyword('textarea', textarea);
<add>registerKeyword('yield', yieldKeyword);
<ide> registerKeyword('legacy-yield', legacyYield);
<ide> registerKeyword('mut', mut);
<ide> registerKeyword('@mut', privateMut);
<ide><path>packages/ember-htmlbars/lib/hooks/bind-block.js
<add>export default function bindBlock(env, scope, block, name='default') {
<add> scope.bindBlock(name, block);
<add>}
<ide><path>packages/ember-htmlbars/lib/hooks/bind-local.js
<ide> import Stream from 'ember-metal/streams/stream';
<ide> import ProxyStream from 'ember-metal/streams/proxy-stream';
<ide>
<ide> export default function bindLocal(env, scope, key, value) {
<del> var isExisting = scope.locals.hasOwnProperty(key);
<del> if (isExisting) {
<del> var existing = scope.locals[key];
<del>
<add> // TODO: What is the cause of these cases?
<add> if (scope.hasOwnLocal(key)) {
<add> let existing = scope.getLocal(key);
<ide> if (existing !== value) {
<ide> existing.setSource(value);
<ide> }
<ide> } else {
<del> var newValue = Stream.wrap(value, ProxyStream, key);
<del> scope.locals[key] = newValue;
<add> let newValue = Stream.wrap(value, ProxyStream, key);
<add> scope.bindLocal(key, newValue);
<ide> }
<ide> }
<ide><path>packages/ember-htmlbars/lib/hooks/bind-self.js
<ide> @submodule ember-htmlbars
<ide> */
<ide>
<del>import newStream from 'ember-htmlbars/utils/new-stream';
<add>import ProxyStream from 'ember-metal/streams/proxy-stream';
<ide>
<ide> export default function bindSelf(env, scope, _self) {
<ide> let self = _self;
<ide> export default function bindSelf(env, scope, _self) {
<ide> let { controller } = self;
<ide> self = self.self;
<ide>
<del> newStream(scope.locals, 'controller', controller || self);
<add> scope.bindLocal('controller', newStream(controller || self));
<ide> }
<ide>
<ide> if (self && self.isView) {
<del> newStream(scope.locals, 'view', self, null);
<del> newStream(scope.locals, 'controller', scope.locals.view.getKey('controller'));
<add> scope.bindLocal('view', newStream(self, 'view'));
<add> scope.bindLocal('controller', newStream(self, '').getKey('controller'));
<add>
<add> let selfStream = newStream(self, '');
<ide>
<ide> if (self.isGlimmerComponent) {
<del> newStream(scope, 'self', self, null, true);
<add> scope.bindSelf(selfStream);
<ide> } else {
<del> newStream(scope, 'self', scope.locals.view.getKey('context'), null, true);
<add> scope.bindSelf(newStream(selfStream.getKey('context'), ''));
<ide> }
<ide>
<ide> return;
<ide> }
<ide>
<del> newStream(scope, 'self', self, null, true);
<add> let selfStream = newStream(self, '');
<add> scope.bindSelf(selfStream);
<ide>
<del> if (!scope.locals.controller) {
<del> scope.locals.controller = scope.self;
<add> if (!scope.hasLocal('controller')) {
<add> scope.bindLocal('controller', selfStream);
<ide> }
<ide> }
<add>
<add>function newStream(newValue, key) {
<add> return new ProxyStream(newValue, key);
<add>}
<ide><path>packages/ember-htmlbars/lib/hooks/bind-shadow-scope.js
<ide> @submodule ember-htmlbars
<ide> */
<ide>
<del>import newStream from 'ember-htmlbars/utils/new-stream';
<add>import ProxyStream from 'ember-metal/streams/proxy-stream';
<ide>
<ide> export default function bindShadowScope(env, parentScope, shadowScope, options) {
<ide> if (!options) { return; }
<ide> export default function bindShadowScope(env, parentScope, shadowScope, options)
<ide>
<ide> if (parentScope && parentScope.overrideController) {
<ide> didOverrideController = true;
<del> shadowScope.locals.controller = parentScope.locals.controller;
<add> shadowScope.bindLocal('controller', parentScope.getLocal('controller'));
<ide> }
<ide>
<ide> var view = options.view;
<ide> if (view && !view.isComponent) {
<del> newStream(shadowScope.locals, 'view', view, null);
<add> shadowScope.bindLocal('view', newStream(view, 'view'));
<ide>
<ide> if (!didOverrideController) {
<del> newStream(shadowScope.locals, 'controller', shadowScope.locals.view.getKey('controller'));
<add> shadowScope.bindLocal('controller', newStream(shadowScope.getLocal('view').getKey('controller')));
<ide> }
<ide>
<ide> if (view.isView) {
<del> newStream(shadowScope, 'self', shadowScope.locals.view.getKey('context'), null, true);
<add> shadowScope.bindSelf(newStream(shadowScope.getLocal('view').getKey('context'), ''));
<ide> }
<ide> }
<ide>
<del> shadowScope.view = view;
<add> shadowScope.bindView(view);
<ide>
<ide> if (view && options.attrs) {
<del> shadowScope.component = view;
<add> shadowScope.bindComponent(view);
<ide> }
<ide>
<ide> if ('attrs' in options) {
<del> shadowScope.attrs = options.attrs;
<add> shadowScope.bindAttrs(options.attrs);
<ide> }
<ide>
<ide> return shadowScope;
<ide> }
<add>
<add>function newStream(newValue, key) {
<add> return new ProxyStream(newValue, key);
<add>}
<ide><path>packages/ember-htmlbars/lib/hooks/component.js
<ide> export default function componentHook(renderNode, env, scope, _tagName, params,
<ide> tagName,
<ide> isAngleBracket: true,
<ide> isComponentElement: true,
<del> outerAttrs: scope.attrs,
<add> outerAttrs: scope.getAttrs(),
<ide> parentScope: scope
<ide> };
<ide>
<ide><path>packages/ember-htmlbars/lib/hooks/create-fresh-scope.js
<add>import ProxyStream from 'ember-metal/streams/proxy-stream';
<add>
<ide> /*
<ide> Ember's implementation of HTMLBars creates an enriched scope.
<ide>
<ide> the current view's `controller`.
<ide> */
<ide>
<add>function Scope(parent) {
<add> this._self = null;
<add> this._blocks = {};
<add> this._component = null;
<add> this._view = null;
<add> this._attrs = null;
<add> this._locals = {};
<add> this._localPresent = {};
<add> this.overrideController = false;
<add> this.parent = parent;
<add>}
<add>
<add>let proto = Scope.prototype;
<add>
<add>proto.getSelf = function() {
<add> return this._self || this.parent.getSelf();
<add>};
<add>
<add>proto.bindSelf = function(self) {
<add> this._self = self;
<add>};
<add>
<add>proto.updateSelf = function(self, key) {
<add> let existing = this._self;
<add>
<add> if (existing) {
<add> existing.setSource(self);
<add> } else {
<add> this._self = new ProxyStream(self, key);
<add> }
<add>};
<add>
<add>proto.getBlock = function(name) {
<add> return this._blocks[name] || this.parent.getBlock(name);
<add>};
<add>
<add>proto.hasBlock = function(name) {
<add> return !!(this._blocks[name] || this.parent.hasBlock(name));
<add>};
<add>
<add>proto.bindBlock = function(name, block) {
<add> this._blocks[name] = block;
<add>};
<add>
<add>proto.getComponent = function() {
<add> return this._component || this.parent.getComponent();
<add>};
<add>
<add>proto.bindComponent = function(component) {
<add> this._component = component;
<add>};
<add>
<add>proto.getView = function() {
<add> return this._view || this.parent.getView();
<add>};
<add>
<add>proto.bindView = function(view) {
<add> this._view = view;
<add>};
<add>
<add>proto.getAttrs = function() {
<add> return this._attrs || this.parent.getAttrs();
<add>};
<add>
<add>proto.bindAttrs = function(attrs) {
<add> this._attrs = attrs;
<add>};
<add>
<add>proto.hasLocal = function(name) {
<add> return this._localPresent[name] || this.parent.hasLocal(name);
<add>};
<add>
<add>proto.hasOwnLocal = function(name) {
<add> return this._localPresent[name];
<add>};
<add>
<add>proto.getLocal = function(name) {
<add> return this._localPresent[name] ? this._locals[name] : this.parent.getLocal(name);
<add>};
<add>
<add>proto.bindLocal = function(name, value) {
<add> this._localPresent[name] = true;
<add> this._locals[name] = value;
<add>};
<add>
<add>const EMPTY = {
<add> getSelf() { return null; },
<add> bindSelf(self) { return null; },
<add> updateSelf(self, key) { return null; },
<add> getBlock(name) { return null; },
<add> bindBlock(name, block) { return null; },
<add> hasBlock(name) { return false; },
<add> getComponent() { return null; },
<add> bindComponent() { return null; },
<add> getView() { return null; },
<add> bindView(view) { return null; },
<add> getAttrs() { return null; },
<add> bindAttrs(attrs) { return null; },
<add> hasLocal(name) { return false; },
<add> hasOwnLocal(name) { return false; },
<add> getLocal(name) { return null; },
<add> bindLocal(name, value) { return null; }
<add>};
<add>
<ide> export default function createFreshScope() {
<del> return {
<del> self: null,
<del> blocks: {},
<del> component: null,
<del> attrs: null,
<del> locals: {},
<del> localPresent: {}
<del> };
<add> return new Scope(EMPTY);
<add>}
<add>
<add>export function createChildScope(parent) {
<add> return new Scope(parent);
<ide> }
<ide><path>packages/ember-htmlbars/lib/hooks/element.js
<ide> export default function emberElement(morph, env, scope, path, params, hash, visi
<ide> }
<ide>
<ide> var result;
<del> var helper = findHelper(path, scope.self, env);
<add> var helper = findHelper(path, scope.getSelf(), env);
<ide> if (helper) {
<ide> var helperStream = buildHelperStream(helper, params, hash, { element: morph.element }, env, scope, path);
<ide> result = helperStream.value();
<ide><path>packages/ember-htmlbars/lib/hooks/get-block.js
<add>export default function getBlock(scope, key) {
<add> return scope.getBlock(key);
<add>}
<ide><path>packages/ember-htmlbars/lib/hooks/get-root.js
<ide>
<ide> export default function getRoot(scope, key) {
<ide> if (key === 'this') {
<del> return [scope.self];
<add> return [scope.getSelf()];
<ide> } else if (key === 'hasBlock') {
<del> return [!!scope.blocks.default];
<add> return [!!scope.hasBlock('default')];
<ide> } else if (key === 'hasBlockParams') {
<del> return [!!(scope.blocks.default && scope.blocks.default.arity)];
<del> } else if (key in scope.locals) {
<del> return [scope.locals[key]];
<add> let block = scope.getBlock('default');
<add> return [!!block && block.arity];
<add> } else if (scope.hasLocal(key)) {
<add> return [scope.getLocal(key)];
<ide> } else {
<ide> return [getKey(scope, key)];
<ide> }
<ide> }
<ide>
<ide> function getKey(scope, key) {
<del> if (key === 'attrs' && scope.attrs) {
<del> return scope.attrs;
<add> if (key === 'attrs') {
<add> let attrs = scope.getAttrs();
<add> if (attrs) { return attrs; }
<ide> }
<ide>
<del> var self = scope.self || scope.locals.view;
<add> var self = scope.getSelf() || scope.getLocal('view');
<ide>
<ide> if (self) {
<ide> return self.getKey(key);
<del> } else if (scope.attrs && key in scope.attrs) {
<add> }
<add>
<add> let attrs = scope.getAttrs();
<add> if (key in attrs) {
<ide> // TODO: attrs
<ide> // deprecate("You accessed the `" + key + "` attribute directly. Please use `attrs." + key + "` instead.");
<del> return scope.attrs[key];
<add> return attrs[key];
<ide> }
<ide> }
<ide><path>packages/ember-htmlbars/lib/hooks/lookup-helper.js
<ide> import lookupHelper from 'ember-htmlbars/system/lookup-helper';
<ide>
<ide> export default function lookupHelperHook(env, scope, helperName) {
<del> return lookupHelper(helperName, scope.self, env);
<add> return lookupHelper(helperName, scope.getSelf(), env);
<ide> }
<ide><path>packages/ember-htmlbars/lib/hooks/subexpr.js
<ide> export default function subexpr(env, scope, helperName, params, hash) {
<ide> }
<ide>
<ide> var label = labelForSubexpr(params, hash, helperName);
<del> var helper = lookupHelper(helperName, scope.self, env);
<add> var helper = lookupHelper(helperName, scope.getSelf(), env);
<ide>
<ide> var helperStream = buildHelperStream(helper, params, hash, null, env, scope, label);
<ide>
<ide><path>packages/ember-htmlbars/lib/hooks/update-self.js
<ide>
<ide> import { assert } from 'ember-metal/debug';
<ide> import { get } from 'ember-metal/property_get';
<del>import updateScope from 'ember-htmlbars/utils/update-scope';
<ide>
<ide> export default function updateSelf(env, scope, _self) {
<ide> let self = _self;
<ide> export default function updateSelf(env, scope, _self) {
<ide> let { controller } = self;
<ide> self = self.self;
<ide>
<del> updateScope(scope.locals, 'controller', controller || self);
<add> scope.updateLocal('controller', controller || self);
<ide> }
<ide>
<ide> assert('BUG: scope.attrs and self.isView should not both be true', !(scope.attrs && self.isView));
<ide>
<ide> if (self && self.isView) {
<del> updateScope(scope.locals, 'view', self, null);
<del> updateScope(scope, 'self', get(self, 'context'), null, true);
<add> scope.updateLocal('view', self);
<add> scope.updateSelf(get(self, 'context'), '');
<ide> return;
<ide> }
<ide>
<del> updateScope(scope, 'self', self, null);
<add> scope.updateSelf(self);
<ide> }
<ide><path>packages/ember-htmlbars/lib/keywords/debugger.js
<ide> import { info } from 'ember-metal/debug';
<ide> export default function debuggerKeyword(morph, env, scope) {
<ide> /* jshint unused: false, debug: true */
<ide>
<del> var view = env.hooks.getValue(scope.locals.view);
<del> var context = env.hooks.getValue(scope.self);
<add> var view = env.hooks.getValue(scope.getLocal('view'));
<add> var context = env.hooks.getValue(scope.getSelf());
<ide>
<ide> function get(path) {
<ide> return env.hooks.getValue(env.hooks.get(env, scope, path));
<ide><path>packages/ember-htmlbars/lib/keywords/legacy-yield.js
<ide> import ProxyStream from 'ember-metal/streams/proxy-stream';
<ide>
<ide> export default function legacyYield(morph, env, _scope, params, hash, template, inverse, visitor) {
<ide> let scope = _scope;
<add> let block = scope.getBlock('default');
<ide>
<del> if (scope.blocks.default.arity === 0) {
<add> if (block.arity === 0) {
<ide> // Typically, the `controller` local is persists through lexical scope.
<ide> // However, in this case, the `{{legacy-yield}}` in the legacy each view
<ide> // needs to override the controller local for the template it is yielding.
<ide> // This megahaxx allows us to override the controller, and most importantly,
<ide> // prevents the downstream scope from attempting to bind the `controller` local.
<ide> if (hash.controller) {
<ide> scope = env.hooks.createChildScope(scope);
<del> scope.locals.controller = new ProxyStream(hash.controller, 'controller');
<add> scope.bindLocal('controller', new ProxyStream(hash.controller, 'controller'));
<ide> scope.overrideController = true;
<ide> }
<del> scope.blocks.default.invoke(env, [], params[0], morph, scope, visitor);
<add> block.invoke(env, [], params[0], morph, scope, visitor);
<ide> } else {
<del> scope.blocks.default.invoke(env, params, undefined, morph, scope, visitor);
<add> block.invoke(env, params, undefined, morph, scope, visitor);
<ide> }
<ide>
<ide> return true;
<ide><path>packages/ember-htmlbars/lib/keywords/view.js
<ide> import ViewNodeManager from 'ember-htmlbars/node-managers/view-node-manager';
<ide> export default {
<ide> setupState(state, env, scope, params, hash) {
<ide> var read = env.hooks.getValue;
<del> var targetObject = read(scope.self);
<add> var targetObject = read(scope.getSelf());
<ide> var viewClassOrInstance = state.viewClassOrInstance;
<ide> if (!viewClassOrInstance) {
<ide> viewClassOrInstance = getView(read(params[0]), env.container);
<ide> }
<ide>
<ide> // if parentView exists, use its controller (the default
<ide> // behavior), otherwise use `scope.self` as the controller
<del> var controller = scope.locals.view ? null : read(scope.self);
<add> var controller = scope.hasLocal('view') ? null : read(scope.getSelf());
<ide>
<ide> return {
<ide> manager: state.manager,
<ide><path>packages/ember-htmlbars/lib/keywords/yield.js
<add>export default function yieldKeyword(morph, env, scope, params, hash, template, inverse, visitor) {
<add> let to = env.hooks.getValue(hash.to) || 'default';
<add> let block = scope.getBlock(to);
<add>
<add> if (block) {
<add> block.invoke(env, params, hash.self, morph, scope, visitor);
<add> }
<add>
<add> return true;
<add>}
<ide><path>packages/ember-htmlbars/lib/node-managers/component-node-manager.js
<ide> ComponentNodeManager.create = function(renderNode, env, options) {
<ide> // If there is a controller on the scope, pluck it off and save it on the
<ide> // component. This allows the component to target actions sent via
<ide> // `sendAction` correctly.
<del> if (parentScope.locals.controller) {
<del> createOptions._controller = getValue(parentScope.locals.controller);
<add> if (parentScope.hasLocal('controller')) {
<add> createOptions._controller = getValue(parentScope.getLocal('controller'));
<ide> }
<ide>
<ide> extractPositionalParams(renderNode, component, params, attrs);
<ide><path>packages/ember-htmlbars/lib/node-managers/view-node-manager.js
<ide> ViewNodeManager.create = function(renderNode, env, attrs, found, parentView, pat
<ide> if (attrs && attrs._defaultTagName) { options._defaultTagName = getValue(attrs._defaultTagName); }
<ide> if (attrs && attrs.viewName) { options.viewName = getValue(attrs.viewName); }
<ide>
<del> if (found.component.create && contentScope && contentScope.self) {
<del> options._context = getValue(contentScope.self);
<add> if (found.component.create && contentScope) {
<add> let _self = contentScope.getSelf();
<add> if (_self) {
<add> options._context = getValue(contentScope.getSelf());
<add> }
<ide> }
<ide>
<ide> if (found.self) {
<ide><path>packages/ember-htmlbars/lib/utils/subscribe.js
<ide> import { isStream, labelFor } from 'ember-metal/streams/utils';
<ide>
<ide> export default function subscribe(node, env, scope, stream) {
<ide> if (!isStream(stream)) { return; }
<del> var component = scope.component;
<add> var component = scope.getComponent();
<ide> var unsubscribers = node.streamUnsubscribers = node.streamUnsubscribers || [];
<ide>
<ide> unsubscribers.push(stream.subscribe(function() {
<ide><path>packages/ember-routing-htmlbars/lib/keywords/closure-action.js
<ide> export default function closureAction(morph, env, scope, params, hash, template,
<ide> // on-change={{action setName}}
<ide> // element-space actions look to "controller" then target. Here we only
<ide> // look to "target".
<del> target = read(scope.self);
<add> target = read(scope.getSelf());
<ide> action = read(rawAction);
<ide> let actionType = typeof action;
<ide>
<ide><path>packages/ember-routing-htmlbars/lib/keywords/element-action.js
<ide> export default {
<ide> target = read(hash.target);
<ide> }
<ide> } else {
<del> target = read(scope.locals.controller) || read(scope.self);
<add> target = read(scope.getLocal('controller')) || read(scope.getSelf());
<ide> }
<ide>
<ide> return { actionName, actionArgs, target };
<ide><path>packages/ember-routing-htmlbars/lib/keywords/render.js
<ide> export default {
<ide> controllerFullName = 'controller:' + controllerName;
<ide> }
<ide>
<del> var parentController = read(scope.locals.controller);
<add> var parentController = read(scope.getLocal('controller'));
<ide> var controller;
<ide>
<ide> // choose name | 23 |
Go | Go | fix incorrect info and format of error in image | b2ec509a3f9e50980fe3733500df56289c174252 | <ide><path>image/fs.go
<ide> func (s *fs) Walk(f DigestWalkFunc) error {
<ide> for _, v := range dir {
<ide> dgst := digest.NewDigestFromHex(string(digest.Canonical), v.Name())
<ide> if err := dgst.Validate(); err != nil {
<del> logrus.Debugf("Skipping invalid digest %s: %s", dgst, err)
<add> logrus.Debugf("skipping invalid digest %s: %s", dgst, err)
<ide> continue
<ide> }
<ide> if err := f(dgst); err != nil {
<ide> func (s *fs) Set(data []byte) (digest.Digest, error) {
<ide> defer s.Unlock()
<ide>
<ide> if len(data) == 0 {
<del> return "", fmt.Errorf("Invalid empty data")
<add> return "", fmt.Errorf("invalid empty data")
<ide> }
<ide>
<ide> dgst := digest.FromBytes(data)
<ide><path>image/fs_test.go
<ide> func TestFSGetInvalidData(t *testing.T) {
<ide>
<ide> _, err = fs.Get(id)
<ide> if err == nil {
<del> t.Fatal("Expected get to fail after data modification.")
<add> t.Fatal("expected get to fail after data modification.")
<ide> }
<ide> }
<ide>
<ide> func TestFSInvalidSet(t *testing.T) {
<ide>
<ide> _, err = fs.Set([]byte("foobar"))
<ide> if err == nil {
<del> t.Fatal("Expecting error from invalid filesystem data.")
<add> t.Fatal("expected error from invalid filesystem data.")
<ide> }
<ide> }
<ide>
<ide> func TestFSInvalidRoot(t *testing.T) {
<ide>
<ide> _, err = NewFSStoreBackend(root)
<ide> if err == nil {
<del> t.Fatalf("Expected error from root %q and invlid file %q", tc.root, tc.invalidFile)
<add> t.Fatalf("expected error from root %q and invalid file %q", tc.root, tc.invalidFile)
<ide> }
<ide>
<ide> os.RemoveAll(root)
<ide> func testMetadataGetSet(t *testing.T, store StoreBackend) {
<ide>
<ide> _, err = store.GetMetadata(id2, "tkey2")
<ide> if err == nil {
<del> t.Fatal("Expected error for getting metadata for unknown key")
<add> t.Fatal("expected error for getting metadata for unknown key")
<ide> }
<ide>
<ide> id3 := digest.FromBytes([]byte("baz"))
<ide> err = store.SetMetadata(id3, "tkey", []byte("tval"))
<ide> if err == nil {
<del> t.Fatal("Expected error for setting metadata for unknown ID.")
<add> t.Fatal("expected error for setting metadata for unknown ID.")
<ide> }
<ide>
<ide> _, err = store.GetMetadata(id3, "tkey")
<ide> if err == nil {
<del> t.Fatal("Expected error for getting metadata for unknown ID.")
<add> t.Fatal("expected error for getting metadata for unknown ID.")
<ide> }
<ide> }
<ide>
<ide> func TestFSInvalidWalker(t *testing.T) {
<ide> n := 0
<ide> err = fs.Walk(func(id digest.Digest) error {
<ide> if id != fooID {
<del> t.Fatalf("Invalid walker ID %q, expected %q", id, fooID)
<add> t.Fatalf("invalid walker ID %q, expected %q", id, fooID)
<ide> }
<ide> n++
<ide> return nil
<ide> })
<ide> if err != nil {
<del> t.Fatalf("Invalid data should not have caused walker error, got %v", err)
<add> t.Fatalf("invalid data should not have caused walker error, got %v", err)
<ide> }
<ide> if n != 1 {
<del> t.Fatalf("Expected 1 walk initialization, got %d", n)
<add> t.Fatalf("expected 1 walk initialization, got %d", n)
<ide> }
<ide> }
<ide>
<ide> func testGetSet(t *testing.T, store StoreBackend) {
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> }
<del> // skipping use of digest pkg because its used by the implementation
<add> // skipping use of digest pkg because it is used by the implementation
<ide> h := sha256.New()
<ide> _, err = h.Write(randomInput)
<ide> if err != nil {
<ide> func testGetSet(t *testing.T, store StoreBackend) {
<ide> t.Fatal(err)
<ide> }
<ide> if id != tc.expected {
<del> t.Fatalf("Expected ID %q, got %q", tc.expected, id)
<add> t.Fatalf("expected ID %q, got %q", tc.expected, id)
<ide> }
<ide> }
<ide>
<ide> for _, emptyData := range [][]byte{nil, {}} {
<ide> _, err := store.Set(emptyData)
<ide> if err == nil {
<del> t.Fatal("Expected error for nil input.")
<add> t.Fatal("expected error for nil input.")
<ide> }
<ide> }
<ide>
<ide> func testGetSet(t *testing.T, store StoreBackend) {
<ide> t.Fatal(err)
<ide> }
<ide> if bytes.Compare(data, tc.input) != 0 {
<del> t.Fatalf("Expected data %q, got %q", tc.input, data)
<add> t.Fatalf("expected data %q, got %q", tc.input, data)
<ide> }
<ide> }
<ide>
<ide> for _, key := range []digest.Digest{"foobar:abc", "sha256:abc", "sha256:c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2a"} {
<ide> _, err := store.Get(key)
<ide> if err == nil {
<del> t.Fatalf("Expected error for ID %q.", key)
<add> t.Fatalf("expected error for ID %q.", key)
<ide> }
<ide> }
<ide>
<ide> func testDelete(t *testing.T, store StoreBackend) {
<ide>
<ide> _, err = store.Get(id)
<ide> if err == nil {
<del> t.Fatalf("Expected getting deleted item %q to fail", id)
<add> t.Fatalf("expected getting deleted item %q to fail", id)
<ide> }
<ide> _, err = store.Get(id2)
<ide> if err != nil {
<ide> func testDelete(t *testing.T, store StoreBackend) {
<ide> }
<ide> _, err = store.Get(id2)
<ide> if err == nil {
<del> t.Fatalf("Expected getting deleted item %q to fail", id2)
<add> t.Fatalf("expected getting deleted item %q to fail", id2)
<ide> }
<ide> }
<ide>
<ide> func testWalker(t *testing.T, store StoreBackend) {
<ide> }
<ide>
<ide> if n != 2 {
<del> t.Fatalf("Expected 2 walk initializations, got %d", n)
<add> t.Fatalf("expected 2 walk initializations, got %d", n)
<ide> }
<ide> if len(tcases) != 0 {
<del> t.Fatalf("Expected empty unwalked set, got %+v", tcases)
<add> t.Fatalf("expected empty unwalked set, got %+v", tcases)
<ide> }
<ide>
<ide> // stop on error
<ide> func testWalker(t *testing.T, store StoreBackend) {
<ide> return errors.New("")
<ide> })
<ide> if err == nil {
<del> t.Fatalf("Exected error from walker.")
<add> t.Fatalf("expected error from walker.")
<ide> }
<ide> }
<ide><path>image/image.go
<ide> func IDFromDigest(digest digest.Digest) ID {
<ide>
<ide> // V1Image stores the V1 image configuration.
<ide> type V1Image struct {
<del> // ID a unique 64 character identifier of the image
<add> // ID is a unique 64 character identifier of the image
<ide> ID string `json:"id,omitempty"`
<del> // Parent id of the image
<add> // Parent is the ID of the parent image
<ide> Parent string `json:"parent,omitempty"`
<del> // Comment user added comment
<add> // Comment is the commit message that was set when committing the image
<ide> Comment string `json:"comment,omitempty"`
<del> // Created timestamp when image was created
<add> // Created is the timestamp at which the image was created
<ide> Created time.Time `json:"created"`
<ide> // Container is the id of the container used to commit
<ide> Container string `json:"container,omitempty"`
<ide> // ContainerConfig is the configuration of the container that is committed into the image
<ide> ContainerConfig container.Config `json:"container_config,omitempty"`
<del> // DockerVersion specifies version on which image is built
<add> // DockerVersion specifies the version of Docker that was used to build the image
<ide> DockerVersion string `json:"docker_version,omitempty"`
<del> // Author of the image
<add> // Author is the name of the author that was specified when committing the image
<ide> Author string `json:"author,omitempty"`
<ide> // Config is the configuration of the container received from the client
<ide> Config *container.Config `json:"config,omitempty"`
<ide> func (img *Image) MarshalJSON() ([]byte, error) {
<ide>
<ide> // History stores build commands that were used to create an image
<ide> type History struct {
<del> // Created timestamp for build point
<add> // Created is the timestamp at which the image was created
<ide> Created time.Time `json:"created"`
<del> // Author of the build point
<add> // Author is the name of the author that was specified when committing the image
<ide> Author string `json:"author,omitempty"`
<del> // CreatedBy keeps the Dockerfile command used while building image.
<add> // CreatedBy keeps the Dockerfile command used while building the image
<ide> CreatedBy string `json:"created_by,omitempty"`
<del> // Comment is custom message set by the user when creating the image.
<add> // Comment is the commit message that was set when committing the image
<ide> Comment string `json:"comment,omitempty"`
<ide> // EmptyLayer is set to true if this history item did not generate a
<ide> // layer. Otherwise, the history item is associated with the next
<ide> // layer in the RootFS section.
<ide> EmptyLayer bool `json:"empty_layer,omitempty"`
<ide> }
<ide>
<del>// Exporter provides interface for exporting and importing images
<add>// Exporter provides interface for loading and saving images
<ide> type Exporter interface {
<ide> Load(io.ReadCloser, io.Writer, bool) error
<ide> // TODO: Load(net.Context, io.ReadCloser, <- chan StatusMessage) error
<ide> func NewFromJSON(src []byte) (*Image, error) {
<ide> return nil, err
<ide> }
<ide> if img.RootFS == nil {
<del> return nil, errors.New("Invalid image JSON, no RootFS key.")
<add> return nil, errors.New("invalid image JSON, no RootFS key")
<ide> }
<ide>
<ide> img.rawJSON = src
<ide><path>image/image_test.go
<ide> func TestJSON(t *testing.T) {
<ide> }
<ide> rawJSON := img.RawJSON()
<ide> if string(rawJSON) != sampleImageJSON {
<del> t.Fatalf("Raw JSON of config didn't match: expected %+v, got %v", sampleImageJSON, rawJSON)
<add> t.Fatalf("raw JSON of config didn't match: expected %+v, got %v", sampleImageJSON, rawJSON)
<ide> }
<ide> }
<ide>
<ide> func TestInvalidJSON(t *testing.T) {
<ide> _, err := NewFromJSON([]byte("{}"))
<ide> if err == nil {
<del> t.Fatal("Expected JSON parse error")
<add> t.Fatal("expected JSON parse error")
<ide> }
<ide> }
<ide>
<ide><path>image/tarexport/tarexport.go
<ide> type LogImageEvent interface {
<ide> LogImageEvent(imageID, refName, action string)
<ide> }
<ide>
<del>// NewTarExporter returns new ImageExporter for tar packages
<add>// NewTarExporter returns new Exporter for tar packages
<ide> func NewTarExporter(is image.Store, ls layer.Store, rs reference.Store, loggerImgEvent LogImageEvent) image.Exporter {
<ide> return &tarexporter{
<ide> is: is, | 5 |
Javascript | Javascript | set the correct initial value on input range | 36546b5137e9012ebdc62fc9ec11e3518c9e0aab | <ide><path>packages/react-dom/src/__tests__/ReactDOMServerIntegrationForms-test.js
<ide> describe('ReactDOMServerIntegration', () => {
<ide> ControlledSelect;
<ide> beforeEach(() => {
<ide> ControlledInput = class extends React.Component {
<add> static defaultProps = {
<add> type: 'text',
<add> initialValue: 'Hello',
<add> };
<ide> constructor() {
<del> super();
<del> this.state = {value: 'Hello'};
<add> super(...arguments);
<add> this.state = {value: this.props.initialValue};
<ide> }
<ide> handleChange(event) {
<ide> if (this.props.onChange) {
<ide> describe('ReactDOMServerIntegration', () => {
<ide> render() {
<ide> return (
<ide> <input
<add> type={this.props.type}
<ide> value={this.state.value}
<ide> onChange={this.handleChange.bind(this)}
<ide> />
<ide> describe('ReactDOMServerIntegration', () => {
<ide> expect(changeCount).toBe(0);
<ide> });
<ide>
<add> it('should not blow away user-interaction on successful reconnect to an uncontrolled range input', () =>
<add> testUserInteractionBeforeClientRender(
<add> <input type="text" defaultValue="0.5" />,
<add> '0.5',
<add> '1',
<add> ));
<add>
<add> it('should not blow away user-interaction on successful reconnect to a controlled range input', async () => {
<add> let changeCount = 0;
<add> await testUserInteractionBeforeClientRender(
<add> <ControlledInput
<add> type="range"
<add> initialValue="0.25"
<add> onChange={() => changeCount++}
<add> />,
<add> '0.25',
<add> '1',
<add> );
<add> expect(changeCount).toBe(0);
<add> });
<add>
<ide> it('should not blow away user-entered text on successful reconnect to an uncontrolled checkbox', () =>
<ide> testUserInteractionBeforeClientRender(
<ide> <input type="checkbox" defaultChecked={true} />,
<ide><path>packages/react-dom/src/client/ReactDOMFiberComponent.js
<ide> export function setInitialProperties(
<ide> // TODO: Make sure we check if this is still unmounted or do any clean
<ide> // up necessary since we never stop tracking anymore.
<ide> inputValueTracking.track((domElement: any));
<del> ReactDOMFiberInput.postMountWrapper(domElement, rawProps);
<add> ReactDOMFiberInput.postMountWrapper(domElement, rawProps, false);
<ide> break;
<ide> case 'textarea':
<ide> // TODO: Make sure we check if this is still unmounted or do any clean
<ide> export function diffHydratedProperties(
<ide> // TODO: Make sure we check if this is still unmounted or do any clean
<ide> // up necessary since we never stop tracking anymore.
<ide> inputValueTracking.track((domElement: any));
<del> ReactDOMFiberInput.postMountWrapper(domElement, rawProps);
<add> ReactDOMFiberInput.postMountWrapper(domElement, rawProps, true);
<ide> break;
<ide> case 'textarea':
<ide> // TODO: Make sure we check if this is still unmounted or do any clean
<ide><path>packages/react-dom/src/client/ReactDOMFiberInput.js
<ide> export function updateWrapper(element: Element, props: Object) {
<ide> }
<ide> }
<ide>
<del>export function postMountWrapper(element: Element, props: Object) {
<add>export function postMountWrapper(
<add> element: Element,
<add> props: Object,
<add> isHydrating: boolean,
<add>) {
<ide> const node = ((element: any): InputWithWrapperState);
<ide>
<ide> if (props.hasOwnProperty('value') || props.hasOwnProperty('defaultValue')) {
<ide> export function postMountWrapper(element: Element, props: Object) {
<ide>
<ide> // Do not assign value if it is already set. This prevents user text input
<ide> // from being lost during SSR hydration.
<del> if (currentValue === '') {
<add> if (!isHydrating) {
<ide> // Do not re-assign the value property if there is no change. This
<ide> // potentially avoids a DOM write and prevents Firefox (~60.0.1) from
<ide> // prematurely marking required inputs as invalid | 3 |
Ruby | Ruby | use the database type to deserialize enum | 67c1719012506c3387df067961252b5df50a97ce | <ide><path>activerecord/lib/active_record/enum.rb
<ide> def inherited(base) # :nodoc:
<ide> end
<ide>
<ide> class EnumType < Type::Value # :nodoc:
<del> def initialize(name, mapping)
<add> def initialize(name, mapping, subtype)
<ide> @name = name
<ide> @mapping = mapping
<add> @subtype = subtype
<ide> end
<ide>
<ide> def cast(value)
<ide> def cast(value)
<ide>
<ide> def deserialize(value)
<ide> return if value.nil?
<del> mapping.key(value)
<add> mapping.key(subtype.deserialize(value))
<ide> end
<ide>
<ide> def serialize(value)
<ide> def assert_valid_value(value)
<ide>
<ide> protected
<ide>
<del> attr_reader :name, :mapping
<add> attr_reader :name, :mapping, :subtype
<ide> end
<ide>
<ide> def enum(definitions)
<ide> def enum(definitions)
<ide> detect_enum_conflict!(name, name)
<ide> detect_enum_conflict!(name, "#{name}=")
<ide>
<del> attribute name, EnumType.new(name, enum_values)
<add> decorate_attribute_type(name, :enum) do |subtype|
<add> EnumType.new(name, enum_values, subtype)
<add> end
<ide>
<ide> _enum_methods_module.module_eval do
<ide> pairs = values.respond_to?(:each_pair) ? values.each_pair : values.each_with_index
<ide><path>activerecord/test/cases/enum_test.rb
<ide> def self.name; 'Book'; end
<ide> assert book.proposed?, "expected fixture to default to proposed status"
<ide> assert book.in_english?, "expected fixture to default to english language"
<ide> end
<add>
<add> test "uses default value from database on initialization" do
<add> book = Book.new
<add> assert book.proposed?
<add> end
<add>
<add> test "uses default value from database on initialization when using custom mapping" do
<add> book = Book.new
<add> assert book.hard?
<add> end
<ide> end
<ide><path>activerecord/test/models/book.rb
<ide> class Book < ActiveRecord::Base
<ide> enum author_visibility: [:visible, :invisible], _prefix: true
<ide> enum illustrator_visibility: [:visible, :invisible], _prefix: true
<ide> enum font_size: [:small, :medium, :large], _prefix: :with, _suffix: true
<add> enum cover: { hard: 'hard', soft: 'soft' }
<ide>
<ide> def published!
<ide> super
<ide><path>activerecord/test/schema/schema.rb
<ide> def except(adapter_names_to_exclude)
<ide> t.column :author_visibility, :integer, default: 0
<ide> t.column :illustrator_visibility, :integer, default: 0
<ide> t.column :font_size, :integer, default: 0
<add> t.column :cover, :string, default: 'hard'
<ide> end
<ide>
<ide> create_table :booleans, force: true do |t| | 4 |
Python | Python | update irnn example | df860fdb94c63cf7898315277fe951d1c0ba16a9 | <ide><path>examples/mnist_irnn.py
<ide> Optimizer is replaced with RMSprop which yields more stable and steady
<ide> improvement.
<ide>
<del> Reaches 0.93 train/test accuracy after 900 epochs (which roughly corresponds
<del> to 1687500 steps in the original paper.)
<add> Reaches 0.93 train/test accuracy after 900 epochs
<add> (which roughly corresponds to 1687500 steps in the original paper.)
<ide> '''
<ide>
<ide> batch_size = 32
<ide>
<ide> learning_rate = 1e-6
<ide> clip_norm = 1.0
<del>BPTT_truncate = 28*28
<ide>
<ide> # the data, shuffled and split between train and test sets
<ide> (X_train, y_train), (X_test, y_test) = mnist.load_data()
<ide> model.add(SimpleRNN(output_dim=hidden_units,
<ide> init=lambda shape: normal(shape, scale=0.001),
<ide> inner_init=lambda shape: identity(shape, scale=1.0),
<del> activation='relu', truncate_gradient=BPTT_truncate,
<del> input_shape=(None, 1)))
<add> activation='relu', input_shape=X_train.shape[1:]))
<ide> model.add(Dense(nb_classes))
<ide> model.add(Activation('softmax'))
<ide> rmsprop = RMSprop(lr=learning_rate)
<ide>
<ide> print('Compare to LSTM...')
<ide> model = Sequential()
<del>model.add(LSTM(hidden_units, input_shape=(None, 1)))
<add>model.add(LSTM(hidden_units, input_shape=X_train.shape[1:]))
<ide> model.add(Dense(nb_classes))
<ide> model.add(Activation('softmax'))
<ide> rmsprop = RMSprop(lr=learning_rate) | 1 |
Javascript | Javascript | fix instrumentation patterns | 0ddf21a77620b88ddd9a7f679c98f88df8a95b1a | <ide><path>packages/ember-metal/lib/instrumentation.js
<ide> Ember.Instrumentation.instrument = function(name, payload, callback, binding) {
<ide> };
<ide>
<ide> Ember.Instrumentation.subscribe = function(pattern, object) {
<del> var paths = pattern.split("."), path, regex = "^";
<add> var paths = pattern.split("."), path, regex = [];
<ide>
<ide> for (var i=0, l=paths.length; i<l; i++) {
<ide> path = paths[i];
<ide> if (path === "*") {
<del> regex = regex + "[^\\.]*";
<add> regex.push("[^\\.]*");
<ide> } else {
<del> regex = regex + path;
<add> regex.push(path);
<ide> }
<ide> }
<ide>
<add> regex = regex.join("\\.");
<ide> regex = regex + "(\\..*)?";
<ide>
<ide> var subscriber = {
<ide> pattern: pattern,
<del> regex: new RegExp(regex + "$"),
<add> regex: new RegExp("^" + regex + "$"),
<ide> object: object
<ide> };
<ide> | 1 |
Go | Go | use flags.ipvar() instead of custom type | ccb75439fff3f09ac4fa4daea787db5cb5438468 | <ide><path>cmd/dockerd/config.go
<ide> func installCommonConfigFlags(conf *config.Config, flags *pflag.FlagSet) error {
<ide> flags.Var(opts.NewListOptsRef(&conf.DNS, opts.ValidateIPAddress), "dns", "DNS server to use")
<ide> flags.Var(opts.NewNamedListOptsRef("dns-opts", &conf.DNSOptions, nil), "dns-opt", "DNS options to use")
<ide> flags.Var(opts.NewListOptsRef(&conf.DNSSearch, opts.ValidateDNSSearch), "dns-search", "DNS search domains to use")
<del> flags.Var(opts.NewIPOpt(&conf.HostGatewayIP, ""), "host-gateway-ip", "IP address that the special 'host-gateway' string in --add-host resolves to. Defaults to the IP address of the default bridge")
<add> flags.IPVar(&conf.HostGatewayIP, "host-gateway-ip", nil, "IP address that the special 'host-gateway' string in --add-host resolves to. Defaults to the IP address of the default bridge")
<ide> flags.Var(opts.NewNamedListOptsRef("labels", &conf.Labels, opts.ValidateLabel), "label", "Set key=value labels to the daemon")
<ide> flags.StringVar(&conf.LogConfig.Type, "log-driver", "json-file", "Default driver for container logs")
<ide> flags.Var(opts.NewNamedMapOpts("log-opts", conf.LogConfig.Config, nil), "log-opt", "Default log driver options for containers")
<ide><path>cmd/dockerd/config_unix.go
<ide> package main
<ide>
<ide> import (
<add> "net"
<ide> "os/exec"
<ide> "path/filepath"
<ide>
<ide> func installConfigFlags(conf *config.Config, flags *pflag.FlagSet) error {
<ide> flags.StringVarP(&conf.BridgeConfig.Iface, "bridge", "b", "", "Attach containers to a network bridge")
<ide> flags.StringVar(&conf.BridgeConfig.FixedCIDR, "fixed-cidr", "", "IPv4 subnet for fixed IPs")
<ide> flags.StringVar(&conf.BridgeConfig.FixedCIDRv6, "fixed-cidr-v6", "", "IPv6 subnet for fixed IPs")
<del> flags.Var(opts.NewIPOpt(&conf.BridgeConfig.DefaultGatewayIPv4, ""), "default-gateway", "Container default gateway IPv4 address")
<del> flags.Var(opts.NewIPOpt(&conf.BridgeConfig.DefaultGatewayIPv6, ""), "default-gateway-v6", "Container default gateway IPv6 address")
<add> flags.IPVar(&conf.BridgeConfig.DefaultGatewayIPv4, "default-gateway", nil, "Container default gateway IPv4 address")
<add> flags.IPVar(&conf.BridgeConfig.DefaultGatewayIPv6, "default-gateway-v6", nil, "Container default gateway IPv6 address")
<ide> flags.BoolVar(&conf.BridgeConfig.InterContainerCommunication, "icc", true, "Enable inter-container communication")
<del> flags.Var(opts.NewIPOpt(&conf.BridgeConfig.DefaultIP, "0.0.0.0"), "ip", "Default IP when binding container ports")
<add> flags.IPVar(&conf.BridgeConfig.DefaultIP, "ip", net.IPv4zero, "Default IP when binding container ports")
<ide> flags.BoolVar(&conf.BridgeConfig.EnableUserlandProxy, "userland-proxy", true, "Use userland proxy for loopback traffic")
<ide> defaultUserlandProxyPath := ""
<ide> if rootless.RunningWithRootlessKit() { | 2 |
PHP | PHP | add more tests | ec30b5ecf94071058ac19e88c93126d7c0275bdf | <ide><path>tests/Support/SupportStrTest.php
<ide> public function testStrBeforeLast()
<ide>
<ide> public function testStrBetween()
<ide> {
<add> $this->assertSame('abc', Str::between('abc', '', 'c'));
<add> $this->assertSame('abc', Str::between('abc', 'a', ''));
<add> $this->assertSame('abc', Str::between('abc', '', ''));
<add> $this->assertSame('b', Str::between('abc', 'a', 'c'));
<add> $this->assertSame('b', Str::between('dddabc', 'a', 'c'));
<add> $this->assertSame('b', Str::between('abcddd', 'a', 'c'));
<add> $this->assertSame('b', Str::between('dddabcddd', 'a', 'c'));
<ide> $this->assertSame('nn', Str::between('hannah', 'ha', 'ah'));
<add> $this->assertSame('a]ab[b', Str::between('[a]ab[b]', '[', ']'));
<ide> $this->assertSame('foo', Str::between('foofoobar', 'foo', 'bar'));
<ide> $this->assertSame('bar', Str::between('foobarbar', 'foo', 'bar'));
<ide> } | 1 |
PHP | PHP | fix bug with root routing | 597feed4a740ba54cd97c0e48922886ccf1f4516 | <ide><path>laravel/routing/router.php
<ide> protected static function root($identifier, $controller, $root)
<ide> // to point the pattern to the controller's index method.
<ide> $pattern = trim($root.'/'.$home, '/') ?: '/';
<ide>
<del> $attributes = array('uses' => "{$identifier}@(:1)", 'defaults' => 'index');
<add> $attributes = array('uses' => "{$identifier}@index");
<ide>
<ide> static::register('*', $pattern, $attributes);
<ide> } | 1 |
PHP | PHP | add array mailer | 672f626da1788a46bf6bc830d15725ee3ae668d8 | <ide><path>config/mail.php
<ide> 'transport' => 'log',
<ide> 'channel' => env('MAIL_LOG_CHANNEL'),
<ide> ],
<add>
<add> 'array' => [
<add> 'transport' => 'array',
<add> ],
<ide> ],
<ide>
<ide> /* | 1 |
PHP | PHP | upgrade shell - refactoring | 3dddcf87572e99406467bc54a7c6eac43ee92dd1 | <ide><path>cake/console/shells/upgrade.php
<ide> class UpgradeShell extends Shell {
<ide> * @return void
<ide> */
<ide> function helpers() {
<del> if (!empty($this->params['plugin'])) {
<del> $this->_paths = array(App::pluginPath($this->params['plugin']));
<del> } else {
<del> $this->_paths = array(
<del> VIEWS
<del> );
<del> }
<add> $this->_paths = array(
<add> VIEWS
<add> );
<ide>
<ide> $patterns = array();
<ide> foreach(App::objects('helper') as $helper) {
<ide> function helpers() {
<ide> );
<ide> }
<ide>
<del> $this->_findFiles();
<del> foreach ($this->_files as $file) {
<del> $this->out('Updating ' . $file . '...', 1, Shell::VERBOSE);
<del> $this->_updateFile($file, $patterns);
<del> }
<add> $this->_filesRegexpUpdate($patterns);
<ide> }
<ide>
<ide> /**
<ide> function helpers() {
<ide> * @return void
<ide> */
<ide> function i18n() {
<del> if (!empty($this->params['plugin'])) {
<del> $this->_paths = array(App::pluginPath($this->params['plugin']));
<del> } else {
<del> $this->_paths = array(
<del> CONTROLLERS,
<del> MODELS,
<del> VIEWS
<del> );
<del> }
<add> $this->_paths = array(
<add> CONTROLLERS,
<add> MODELS,
<add> VIEWS
<add> );
<ide>
<ide> $patterns = array(
<ide> array(
<ide> function i18n() {
<ide> array('__*(*, true) to __*(*)', '/(__[a-z]*\(.*?)(,\s*true)(\))/', '\1\3')
<ide> );
<ide>
<add> $this->_filesRegexpUpdate($patterns);
<add> }
<add>
<add> protected function _filesRegexpUpdate($patterns) {
<add> if (!empty($this->params['plugin'])) {
<add> $this->_paths = array(App::pluginPath($this->params['plugin']));
<add> }
<add>
<ide> $this->_findFiles();
<ide> foreach ($this->_files as $file) {
<ide> $this->out('Updating ' . $file . '...', 1, Shell::VERBOSE); | 1 |
Javascript | Javascript | use consistent timeouts | c3aa86d6784af77121e5e98e75c6dc12e5bb39ec | <ide><path>test/parallel/test-http-server-headers-timeout-delayed-headers.js
<ide> const { connect } = require('net');
<ide> // pauses before start sending the request.
<ide>
<ide> let sendDelayedRequestHeaders;
<del>const headersTimeout = common.platformTimeout(1000);
<add>const headersTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout,
<ide> requestTimeout: 0,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: headersTimeout / 4,
<ide> }, common.mustNotCall());
<ide> server.on('connection', common.mustCall(() => {
<ide> assert.strictEqual(typeof sendDelayedRequestHeaders, 'function');
<ide><path>test/parallel/test-http-server-headers-timeout-interrupted-headers.js
<ide> const { connect } = require('net');
<ide> // pauses sending in the middle of a header.
<ide>
<ide> let sendDelayedRequestHeaders;
<del>const headersTimeout = common.platformTimeout(1000);
<add>const headersTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout,
<ide> requestTimeout: 0,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: headersTimeout / 4,
<ide> }, common.mustNotCall());
<ide> server.on('connection', common.mustCall(() => {
<ide> assert.strictEqual(typeof sendDelayedRequestHeaders, 'function');
<ide><path>test/parallel/test-http-server-headers-timeout-pipelining.js
<ide> const { connect } = require('net');
<ide> // after server.requestTimeout if the client
<ide> // does not complete a request when using pipelining.
<ide>
<del>const headersTimeout = common.platformTimeout(1000);
<add>const headersTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout,
<ide> requestTimeout: 0,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: headersTimeout / 4,
<ide> }, common.mustCallAtLeast((req, res) => {
<ide> res.writeHead(200, { 'Content-Type': 'text/plain' });
<ide> res.end();
<ide><path>test/parallel/test-http-server-request-timeout-delayed-body.js
<ide> const { connect } = require('net');
<ide> // pauses before start sending the body.
<ide>
<ide> let sendDelayedRequestBody;
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4,
<ide> }, common.mustCall((req, res) => {
<ide> let body = '';
<ide> req.setEncoding('utf-8');
<ide><path>test/parallel/test-http-server-request-timeout-delayed-headers.js
<ide> const { connect } = require('net');
<ide> // pauses before start sending the request.
<ide>
<ide> let sendDelayedRequestHeaders;
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4,
<ide> }, common.mustNotCall());
<ide> server.on('connection', common.mustCall(() => {
<ide> assert.strictEqual(typeof sendDelayedRequestHeaders, 'function');
<ide><path>test/parallel/test-http-server-request-timeout-interrupted-body.js
<ide> const { connect } = require('net');
<ide> // pauses sending in the middle of the body.
<ide>
<ide> let sendDelayedRequestBody;
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4,
<ide> }, common.mustCall((req, res) => {
<ide> let body = '';
<ide> req.setEncoding('utf-8');
<ide><path>test/parallel/test-http-server-request-timeout-interrupted-headers.js
<ide> const { connect } = require('net');
<ide> // pauses sending in the middle of a header.
<ide>
<ide> let sendDelayedRequestHeaders;
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4,
<ide> }, common.mustNotCall());
<ide> server.on('connection', common.mustCall(() => {
<ide> assert.strictEqual(typeof sendDelayedRequestHeaders, 'function');
<ide><path>test/parallel/test-http-server-request-timeout-keepalive.js
<ide> server.listen(0, common.mustCall(() => {
<ide> performRequestWithDelay(
<ide> client,
<ide> requestTimeout / 5,
<del> requestTimeout,
<add> requestTimeout * 2,
<ide> true
<ide> );
<ide> }, defer).unref();
<ide> server.listen(0, common.mustCall(() => {
<ide> client.on('error', errOrEnd);
<ide> client.on('end', errOrEnd);
<ide>
<del> // Perform a second request expected to finish before requestTimeout
<add> // Perform a first request which is completed immediately
<ide> performRequestWithDelay(
<ide> client,
<ide> requestTimeout / 5,
<ide><path>test/parallel/test-http-server-request-timeout-pipelining.js
<ide> const { connect } = require('net');
<ide> // after server.requestTimeout if the client
<ide> // does not complete a request when using pipelining.
<ide>
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4
<ide> }, common.mustCallAtLeast((req, res) => {
<ide> res.writeHead(200, { 'Content-Type': 'text/plain' });
<ide> res.end();
<ide> server.listen(0, common.mustCall(() => {
<ide> // Complete the request
<ide> setTimeout(() => {
<ide> client.write('close\r\n\r\n');
<del> }, requestTimeout * 1.5).unref();
<add> }, requestTimeout * 2).unref();
<ide> }));
<ide><path>test/parallel/test-http-server-request-timeout-upgrade.js
<ide> const { connect } = require('net');
<ide> // This test validates that the requestTimeoout
<ide> // is disabled after the connection is upgraded.
<ide> let sendDelayedRequestHeaders;
<del>const requestTimeout = common.platformTimeout(1000);
<add>const requestTimeout = common.platformTimeout(2000);
<ide> const server = createServer({
<ide> headersTimeout: 0,
<ide> requestTimeout,
<ide> keepAliveTimeout: 0,
<del> connectionsCheckingInterval: common.platformTimeout(250),
<add> connectionsCheckingInterval: requestTimeout / 4
<ide> }, common.mustNotCall());
<ide> server.on('connection', common.mustCall(() => {
<ide> assert.strictEqual(typeof sendDelayedRequestHeaders, 'function');
<ide> server.listen(0, common.mustCall(() => {
<ide> setTimeout(() => {
<ide> client.write('12345678901234567890');
<ide> client.end();
<del> }, common.platformTimeout(2000)).unref();
<add> }, requestTimeout * 2).unref();
<ide> });
<ide> }));
<ide><path>test/parallel/test-http-server-request-timeouts-mixed.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const { createServer } = require('http');
<add>const { connect } = require('net');
<add>
<add>// This test validates that request are correct checked for both requests and headers timeout in various situations.
<add>
<add>const requestBodyPart1 = 'POST / HTTP/1.1\r\nContent-Length: 20\r\n';
<add>const requestBodyPart2 = 'Connection: close\r\n\r\n1234567890';
<add>const requestBodyPart3 = '1234567890';
<add>
<add>const responseOk = 'HTTP/1.1 200 OK\r\n';
<add>const responseTimeout = 'HTTP/1.1 408 Request Timeout\r\n';
<add>
<add>const headersTimeout = common.platformTimeout(2000);
<add>const connectionsCheckingInterval = headersTimeout / 4;
<add>
<add>const server = createServer({
<add> headersTimeout,
<add> requestTimeout: headersTimeout * 2,
<add> keepAliveTimeout: 0,
<add> connectionsCheckingInterval
<add>}, common.mustCall((req, res) => {
<add> req.resume();
<add>
<add> req.on('end', () => {
<add> res.writeHead(200, { 'Content-Type': 'text/plain' });
<add> res.end();
<add> });
<add>}, 4));
<add>
<add>assert.strictEqual(server.headersTimeout, headersTimeout);
<add>assert.strictEqual(server.requestTimeout, headersTimeout * 2);
<add>
<add>let i = 0;
<add>function createClient(server) {
<add> const request = {
<add> index: i++,
<add> client: connect(server.address().port),
<add> response: '',
<add> completed: false
<add> };
<add>
<add> request.client.on('data', common.mustCallAtLeast((chunk) => {
<add> request.response += chunk.toString('utf-8');
<add> }));
<add>
<add> request.client.on('end', common.mustCall(() => {
<add> request.completed = true;
<add> }));
<add>
<add> request.client.on('error', common.mustNotCall());
<add>
<add> request.client.resume();
<add>
<add> return request;
<add>}
<add>
<add>server.listen(0, common.mustCall(() => {
<add> const request1 = createClient(server);
<add> let request2;
<add> let request3;
<add> let request4;
<add> let request5;
<add>
<add> // Send the first request and stop before the body
<add> request1.client.write(requestBodyPart1);
<add>
<add> // After a little while send two new requests
<add> setTimeout(() => {
<add> request2 = createClient(server);
<add> request3 = createClient(server);
<add>
<add> // Send the second request, stop in the middle of the headers
<add> request2.client.write(requestBodyPart1);
<add> // Send the second request, stop in the middle of the headers
<add> request3.client.write(requestBodyPart1);
<add> }, headersTimeout * 0.2);
<add>
<add> // After another little while send the last two new requests
<add> setTimeout(() => {
<add> request4 = createClient(server);
<add> request5 = createClient(server);
<add>
<add> // Send the fourth request, stop in the middle of the headers
<add> request4.client.write(requestBodyPart1);
<add> // Send the fifth request, stop in the middle of the headers
<add> request5.client.write(requestBodyPart1);
<add> }, headersTimeout * 0.6);
<add>
<add> setTimeout(() => {
<add> // Finish the first request
<add> request1.client.write(requestBodyPart2 + requestBodyPart3);
<add>
<add> // Complete headers for all requests but second
<add> request3.client.write(requestBodyPart2);
<add> request4.client.write(requestBodyPart2);
<add> request5.client.write(requestBodyPart2);
<add> }, headersTimeout * 0.8);
<add>
<add> setTimeout(() => {
<add> // After the first timeout, the first request should have been completed and second timedout
<add> assert(request1.completed);
<add> assert(request2.completed);
<add> assert(!request3.completed);
<add> assert(!request4.completed);
<add> assert(!request5.completed);
<add>
<add> assert(request1.response.startsWith(responseOk));
<add> assert(request2.response.startsWith(responseTimeout)); // It is expired due to headersTimeout
<add> }, headersTimeout * 1.2 + connectionsCheckingInterval);
<add>
<add> setTimeout(() => {
<add> // Complete the body for the fourth request
<add> request4.client.write(requestBodyPart3);
<add> }, headersTimeout * 1.5);
<add>
<add> setTimeout(() => {
<add> // All request should be completed now, either with 200 or 408
<add> assert(request3.completed);
<add> assert(request4.completed);
<add> assert(request5.completed);
<add>
<add> assert(request3.response.startsWith(responseTimeout)); // It is expired due to requestTimeout
<add> assert(request4.response.startsWith(responseOk));
<add> assert(request5.response.startsWith(responseTimeout)); // It is expired due to requestTimeout
<add> server.close();
<add> }, headersTimeout * 3 + connectionsCheckingInterval);
<add>})); | 11 |
Mixed | Javascript | remove web-worker example | 6c27c5f22e0ae94663d5e11a5e2a4fd8adf4b4da | <ide><path>examples/web-worker/README.md
<del>
<del># example.js
<del>
<del>``` javascript
<del>var Worker = require("worker-loader?name=hash.worker.js!./worker");
<del>var worker = new Worker;
<del>worker.postMessage("b");
<del>worker.onmessage = function(event) {
<del> var templateB = event.data; // "This text was generated by template B"
<del>}
<del>```
<del>
<del># worker.js
<del>
<del>``` javascript
<del>onmessage = function(event) {
<del> var template = event.data;
<del> require(["../require.context/templates/" + event.data], function(tmpl) {
<del> postMessage(tmpl());
<del> });
<del>}
<del>```
<del>
<del># dist/output.js
<del>
<del><details><summary><code>/******/ (function(modules) { /* webpackBootstrap */ })</code></summary>
<del>
<del>``` javascript
<del>/******/ (function(modules) { // webpackBootstrap
<del>/******/ // The module cache
<del>/******/ var installedModules = {};
<del>/******/
<del>/******/ // The require function
<del>/******/ function __webpack_require__(moduleId) {
<del>/******/
<del>/******/ // Check if module is in cache
<del>/******/ if(installedModules[moduleId]) {
<del>/******/ return installedModules[moduleId].exports;
<del>/******/ }
<del>/******/ // Create a new module (and put it into the cache)
<del>/******/ var module = installedModules[moduleId] = {
<del>/******/ i: moduleId,
<del>/******/ l: false,
<del>/******/ exports: {}
<del>/******/ };
<del>/******/
<del>/******/ // Execute the module function
<del>/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
<del>/******/
<del>/******/ // Flag the module as loaded
<del>/******/ module.l = true;
<del>/******/
<del>/******/ // Return the exports of the module
<del>/******/ return module.exports;
<del>/******/ }
<del>/******/
<del>/******/
<del>/******/ // expose the modules object (__webpack_modules__)
<del>/******/ __webpack_require__.m = modules;
<del>/******/
<del>/******/ // expose the module cache
<del>/******/ __webpack_require__.c = installedModules;
<del>/******/
<del>/******/ // define getter function for harmony exports
<del>/******/ __webpack_require__.d = function(exports, name, getter) {
<del>/******/ if(!__webpack_require__.o(exports, name)) {
<del>/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
<del>/******/ }
<del>/******/ };
<del>/******/
<del>/******/ // define __esModule on exports
<del>/******/ __webpack_require__.r = function(exports) {
<del>/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
<del>/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
<del>/******/ }
<del>/******/ Object.defineProperty(exports, '__esModule', { value: true });
<del>/******/ };
<del>/******/
<del>/******/ // create a fake namespace object
<del>/******/ // mode & 1: value is a module id, require it
<del>/******/ // mode & 2: merge all properties of value into the ns
<del>/******/ // mode & 4: return value when already ns object
<del>/******/ // mode & 8|1: behave like require
<del>/******/ __webpack_require__.t = function(value, mode) {
<del>/******/ if(mode & 1) value = __webpack_require__(value);
<del>/******/ if(mode & 8) return value;
<del>/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
<del>/******/ var ns = Object.create(null);
<del>/******/ __webpack_require__.r(ns);
<del>/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
<del>/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
<del>/******/ return ns;
<del>/******/ };
<del>/******/
<del>/******/ // getDefaultExport function for compatibility with non-harmony modules
<del>/******/ __webpack_require__.n = function(module) {
<del>/******/ var getter = module && module.__esModule ?
<del>/******/ function getDefault() { return module['default']; } :
<del>/******/ function getModuleExports() { return module; };
<del>/******/ __webpack_require__.d(getter, 'a', getter);
<del>/******/ return getter;
<del>/******/ };
<del>/******/
<del>/******/ // Object.prototype.hasOwnProperty.call
<del>/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
<del>/******/
<del>/******/ // __webpack_public_path__
<del>/******/ __webpack_require__.p = "dist/";
<del>/******/
<del>/******/
<del>/******/ // Load entry module and return exports
<del>/******/ return __webpack_require__(__webpack_require__.s = 0);
<del>/******/ })
<del>/************************************************************************/
<del>```
<del>
<del></details>
<del>
<del>``` javascript
<del>/******/ ([
<del>/* 0 */
<del>/*!********************!*\
<del> !*** ./example.js ***!
<del> \********************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports, __webpack_require__) {
<del>
<del>var Worker = __webpack_require__(/*! worker-loader?name=hash.worker.js!./worker */ 1);
<del>var worker = new Worker;
<del>worker.postMessage("b");
<del>worker.onmessage = function(event) {
<del> var templateB = event.data; // "This text was generated by template B"
<del>}
<del>
<del>
<del>/***/ }),
<del>/* 1 */
<del>/*!****************************************************************************************!*\
<del> !*** (webpack)/node_modules/worker-loader/dist/cjs.js?name=hash.worker.js!./worker.js ***!
<del> \****************************************************************************************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports, __webpack_require__) {
<del>
<del>module.exports = function() {
<del> return new Worker(__webpack_require__.p + "hash.worker.js");
<del>};
<del>
<del>/***/ })
<del>/******/ ]);
<del>```
<del>
<del># dist/[hash].worker.js
<del>
<del>``` javascript
<del>/******/ (function(modules) { // webpackBootstrap
<del>/******/ window["webpackChunk"] = function webpackChunkCallback(chunkIds, moreModules) {
<del>/******/ for(var moduleId in moreModules) {
<del>/******/ modules[moduleId] = moreModules[moduleId];
<del>/******/ }
<del>/******/ while(chunkIds.length)
<del>/******/ installedChunks[chunkIds.pop()] = 1;
<del>/******/ };
<del>/******/
<del>/******/ // The module cache
<del>/******/ var installedModules = {};
<del>/******/
<del>/******/ // object to store loaded chunks
<del>/******/ // "1" means "already loaded"
<del>/******/ var installedChunks = {
<del>/******/ 0: 1
<del>/******/ };
<del>/******/
<del>/******/ // The require function
<del>/******/ function __webpack_require__(moduleId) {
<del>/******/
<del>/******/ // Check if module is in cache
<del>/******/ if(installedModules[moduleId]) {
<del>/******/ return installedModules[moduleId].exports;
<del>/******/ }
<del>/******/ // Create a new module (and put it into the cache)
<del>/******/ var module = installedModules[moduleId] = {
<del>/******/ i: moduleId,
<del>/******/ l: false,
<del>/******/ exports: {}
<del>/******/ };
<del>/******/
<del>/******/ // Execute the module function
<del>/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
<del>/******/
<del>/******/ // Flag the module as loaded
<del>/******/ module.l = true;
<del>/******/
<del>/******/ // Return the exports of the module
<del>/******/ return module.exports;
<del>/******/ }
<del>/******/
<del>/******/ // This file contains only the entry chunk.
<del>/******/ // The chunk loading function for additional chunks
<del>/******/ __webpack_require__.e = function requireEnsure(chunkId) {
<del>/******/ var promises = [];
<del>/******/ promises.push(Promise.resolve().then(function() {
<del>/******/ // "1" is the signal for "already loaded"
<del>/******/ if(!installedChunks[chunkId]) {
<del>/******/ importScripts("" + chunkId + ".hash.worker.js");
<del>/******/ }
<del>/******/ }));
<del>/******/ return Promise.all(promises);
<del>/******/ };
<del>/******/
<del>/******/ // expose the modules object (__webpack_modules__)
<del>/******/ __webpack_require__.m = modules;
<del>/******/
<del>/******/ // expose the module cache
<del>/******/ __webpack_require__.c = installedModules;
<del>/******/
<del>/******/ // define getter function for harmony exports
<del>/******/ __webpack_require__.d = function(exports, name, getter) {
<del>/******/ if(!__webpack_require__.o(exports, name)) {
<del>/******/ Object.defineProperty(exports, name, { enumerable: true, get: getter });
<del>/******/ }
<del>/******/ };
<del>/******/
<del>/******/ // define __esModule on exports
<del>/******/ __webpack_require__.r = function(exports) {
<del>/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
<del>/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
<del>/******/ }
<del>/******/ Object.defineProperty(exports, '__esModule', { value: true });
<del>/******/ };
<del>/******/
<del>/******/ // create a fake namespace object
<del>/******/ // mode & 1: value is a module id, require it
<del>/******/ // mode & 2: merge all properties of value into the ns
<del>/******/ // mode & 4: return value when already ns object
<del>/******/ // mode & 8|1: behave like require
<del>/******/ __webpack_require__.t = function(value, mode) {
<del>/******/ if(mode & 1) value = __webpack_require__(value);
<del>/******/ if(mode & 8) return value;
<del>/******/ if((mode & 4) && typeof value === 'object' && value && value.__esModule) return value;
<del>/******/ var ns = Object.create(null);
<del>/******/ __webpack_require__.r(ns);
<del>/******/ Object.defineProperty(ns, 'default', { enumerable: true, value: value });
<del>/******/ if(mode & 2 && typeof value != 'string') for(var key in value) __webpack_require__.d(ns, key, function(key) { return value[key]; }.bind(null, key));
<del>/******/ return ns;
<del>/******/ };
<del>/******/
<del>/******/ // getDefaultExport function for compatibility with non-harmony modules
<del>/******/ __webpack_require__.n = function(module) {
<del>/******/ var getter = module && module.__esModule ?
<del>/******/ function getDefault() { return module['default']; } :
<del>/******/ function getModuleExports() { return module; };
<del>/******/ __webpack_require__.d(getter, 'a', getter);
<del>/******/ return getter;
<del>/******/ };
<del>/******/
<del>/******/ // Object.prototype.hasOwnProperty.call
<del>/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
<del>/******/
<del>/******/ // __webpack_public_path__
<del>/******/ __webpack_require__.p = "dist/";
<del>/******/
<del>/******/
<del>/******/ // Load entry module and return exports
<del>/******/ return __webpack_require__(__webpack_require__.s = 0);
<del>/******/ })
<del>/************************************************************************/
<del>/******/ ([
<del>/* 0 */
<del>/*!*******************!*\
<del> !*** ./worker.js ***!
<del> \*******************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports, __webpack_require__) {
<del>
<del>onmessage = function(event) {
<del> var template = event.data;
<del> __webpack_require__.e(/*! AMD require */ 1).then(function() { var __WEBPACK_AMD_REQUIRE_ARRAY__ = [__webpack_require__(1)("./" + event.data)]; (function(tmpl) {
<del> postMessage(tmpl());
<del> }).apply(null, __WEBPACK_AMD_REQUIRE_ARRAY__);}).catch(__webpack_require__.oe);
<del>}
<del>
<del>
<del>/***/ })
<del>/******/ ]);
<del>```
<del>
<del># dist/1.[hash].worker.js
<del>
<del>``` javascript
<del>window["webpackChunk"]([1],[
<del>/* 0 */,
<del>/* 1 */
<del>/*!**************************************************!*\
<del> !*** ../require.context/templates sync ^\.\/.*$ ***!
<del> \**************************************************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports, __webpack_require__) {
<del>
<del>var map = {
<del> "./a": 2,
<del> "./a.js": 2,
<del> "./b": 3,
<del> "./b.js": 3,
<del> "./c": 4,
<del> "./c.js": 4
<del>};
<del>
<del>
<del>function webpackContext(req) {
<del> var id = webpackContextResolve(req);
<del> return __webpack_require__(id);
<del>}
<del>function webpackContextResolve(req) {
<del> var id = map[req];
<del> if(!(id + 1)) { // check for number or string
<del> var e = new Error("Cannot find module '" + req + "'");
<del> e.code = 'MODULE_NOT_FOUND';
<del> throw e;
<del> }
<del> return id;
<del>}
<del>webpackContext.keys = function webpackContextKeys() {
<del> return Object.keys(map);
<del>};
<del>webpackContext.resolve = webpackContextResolve;
<del>module.exports = webpackContext;
<del>webpackContext.id = 1;
<del>
<del>/***/ }),
<del>/* 2 */
<del>/*!*****************************************!*\
<del> !*** ../require.context/templates/a.js ***!
<del> \*****************************************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports) {
<del>
<del>module.exports = function() {
<del> return "This text was generated by template A";
<del>}
<del>
<del>/***/ }),
<del>/* 3 */
<del>/*!*****************************************!*\
<del> !*** ../require.context/templates/b.js ***!
<del> \*****************************************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports) {
<del>
<del>module.exports = function() {
<del> return "This text was generated by template B";
<del>}
<del>
<del>/***/ }),
<del>/* 4 */
<del>/*!*****************************************!*\
<del> !*** ../require.context/templates/c.js ***!
<del> \*****************************************/
<del>/*! no static exports found */
<del>/***/ (function(module, exports) {
<del>
<del>module.exports = function() {
<del> return "This text was generated by template C";
<del>}
<del>
<del>/***/ })
<del>]);
<del>```
<del>
<del># Info
<del>
<del>## Unoptimized
<del>
<del>```
<del>Hash: 0a1b2c3d4e5f6a7b8c9d
<del>Version: webpack 5.0.0-next
<del> Asset Size Chunks Chunk Names
<del>1.hash.worker.js 1.8 KiB [emitted]
<del> hash.worker.js 4.99 KiB [emitted]
<del> output.js 4.43 KiB 0 [emitted] main
<del>Entrypoint main = output.js
<del>chunk {0} output.js (main) 332 bytes [entry] [rendered]
<del> > .\example.js main
<del> [0] ./example.js 235 bytes {0} [built]
<del> [used exports unknown]
<del> entry .\example.js main
<del> [1] (webpack)/node_modules/worker-loader/dist/cjs.js?name=hash.worker.js!./worker.js 97 bytes {0} [not cacheable] [built]
<del> [used exports unknown]
<del> cjs require worker-loader?name=hash.worker.js!./worker [0] ./example.js 1:13-66
<del>Child worker:
<del> Asset Size Chunks Chunk Names
<del> 1.hash.worker.js 1.8 KiB 1 [emitted]
<del> hash.worker.js 4.99 KiB 0 [emitted] main
<del> Entrypoint main = hash.worker.js
<del> chunk {0} hash.worker.js (main) 168 bytes >{1}< [entry] [rendered]
<del> > !!.\worker.js main
<del> [0] ./worker.js 168 bytes {0} [built]
<del> [used exports unknown]
<del> entry !!.\worker.js main
<del> chunk {1} 1.hash.worker.js 463 bytes <{0}> [rendered]
<del> > [0] ./worker.js 3:1-5:3
<del> [1] ../require.context/templates sync ^\.\/.*$ 217 bytes {1} [built]
<del> [used exports unknown]
<del> amd require context ../require.context/templates [0] ./worker.js 3:1-5:3
<del> [2] ../require.context/templates/a.js 82 bytes {1} [optional] [built]
<del> [used exports unknown]
<del> context element ./a [1] ../require.context/templates sync ^\.\/.*$ ./a
<del> context element ./a.js [1] ../require.context/templates sync ^\.\/.*$ ./a.js
<del> [3] ../require.context/templates/b.js 82 bytes {1} [optional] [built]
<del> [used exports unknown]
<del> context element ./b [1] ../require.context/templates sync ^\.\/.*$ ./b
<del> context element ./b.js [1] ../require.context/templates sync ^\.\/.*$ ./b.js
<del> [4] ../require.context/templates/c.js 82 bytes {1} [optional] [built]
<del> [used exports unknown]
<del> context element ./c [1] ../require.context/templates sync ^\.\/.*$ ./c
<del> context element ./c.js [1] ../require.context/templates sync ^\.\/.*$ ./c.js
<del>```
<del>
<del>## Production mode
<del>
<del>```
<del>Hash: 0a1b2c3d4e5f6a7b8c9d
<del>Version: webpack 5.0.0-next
<del> Asset Size Chunks Chunk Names
<del>1.hash.worker.js 593 bytes [emitted]
<del> hash.worker.js 1.27 KiB [emitted]
<del> output.js 1.06 KiB 0 [emitted] main
<del>Entrypoint main = output.js
<del>chunk {0} output.js (main) 332 bytes [entry] [rendered]
<del> > .\example.js main
<del> [0] ./example.js 235 bytes {0} [built]
<del> entry .\example.js main
<del> [1] (webpack)/node_modules/worker-loader/dist/cjs.js?name=hash.worker.js!./worker.js 97 bytes {0} [not cacheable] [built]
<del> cjs require worker-loader?name=hash.worker.js!./worker [0] ./example.js 1:13-66
<del>Child worker:
<del> Asset Size Chunks Chunk Names
<del> 1.hash.worker.js 593 bytes 1 [emitted]
<del> hash.worker.js 1.27 KiB 0 [emitted] main
<del> Entrypoint main = hash.worker.js
<del> chunk {0} hash.worker.js (main) 168 bytes >{1}< [entry] [rendered]
<del> > !!.\worker.js main
<del> [0] ./worker.js 168 bytes {0} [built]
<del> entry !!.\worker.js main
<del> chunk {1} 1.hash.worker.js 463 bytes <{0}> [rendered]
<del> > [0] ./worker.js 3:1-5:3
<del> [1] ../require.context/templates sync ^\.\/.*$ 217 bytes {1} [built]
<del> amd require context ../require.context/templates [0] ./worker.js 3:1-5:3
<del> [2] ../require.context/templates/a.js 82 bytes {1} [optional] [built]
<del> context element ./a [1] ../require.context/templates sync ^\.\/.*$ ./a
<del> context element ./a.js [1] ../require.context/templates sync ^\.\/.*$ ./a.js
<del> [3] ../require.context/templates/b.js 82 bytes {1} [optional] [built]
<del> context element ./b [1] ../require.context/templates sync ^\.\/.*$ ./b
<del> context element ./b.js [1] ../require.context/templates sync ^\.\/.*$ ./b.js
<del> [4] ../require.context/templates/c.js 82 bytes {1} [optional] [built]
<del> context element ./c [1] ../require.context/templates sync ^\.\/.*$ ./c
<del> context element ./c.js [1] ../require.context/templates sync ^\.\/.*$ ./c.js
<del>```
<ide><path>examples/web-worker/build.js
<del>require("../build-common");
<ide>\ No newline at end of file
<ide><path>examples/web-worker/example.js
<del>var Worker = require("worker-loader?name=hash.worker.js!./worker");
<del>var worker = new Worker;
<del>worker.postMessage("b");
<del>worker.onmessage = function(event) {
<del> var templateB = event.data; // "This text was generated by template B"
<del>}
<ide><path>examples/web-worker/template.md
<del>
<del># example.js
<del>
<del>``` javascript
<del>{{example.js}}
<del>```
<del>
<del># worker.js
<del>
<del>``` javascript
<del>{{worker.js}}
<del>```
<del>
<del># dist/output.js
<del>
<del>``` javascript
<del>{{dist/output.js}}
<del>```
<del>
<del># dist/[hash].worker.js
<del>
<del>``` javascript
<del>{{dist/hash.worker.js}}
<del>```
<del>
<del># dist/1.[hash].worker.js
<del>
<del>``` javascript
<del>{{dist/1.hash.worker.js}}
<del>```
<del>
<del># Info
<del>
<del>## Unoptimized
<del>
<del>```
<del>{{stdout}}
<del>```
<del>
<del>## Production mode
<del>
<del>```
<del>{{production:stdout}}
<del>```
<ide><path>examples/web-worker/webpack.config.js
<del>var webpack = require("../../");
<del>module.exports = {
<del> // mode: "development" || "production",
<del> plugins: [
<del> new webpack.LoaderOptionsPlugin({
<del> options: {
<del> worker: {
<del> output: {
<del> filename: "hash.worker.js",
<del> chunkFilename: "[id].hash.worker.js"
<del> }
<del> }
<del> }
<del> })
<del> ],
<del> optimization: {
<del> chunkIds: "total-size" // To keep filename consistent between different modes (for example building only)
<del> }
<del>};
<ide><path>examples/web-worker/worker.js
<del>onmessage = function(event) {
<del> var template = event.data;
<del> require(["../require.context/templates/" + event.data], function(tmpl) {
<del> postMessage(tmpl());
<del> });
<del>} | 6 |
Javascript | Javascript | fix style issue | c3c7be5eb59252da8ba079971d50656f4e8ce44f | <ide><path>test/locale/my.js
<ide> exports['locale:my'] = {
<ide> ],
<ide> b = moment(new Date(2010, 1, 14, 15, 25, 50, 125)),
<ide> i;
<del> test.expect( a.length );
<add> test.expect(a.length);
<ide> for (i = 0; i < a.length; i++) {
<ide> test.equal(b.format(a[i][0]), a[i][1], a[i][0] + ' ---> ' + a[i][1]);
<ide> } | 1 |
Text | Text | remove contacts of exgoogler | ba72997d46b3c5fd6f5998394e04224788ec33b2 | <ide><path>research/differential_privacy/README.md
<ide> <font size=4><b>Deep Learning with Differential Privacy</b></font>
<ide>
<del>Open Sourced By: Xin Pan ([email protected], github: panyx0718)
<add>Open Sourced By: Xin Pan
<ide>
<ide>
<ide> ### Introduction for [dp_sgd/README.md](dp_sgd/README.md)
<ide><path>research/lm_1b/README.md
<ide> <b>Authors:</b>
<ide>
<ide> Oriol Vinyals ([email protected], github: OriolVinyals),
<del>Xin Pan ([email protected], github: panyx0718)
<add>Xin Pan
<ide>
<ide> <b>Paper Authors:</b>
<ide>
<ide><path>research/next_frame_prediction/README.md
<ide> This is an implementation based on my understanding, with small
<ide> variations. It doesn't necessarily represents the paper published
<ide> by the original authors.
<ide>
<del>Authors: Xin Pan (Github: panyx0718), Anelia Angelova
<add>Authors: Xin Pan, Anelia Angelova
<ide>
<ide> <b>Results:</b>
<ide>
<ide><path>research/resnet/README.md
<ide> <font size=4><b>Reproduced ResNet on CIFAR-10 and CIFAR-100 dataset.</b></font>
<ide>
<del>contact: panyx0718 ([email protected])
<add>Xin Pan
<ide>
<ide> <b>Dataset:</b>
<ide>
<ide><path>research/textsum/README.md
<ide> Sequence-to-Sequence with Attention Model for Text Summarization.
<ide>
<ide> Authors:
<ide>
<del>Xin Pan ([email protected], github:panyx0718),
<add>Xin Pan
<ide> Peter Liu ([email protected], github:peterjliu)
<ide>
<ide> <b>Introduction</b> | 5 |
Text | Text | fix typo and wording in readme.md | 297d1466e7dd03dd178778d5c5e5ec8d8d19486a | <ide><path>examples/with-dotenv/README.md
<ide> This example shows how to inline env vars.
<ide> **Please note**:
<ide>
<ide> - It is a bad practice to commit env vars to a repository. Thats why you should normally [gitignore](https://git-scm.com/docs/gitignore) your `.env` file.
<del>- In this example, as soon as you reference an env var in your code it will be automatically be publicly available and exposed to the client.
<add>- In this example, as soon as you reference an env var in your code, it will automatically be made publicly available and exposed to the client.
<ide> - If you want to have more centralized control of what is exposed to the client check out the example [with-universal-configuration-build-time](../with-universal-configuration-build-time).
<del>- Env vars are set (inlined) at build time. If you need to configure your app on rutime check out [examples/with-universal-configuration-runtime](../with-universal-configuration-runtime).
<add>- Env vars are set (inlined) at build time. If you need to configure your app at runtime, check out [examples/with-universal-configuration-runtime](../with-universal-configuration-runtime). | 1 |
Ruby | Ruby | add audit for xcodebuild | ace8723caffbb0de638a5fb75242dfbd4a8b08a0 | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> require 'formula'
<ide> require 'utils'
<ide>
<add># Use "brew audit --strict" to enable even stricter checks.
<add>
<add>def strict?
<add> ARGV.flag? "--strict"
<add>end
<add>
<ide> def ff
<ide> return Formula.all if ARGV.named.empty?
<ide> return ARGV.formulae
<ide> def audit_formula_text name, text
<ide> problems << " * Use ENV.fortran during install instead of depends_on 'gfortran'"
<ide> end unless name == "gfortran" # Gfortran itself has this text in the caveats
<ide>
<add> # xcodebuild should specify SYMROOT
<add> if text =~ /xcodebuild/ and not text =~ /SYMROOT=/
<add> problems << " * xcodebuild should be passed an explicit \"SYMROOT\""
<add> end if strict?
<add>
<ide> return problems
<ide> end
<ide>
<ide> def audit_formula_urls f
<ide>
<ide> unless p =~ %r[^http://mirrors\.kernel\.org/debian/pool/]
<ide> problems << " * \"mirrors.kernel.org\" is the preferred mirror for debian software."
<del> end
<add> end if strict?
<ide> end
<ide>
<ide> return problems | 1 |
Ruby | Ruby | fix bottle domain fallback handling | 635e58e9aaf692e3006c216916b17a2c13f51d0c | <ide><path>Library/Homebrew/formula.rb
<ide> def head_only?
<ide> # @private
<ide> sig { returns(T.nilable(Bottle)) }
<ide> def bottle
<del> Bottle.new(self, bottle_specification) if bottled?
<add> @bottle ||= Bottle.new(self, bottle_specification) if bottled?
<ide> end
<ide>
<ide> # The description of the software.
<ide> def bottle_hash
<ide>
<ide> checksum = collector_os[:checksum].hexdigest
<ide> filename = Bottle::Filename.create(self, os, bottle_spec.rebuild)
<del> path, = bottle_spec.path_resolved_basename(name, checksum, filename)
<add> path, = Utils::Bottles.path_resolved_basename(bottle_spec.root_url, name, checksum, filename)
<ide> url = "#{bottle_spec.root_url}/#{path}"
<ide>
<ide> hash["files"][os] = {
<ide><path>Library/Homebrew/resource.rb
<ide> def url(val = nil, **specs)
<ide> @specs.merge!(specs)
<ide> @using = @specs.delete(:using)
<ide> @download_strategy = DownloadStrategyDetector.detect(url, using)
<add> @downloader = nil
<ide> end
<ide>
<ide> def version(val = nil)
<ide><path>Library/Homebrew/software_spec.rb
<ide> def initialize(formula, spec)
<ide>
<ide> checksum, tag, cellar = spec.checksum_for(Utils::Bottles.tag)
<ide>
<del> filename = Filename.create(formula, tag, spec.rebuild)
<del>
<del> path, resolved_basename = spec.path_resolved_basename(@name, checksum, filename)
<del>
<del> @resource.url("#{spec.root_url}/#{path}", select_download_strategy(spec.root_url_specs))
<del> @resource.version = formula.pkg_version
<del> @resource.checksum = checksum
<del> @resource.downloader.resolved_basename = resolved_basename if resolved_basename.present?
<ide> @prefix = spec.prefix
<add> @tag = tag
<ide> @cellar = cellar
<ide> @rebuild = spec.rebuild
<add>
<add> @resource.version = formula.pkg_version
<add> @resource.checksum = checksum
<add>
<add> root_url(spec.root_url, spec.root_url_specs)
<ide> end
<ide>
<ide> def fetch(verify_download_integrity: true)
<del> # add the default bottle domain as a fallback mirror
<del> if @resource.download_strategy == CurlDownloadStrategy &&
<del> @resource.url.start_with?(Homebrew::EnvConfig.bottle_domain)
<del> fallback_url = @resource.url
<del> .sub(/^#{Regexp.escape(Homebrew::EnvConfig.bottle_domain)}/,
<del> HOMEBREW_BOTTLE_DEFAULT_DOMAIN)
<del> @resource.mirror(fallback_url) if [@resource.url, *@resource.mirrors].exclude?(fallback_url)
<del> end
<ide> @resource.fetch(verify_download_integrity: verify_download_integrity)
<add> rescue DownloadError
<add> raise unless fallback_on_error
<add>
<add> fetch_tab
<add> retry
<ide> end
<ide>
<ide> def clear_cache
<ide> def stage
<ide> def fetch_tab
<ide> # a checksum is used later identifying the correct tab but we do not have the checksum for the manifest/tab
<ide> github_packages_manifest_resource&.fetch(verify_download_integrity: false)
<add> rescue DownloadError
<add> raise unless fallback_on_error
<add>
<add> retry
<ide> end
<ide>
<ide> def tab_attributes
<ide> def github_packages_manifest_resource
<ide>
<ide> image_name = GitHubPackages.image_formula_name(@name)
<ide> image_tag = GitHubPackages.image_version_rebuild(version_rebuild)
<del> resource.url("#{@spec.root_url}/#{image_name}/manifests/#{image_tag}", {
<add> resource.url("#{root_url}/#{image_name}/manifests/#{image_tag}", {
<ide> using: CurlGitHubPackagesDownloadStrategy,
<ide> headers: ["Accept: application/vnd.oci.image.index.v1+json"],
<ide> })
<ide> def github_packages_manifest_resource
<ide> end
<ide>
<ide> def select_download_strategy(specs)
<del> specs[:using] ||= DownloadStrategyDetector.detect(@spec.root_url)
<add> specs[:using] ||= DownloadStrategyDetector.detect(@root_url)
<ide> specs
<ide> end
<add>
<add> def fallback_on_error
<add> # Use the default bottle domain as a fallback mirror
<add> if @resource.url.start_with?(Homebrew::EnvConfig.bottle_domain) &&
<add> Homebrew::EnvConfig.bottle_domain != HOMEBREW_BOTTLE_DEFAULT_DOMAIN
<add> opoo "Bottle missing, falling back to the default domain..."
<add> root_url(HOMEBREW_BOTTLE_DEFAULT_DOMAIN)
<add> @github_packages_manifest_resource = nil
<add> true
<add> else
<add> false
<add> end
<add> end
<add>
<add> def root_url(val = nil, specs = {})
<add> return @root_url if val.nil?
<add>
<add> @root_url = val
<add>
<add> filename = Filename.create(resource.owner, @tag, @spec.rebuild)
<add> path, resolved_basename = Utils::Bottles.path_resolved_basename(val, name, resource.checksum, filename)
<add> @resource.url("#{val}/#{path}", select_download_strategy(specs))
<add> @resource.downloader.resolved_basename = resolved_basename if resolved_basename.present?
<add> end
<ide> end
<ide>
<ide> class BottleSpecification
<ide> def root_url(var = nil, specs = {})
<ide> end
<ide> end
<ide>
<del> def path_resolved_basename(name, checksum, filename)
<del> if root_url.match?(GitHubPackages::URL_REGEX)
<del> image_name = GitHubPackages.image_formula_name(name)
<del> ["#{image_name}/blobs/sha256:#{checksum}", filename&.github_packages]
<del> else
<del> filename&.url_encode
<del> end
<del> end
<del>
<ide> def cellar(val = nil)
<ide> if val.present?
<ide> odeprecated(
<ide><path>Library/Homebrew/utils/bottles.rb
<ide> def formula_contents(bottle_file,
<ide>
<ide> contents
<ide> end
<add>
<add> def path_resolved_basename(root_url, name, checksum, filename)
<add> if root_url.match?(GitHubPackages::URL_REGEX)
<add> image_name = GitHubPackages.image_formula_name(name)
<add> ["#{image_name}/blobs/sha256:#{checksum}", filename&.github_packages]
<add> else
<add> filename&.url_encode
<add> end
<add> end
<ide> end
<ide>
<ide> # Denotes the arch and OS of a bottle. | 4 |
Ruby | Ruby | use consistent method calls for column inspection | 5e77872ac9da3007d6bc0b86428dca9ac8881936 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
<ide> def unescape_bytea(value)
<ide> def quote(value, column = nil) #:nodoc:
<ide> return super unless column
<ide>
<del> if value.kind_of?(String) && column.type == :binary
<add> if value.kind_of?(String) && column.sql_type == 'bytea'
<ide> "'#{escape_bytea(value)}'"
<ide> elsif value.kind_of?(String) && column.sql_type == 'xml'
<ide> "xml '#{quote_string(value)}'" | 1 |
Text | Text | remove duplicate section from hex guide | aff06dc01276b5a227262c7bd80a24d331948b09 | <ide><path>client/src/pages/guide/english/computer-science/hexcode/index.md
<ide> The proposed CSS4 Draft<sup>1</sup> includes a proposal to allow for an extra by
<ide>
<ide> For now, use of the standard `rgba()` function is the recommended way to add an alpha value to your colors.
<ide>
<del>#### More Information:
<del> + [Hexadecimal numeral system on Wikipedia](https://wikipedia.org/wiki/Hexadecimal_numeral_system)
<del> + [CSS color on the MDN web docs](https://developer.mozilla.org/en-US/docs/Web/CSS/color)
<del>
<ide> #### References:
<ide> + <sup>1</sup> [CSS Color Module Level 4 - 4.2. The RGB hexadecimal notations: #RRGGBB](https://www.w3.org/TR/css-color-4/#hex-notation)
<ide>
<ide> For now, use of the standard `rgba()` function is the recommended way to add an
<ide> * [Hex Codes & Color Theory](https://www.youtube.com/watch?v=xlRiLSDdqcY) - A Longer Video which delves into Color theory (Such as what are additive colors and what are subtractive colors etc.) and it also points to other resources for delving deeper into the topic.
<ide> * [Web Colors](https://en.wikipedia.org/wiki/Web_colors) - Wikipedia Article on how colors are used on the web.
<ide> * [Wikipedia article about Hexadecimal code](https://en.wikipedia.org/wiki/Hexadecimal)
<add>* [Wikipedia article about hexadecimal numeral system](https://wikipedia.org/wiki/Hexadecimal_numeral_system)
<ide> * [Wikipedia article about web colors](https://en.wikipedia.org/wiki/Web_colors)
<ide> * [Hex Colors](http://www.color-hex.com/)
<ide> * [Medium article on hex color code](https://medium.com/webkul-dev/hex-color-codes-27cd0a37c3ce)
<del>* [More information on colors in CSS](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value)
<del>* [Explore different Hex colors](http://www.colorhexa.com/)
<ide>\ No newline at end of file
<add>* [More information on color values in CSS](https://developer.mozilla.org/en-US/docs/Web/CSS/color_value)
<add>* [CSS color property on the MDN web docs](https://developer.mozilla.org/en-US/docs/Web/CSS/color)
<add>* [Explore different Hex colors](http://www.colorhexa.com/) | 1 |
Go | Go | remove use of servicespecisupdated | 94429d40787f0267fd515c80c332d800ee58b609 | <ide><path>integration/service/update_test.go
<ide> func TestServiceUpdateLabel(t *testing.T) {
<ide> service.Spec.Labels["foo"] = "bar"
<ide> _, err := cli.ServiceUpdate(ctx, serviceID, service.Version, service.Spec, types.ServiceUpdateOptions{})
<ide> assert.NilError(t, err)
<del> poll.WaitOn(t, serviceSpecIsUpdated(cli, serviceID, service.Version.Index), swarm.ServicePoll)
<add> poll.WaitOn(t, serviceIsUpdated(cli, serviceID), swarm.ServicePoll)
<ide> service = getService(t, cli, serviceID)
<ide> assert.Check(t, is.DeepEqual(service.Spec.Labels, map[string]string{"foo": "bar"}))
<ide>
<ide> // add label to non-empty set
<ide> service.Spec.Labels["foo2"] = "bar"
<ide> _, err = cli.ServiceUpdate(ctx, serviceID, service.Version, service.Spec, types.ServiceUpdateOptions{})
<ide> assert.NilError(t, err)
<del> poll.WaitOn(t, serviceSpecIsUpdated(cli, serviceID, service.Version.Index), swarm.ServicePoll)
<add> poll.WaitOn(t, serviceIsUpdated(cli, serviceID), swarm.ServicePoll)
<ide> service = getService(t, cli, serviceID)
<ide> assert.Check(t, is.DeepEqual(service.Spec.Labels, map[string]string{"foo": "bar", "foo2": "bar"}))
<ide>
<ide> delete(service.Spec.Labels, "foo2")
<ide> _, err = cli.ServiceUpdate(ctx, serviceID, service.Version, service.Spec, types.ServiceUpdateOptions{})
<ide> assert.NilError(t, err)
<del> poll.WaitOn(t, serviceSpecIsUpdated(cli, serviceID, service.Version.Index), swarm.ServicePoll)
<add> poll.WaitOn(t, serviceIsUpdated(cli, serviceID), swarm.ServicePoll)
<ide> service = getService(t, cli, serviceID)
<ide> assert.Check(t, is.DeepEqual(service.Spec.Labels, map[string]string{"foo": "bar"}))
<ide>
<ide> delete(service.Spec.Labels, "foo")
<ide> _, err = cli.ServiceUpdate(ctx, serviceID, service.Version, service.Spec, types.ServiceUpdateOptions{})
<ide> assert.NilError(t, err)
<del> poll.WaitOn(t, serviceSpecIsUpdated(cli, serviceID, service.Version.Index), swarm.ServicePoll)
<add> poll.WaitOn(t, serviceIsUpdated(cli, serviceID), swarm.ServicePoll)
<ide> service = getService(t, cli, serviceID)
<ide> assert.Check(t, is.DeepEqual(service.Spec.Labels, map[string]string{}))
<ide>
<ide> // now make sure we can add again
<ide> service.Spec.Labels["foo"] = "bar"
<ide> _, err = cli.ServiceUpdate(ctx, serviceID, service.Version, service.Spec, types.ServiceUpdateOptions{})
<ide> assert.NilError(t, err)
<del> poll.WaitOn(t, serviceSpecIsUpdated(cli, serviceID, service.Version.Index), swarm.ServicePoll)
<add> poll.WaitOn(t, serviceIsUpdated(cli, serviceID), swarm.ServicePoll)
<ide> service = getService(t, cli, serviceID)
<ide> assert.Check(t, is.DeepEqual(service.Spec.Labels, map[string]string{"foo": "bar"}))
<ide>
<ide> func serviceIsUpdated(client client.ServiceAPIClient, serviceID string) func(log
<ide> }
<ide> }
<ide> }
<del>
<del>func serviceSpecIsUpdated(client client.ServiceAPIClient, serviceID string, serviceOldVersion uint64) func(log poll.LogT) poll.Result {
<del> return func(log poll.LogT) poll.Result {
<del> service, _, err := client.ServiceInspectWithRaw(context.Background(), serviceID, types.ServiceInspectOptions{})
<del> switch {
<del> case err != nil:
<del> return poll.Error(err)
<del> case service.Version.Index > serviceOldVersion:
<del> return poll.Success()
<del> default:
<del> return poll.Continue("waiting for service %s to be updated", serviceID)
<del> }
<del> }
<del>} | 1 |
Mixed | Text | add readme.me, contributor update | e8cb8757653e03a1c25014d9403f3f32d3a2515a | <ide><path>CONTRIBUTING.md
<ide> your pull request should be accepted quickly.
<ide>
<ide> Some things that will increase the chance that your pull request is accepted:
<ide>
<del>* Write tests
<add>* [Write tests](./test/README.md)
<ide> * Follow the existing coding style
<ide> * Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)
<ide>
<ide><path>test/README.md
<add># Welcome to the webpack test suite!!!!
<add>Every pull request that you submit to webpack (besides README and spelling corrections in comments) requires tests that are created.
<add>
<add>But don't give up hope!!! Although our tests may appear complex and overwhelming, once you become familiar with the test suite and structure, adding and creating tests will be fun and beneficial as you work inside the codebase! ❤
<add>
<add>## tl;dr
<add>* Clone repo
<add>* install and link deps
<add> * `yarn install.`
<add> * `npm run test` or `npm t`
<add>* To run an individual suite: (recommended during development for easier isolated diffs)
<add>
<add>Example: `$(npm bin)/mocha --grep ConfigTestCases`
<add>
<add>## Test suite overview
<add>We use MochaJS for our tests. For more information on Mocha you can visit their [homepage](https://mochajs.org/)!
<add>
<add>### Class Tests
<add>All test files can be found in *.test.js. There are many tests that simply test API's of a specific class/file (such as `Compiler`, `Errors`, Integration, `Parser`, `RuleSet`, Validation).
<add>If the feature you are contributing involves one of those classes, then best to start there to understand the structure.
<add>
<add>### xCases
<add>In addition to Class specific tests, there are also directories that end in "Cases". The suites for these cases also have corresponding *.test.js files.
<add>
<add>#### cases (`TestCases.test.js`) <sup>1</sup>
<add>Cases are a set of general purpose tests that will run against a variety of permutations of webpack configurations. When you are making a general purpose change that doesn't require you to have a special configuration, you would likely add your tests here. Inside of the `./test/cases` directory you will find tests are broken into thematic sub directories. Take a moment to explore the different options.
<add>
<add>To add a new case, create a new directory inside of the top level test groups, and then add an `index.js` file (and any other supporting files).
<add>
<add>By default this file will be the entry point for the test suite and you can add your `it()`'s there. This will also become bundled so that node env support happens as well.
<add>
<add>#### configCases (`ConfigTestCases.test.js`) <sup>1</sup>
<add>If you are trying to solve a bug which is reproducible when x and y properties are used together in a config, then configCases is the place to be!!!!
<add>
<add>In addition to an `index.js`, these configCases require a `webpack.config.js` is located inside of your test suite. This will run this specific config through `webpack` just as you were building individually. They will use the same loading/bundling technique of your `it()` tests, however you now have a more specific config use cases that you can write even before you start coding.
<add>
<add>#### statsCases (`Stats.test.js`)
<add>Stats cases are similar to configCases except specifically focusing on the `expected` output of your stats. Instead of writing to the console, however the output of stats will be written to disk.
<add>
<add>By default, the "expected" outcome is a pain to write by hand so instead when statsCases are run the following happens:
<add>
<add>* Checks for `expected.txt` file containing expected results.
<add>* If the `expected.txt` doesn't match what is output, then an `actual.txt` stats output file will be created and the test will fail. (A typical workflow for stats cases is to fail the test and copy the results from `actual.txt` to `expected.txt`.)
<add>* If the actual output matches `expected.txt`, the tests passes and you are free to submit that PR with pride!!!
<add>
<add>## Questions? Comments?
<add>If you are still nervous or don't quite understand, please submit an issue and tag us in it, and provide a relevant PR while working on!
<add>
<add>
<add>## Footnotes
<add><sup>1</sup> webpack's parser supports the use of ES2015 features like arrow functions, harmony exports, etc. However as a library we follow NodeJS's timeline for dropping older versions of node. Because of this we expect your tests on Travis to pass all the way back to NodeJS v0.12; Therefore if you would like specific tests that use these features to be ignored if they are not supported, then you should add a `test.filter.js` file. This allows you to import the syntax needed for that test, meanwhile ignoring it on node versions (during CI) that don't support it. webpack has a variety of helpful exapmles you can refer to if you are just starting out. See the `./helpers` folder to find a list of the versions.
<add>
<ide><path>test/TestCases.test.js
<ide> describe("TestCases", function() {
<ide> loader: "jade-loader"
<ide> }]
<ide> },
<del> plugins: (config.plugins || []).concat(
<del> function() {
<del> this.plugin("compilation", function(compilation) {
<del> ["optimize", "optimize-modules-basic", "optimize-chunks-basic", "after-optimize-tree", "after-optimize-assets"].forEach(function(hook) {
<del> compilation.plugin(hook, function() {
<del> compilation.checkConstraints();
<del> });
<add> plugins: (config.plugins || []).concat(function() {
<add> this.plugin("compilation", function(compilation) {
<add> ["optimize", "optimize-modules-basic", "optimize-chunks-basic", "after-optimize-tree", "after-optimize-assets"].forEach(function(hook) {
<add> compilation.plugin(hook, function() {
<add> compilation.checkConstraints();
<ide> });
<ide> });
<del> }
<del> )
<add> });
<add> })
<ide> };
<ide> webpack(options, function(err, stats) {
<ide> if(err) return done(err); | 3 |
Mixed | Python | fix the first `nlp` call for `ja` (closes ) | 2fb004832f48750a9cf35e1f89e4118ccc70da30 | <ide><path>.github/contributors/kbulygin.md
<add># spaCy contributor agreement
<add>
<add>This spaCy Contributor Agreement (**"SCA"**) is based on the
<add>[Oracle Contributor Agreement](http://www.oracle.com/technetwork/oca-405177.pdf).
<add>The SCA applies to any contribution that you make to any product or project
<add>managed by us (the **"project"**), and sets out the intellectual property rights
<add>you grant to us in the contributed materials. The term **"us"** shall mean
<add>[ExplosionAI UG (haftungsbeschränkt)](https://explosion.ai/legal). The term
<add>**"you"** shall mean the person or entity identified below.
<add>
<add>If you agree to be bound by these terms, fill in the information requested
<add>below and include the filled-in version with your first pull request, under the
<add>folder [`.github/contributors/`](/.github/contributors/). The name of the file
<add>should be your GitHub username, with the extension `.md`. For example, the user
<add>example_user would create the file `.github/contributors/example_user.md`.
<add>
<add>Read this agreement carefully before signing. These terms and conditions
<add>constitute a binding legal agreement.
<add>
<add>## Contributor Agreement
<add>
<add>1. The term "contribution" or "contributed materials" means any source code,
<add>object code, patch, tool, sample, graphic, specification, manual,
<add>documentation, or any other material posted or submitted by you to the project.
<add>
<add>2. With respect to any worldwide copyrights, or copyright applications and
<add>registrations, in your contribution:
<add>
<add> * you hereby assign to us joint ownership, and to the extent that such
<add> assignment is or becomes invalid, ineffective or unenforceable, you hereby
<add> grant to us a perpetual, irrevocable, non-exclusive, worldwide, no-charge,
<add> royalty-free, unrestricted license to exercise all rights under those
<add> copyrights. This includes, at our option, the right to sublicense these same
<add> rights to third parties through multiple levels of sublicensees or other
<add> licensing arrangements;
<add>
<add> * you agree that each of us can do all things in relation to your
<add> contribution as if each of us were the sole owners, and if one of us makes
<add> a derivative work of your contribution, the one who makes the derivative
<add> work (or has it made will be the sole owner of that derivative work;
<add>
<add> * you agree that you will not assert any moral rights in your contribution
<add> against us, our licensees or transferees;
<add>
<add> * you agree that we may register a copyright in your contribution and
<add> exercise all ownership rights associated with it; and
<add>
<add> * you agree that neither of us has any duty to consult with, obtain the
<add> consent of, pay or render an accounting to the other for any use or
<add> distribution of your contribution.
<add>
<add>3. With respect to any patents you own, or that you can license without payment
<add>to any third party, you hereby grant to us a perpetual, irrevocable,
<add>non-exclusive, worldwide, no-charge, royalty-free license to:
<add>
<add> * make, have made, use, sell, offer to sell, import, and otherwise transfer
<add> your contribution in whole or in part, alone or in combination with or
<add> included in any product, work or materials arising out of the project to
<add> which your contribution was submitted, and
<add>
<add> * at our option, to sublicense these same rights to third parties through
<add> multiple levels of sublicensees or other licensing arrangements.
<add>
<add>4. Except as set out above, you keep all right, title, and interest in your
<add>contribution. The rights that you grant to us under these terms are effective
<add>on the date you first submitted a contribution to us, even if your submission
<add>took place before the date you sign these terms.
<add>
<add>5. You covenant, represent, warrant and agree that:
<add>
<add> * Each contribution that you submit is and shall be an original work of
<add> authorship and you can legally grant the rights set out in this SCA;
<add>
<add> * to the best of your knowledge, each contribution will not violate any
<add> third party's copyrights, trademarks, patents, or other intellectual
<add> property rights; and
<add>
<add> * each contribution shall be in compliance with U.S. export control laws and
<add> other applicable export and import laws. You agree to notify us if you
<add> become aware of any circumstance which would make any of the foregoing
<add> representations inaccurate in any respect. We may publicly disclose your
<add> participation in the project, including the fact that you have signed the SCA.
<add>
<add>6. This SCA is governed by the laws of the State of California and applicable
<add>U.S. Federal law. Any choice of law rules will not apply.
<add>
<add>7. Please place an “x” on one of the applicable statement below. Please do NOT
<add>mark both statements:
<add>
<add> * [x] I am signing on behalf of myself as an individual and no other person
<add> or entity, including my employer, has or will have rights with respect to my
<add> contributions.
<add>
<add> * [ ] I am signing on behalf of my employer or a legal entity and I have the
<add> actual authority to contractually bind that entity.
<add>
<add>## Contributor Details
<add>
<add>| Field | Entry |
<add>|------------------------------- | -------------------- |
<add>| Name | Kirill Bulygin |
<add>| Company name (if applicable) | |
<add>| Title or role (if applicable) | |
<add>| Date | 2018-12-18 |
<add>| GitHub username | kbulygin |
<add>| Website (optional) | |
<ide><path>spacy/lang/ja/__init__.py
<ide> def __init__(self, cls, nlp=None):
<ide>
<ide> MeCab = try_mecab_import()
<ide> self.tokenizer = MeCab.Tagger()
<add> self.tokenizer.parseToNode('') # see #2901
<ide>
<ide> def __call__(self, text):
<ide> dtokens = detailed_tokens(self.tokenizer, text)
<ide><path>spacy/tests/regression/test_issue2901.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>import pytest
<add>
<add>from ...lang.ja import Japanese
<add>
<add>
<add>def test_issue2901():
<add> """Test that `nlp` doesn't fail."""
<add> try:
<add> nlp = Japanese()
<add> except ImportError:
<add> pytest.skip()
<add>
<add> doc = nlp("pythonが大好きです")
<add> assert doc | 3 |
Javascript | Javascript | fix lint error regarding unused commons const | 690bdede1307b4499683c417784e3c84b74bfb45 | <ide><path>test/parallel/test-stream-writableState-ending.js
<ide> 'use strict';
<ide>
<del>const common = require('../common');
<add>require('../common');
<ide>
<ide> const assert = require('assert');
<ide> const stream = require('stream'); | 1 |
Python | Python | avoid indexing with list | a4cc361003d3b2b241b826372d9691187b47f86f | <ide><path>numpy/ma/extras.py
<ide> def _median(a, axis=None, out=None, overwrite_input=False):
<ide> ind = np.meshgrid(*axes_grid, sparse=True, indexing='ij')
<ide> # insert indices of low and high median
<ide> ind.insert(axis, h - 1)
<del> low = asorted[ind]
<add> low = asorted[tuple(ind)]
<ide> low._sharedmask = False
<ide> ind[axis] = h
<del> high = asorted[ind]
<add> high = asorted[tuple(ind)]
<ide> # duplicate high if odd number of elements so mean does nothing
<ide> odd = counts % 2 == 1
<ide> if asorted.ndim == 1: | 1 |
Python | Python | fix bucket sort | 2fa009aa530ee1c243090c31b69bbf7effc754e2 | <ide><path>sorts/bucket_sort.py
<ide>
<ide> Source: https://en.wikipedia.org/wiki/Bucket_sort
<ide> """
<del>DEFAULT_BUCKET_SIZE = 5
<ide>
<ide>
<del>def bucket_sort(my_list: list, bucket_size: int = DEFAULT_BUCKET_SIZE) -> list:
<add>def bucket_sort(my_list: list) -> list:
<ide> """
<ide> >>> data = [-1, 2, -5, 0]
<ide> >>> bucket_sort(data) == sorted(data)
<ide> True
<del>
<ide> >>> data = [9, 8, 7, 6, -12]
<ide> >>> bucket_sort(data) == sorted(data)
<ide> True
<del>
<ide> >>> data = [.4, 1.2, .1, .2, -.9]
<ide> >>> bucket_sort(data) == sorted(data)
<ide> True
<del>
<del> >>> bucket_sort([])
<del> Traceback (most recent call last):
<del> ...
<del> Exception: Please add some elements in the array.
<add> >>> bucket_sort([]) == sorted([])
<add> True
<add> >>> import random
<add> >>> collection = random.sample(range(-50, 50), 50)
<add> >>> bucket_sort(collection) == sorted(collection)
<add> True
<ide> """
<ide> if len(my_list) == 0:
<del> raise Exception("Please add some elements in the array.")
<del>
<del> min_value, max_value = (min(my_list), max(my_list))
<del> bucket_count = (max_value - min_value) // bucket_size + 1
<del> buckets = [[] for _ in range(int(bucket_count))]
<add> return []
<add> min_value, max_value = min(my_list), max(my_list)
<add> bucket_count = int(max_value - min_value) + 1
<add> buckets = [[] for _ in range(bucket_count)]
<ide>
<ide> for i in range(len(my_list)):
<del> buckets[int((my_list[i] - min_value) // bucket_size)].append(my_list[i])
<add> buckets[(int(my_list[i] - min_value) // bucket_count)].append(my_list[i])
<ide>
<del> return sorted(
<del> buckets[i][j] for i in range(len(buckets)) for j in range(len(buckets[i]))
<del> )
<add> return [v for bucket in buckets for v in sorted(bucket)]
<ide>
<ide>
<ide> if __name__ == "__main__":
<add> from doctest import testmod
<add>
<add> testmod()
<ide> assert bucket_sort([4, 5, 3, 2, 1]) == [1, 2, 3, 4, 5]
<ide> assert bucket_sort([0, 1, -10, 15, 2, -2]) == [-10, -2, 0, 1, 2, 15] | 1 |
Javascript | Javascript | add runtime block | 5cdfe45aa3e50fabad44009c1b8511253c6e4915 | <ide><path>src/Injector.js
<ide> function createInjector(modulesToLoad) {
<ide> }));
<ide>
<ide>
<del> loadModules(modulesToLoad);
<add> forEach(loadModules(modulesToLoad), function(fn) { instanceInjector.invoke(fn || noop); });
<ide>
<ide> return instanceInjector;
<ide>
<ide> function createInjector(modulesToLoad) {
<ide> // Module Loading
<ide> ////////////////////////////////////
<ide> function loadModules(modulesToLoad){
<add> var runBlocks = [];
<ide> forEach(modulesToLoad, function(module) {
<ide> if (loadedModules.get(module)) return;
<ide> loadedModules.put(module, true);
<ide> if (isString(module)) {
<ide> var moduleFn = angularModule(module);
<del> loadModules(moduleFn.requires);
<add> runBlocks = runBlocks.concat(loadModules(moduleFn.requires)).concat(moduleFn._runBlocks);
<ide>
<ide> try {
<del> for(var invokeQueue = moduleFn.invokeQueue, i = 0, ii = invokeQueue.length; i < ii; i++) {
<add> for(var invokeQueue = moduleFn._invokeQueue, i = 0, ii = invokeQueue.length; i < ii; i++) {
<ide> var invokeArgs = invokeQueue[i],
<ide> provider = invokeArgs[0] == '$injector'
<ide> ? providerInjector
<ide> function createInjector(modulesToLoad) {
<ide> }
<ide> } else if (isFunction(module)) {
<ide> try {
<del> providerInjector.invoke(module);
<add> runBlocks.push(providerInjector.invoke(module));
<ide> } catch (e) {
<ide> if (e.message) e.message += ' from ' + module;
<ide> throw e;
<ide> }
<ide> } else if (isArray(module)) {
<ide> try {
<del> providerInjector.invoke(module);
<add> runBlocks.push(providerInjector.invoke(module));
<ide> } catch (e) {
<ide> if (e.message) e.message += ' from ' + String(module[module.length - 1]);
<ide> throw e;
<ide> function createInjector(modulesToLoad) {
<ide> assertArgFn(module, 'module');
<ide> }
<ide> });
<add> return runBlocks;
<ide> }
<ide>
<ide> ////////////////////////////////////
<ide><path>src/angular-mocks.js
<ide> angular.module('ngMock', ['ng']).service({
<ide> * Currently there is only one mock present in this module -
<ide> * the {@link angular.module.ngMockE2E.$httpBackend e2e $httpBackend} mock.
<ide> */
<del>angular.module('ngMockE2E', ['ng']).init(function($provide) {
<add>angular.module('ngMockE2E', ['ng']).config(function($provide) {
<ide> $provide.decorator('$httpBackend', angular.mock.e2e.$httpBackendDecorator);
<ide> });
<ide>
<ide><path>src/loader.js
<ide> function setupModuleLoader(window) {
<ide> * {@link angular.Module#init Module.init()}.
<ide> * @return {angular.Module}
<ide> */
<del> return function module(name, requires, initFn) {
<add> return function module(name, requires, configFn) {
<ide> if (requires && modules.hasOwnProperty(name)) {
<ide> modules[name] = null;
<ide> }
<ide> function setupModuleLoader(window) {
<ide> /** @type {!Array.<Array.<*>>} */
<ide> var invokeQueue = [];
<ide>
<del> var init = invokeLater('$injector', 'invoke');
<add> /** @type {!Array.<Function>} */
<add> var runBlocks = [];
<add>
<add> var config = invokeLater('$injector', 'invoke');
<ide>
<ide> /** @type {angular.Module} */
<ide> var moduleInstance = {
<add> // Private state
<add> _invokeQueue: invokeQueue,
<add> _runBlocks: runBlocks,
<add>
<ide> /**
<ide> * @ngdoc property
<ide> * @name angular.Module#requires
<ide> function setupModuleLoader(window) {
<ide> * Holds the list of modules which the injector will load before the current module is loaded.
<ide> */
<ide> requires: requires,
<del> invokeQueue: invokeQueue,
<add>
<add> /**
<add> * @ngdoc property
<add> * @name angular.Module#name
<add> * @propertyOf angular.Module
<add> * @returns {string} Name of the module.
<add> * @description
<add> */
<add> name: name,
<add>
<ide>
<ide> /**
<ide> * @ngdoc method
<ide> function setupModuleLoader(window) {
<ide>
<ide> /**
<ide> * @ngdoc method
<del> * @name angular.Module#init
<add> * @name angular.Module#config
<ide> * @methodOf angular.Module
<del> * @param {Function} initializationFn Execute this function on module load, allowing it to do any
<del> * service configuration..
<add> * @param {Function} initializationFn Execute this function on module load. Useful for
<add> * service configuration.
<ide> * @description
<ide> * Use this method to register work which needs to be performed on module loading.
<ide> */
<del> init: init
<add> config: config,
<add>
<add> /**
<add> * @ngdoc method
<add> * @name angular.Module#run
<add> * @methodOf angular.Module
<add> * @param {Function} initializationFn Execute this function after injector creation.
<add> * Useful for application initialization.
<add> * @description
<add> * Use this method to register work which needs to be performed on module loading.
<add> */
<add> run: function(block) {
<add> runBlocks.push(block);
<add> return this;
<add> }
<ide> };
<ide>
<del> if (initFn) {
<del> init(initFn);
<add> if (configFn) {
<add> config(configFn);
<ide> }
<ide>
<ide> return moduleInstance;
<ide><path>test/InjectorSpec.js
<ide> describe('injector', function() {
<ide> expect(log).toEqual('abc');
<ide> });
<ide>
<add> it('should execute runBlocks after injector creation', function() {
<add> var log = '';
<add> angular.module('a', [], function(){ log += 'a'; }).run(function() { log += 'A'; });
<add> angular.module('b', ['a'], function(){ log += 'b'; }).run(function() { log += 'B'; });
<add> createInjector([
<add> 'b',
<add> valueFn(function() { log += 'C'; }),
<add> [valueFn(function() { log += 'D'; })]
<add> ]);
<add> expect(log).toEqual('abABCD');
<add> });
<add>
<ide> describe('$provide', function() {
<ide> describe('value', function() {
<ide> it('should configure $provide values', function() {
<ide><path>test/loaderSpec.js
<ide> describe('module loader', function() {
<ide>
<ide> it('should record calls', function() {
<ide> var otherModule = window.angular.module('other', []);
<del> otherModule.init('otherInit');
<add> otherModule.config('otherInit');
<ide>
<del> var myModule = window.angular.module('my', ['other'], 'init');
<add> var myModule = window.angular.module('my', ['other'], 'config');
<ide>
<del> myModule.
<add> expect(myModule.
<ide> service('sk', 'sv').
<ide> factory('fk', 'fv').
<ide> value('k', 'v').
<ide> filter('f', 'ff').
<del> init('init2');
<add> config('init2').
<add> run('runBlock')).toBe(myModule);
<ide>
<ide> expect(myModule.requires).toEqual(['other']);
<del> expect(myModule.invokeQueue).toEqual([
<del> ['$injector', 'invoke', ['init'] ],
<add> expect(myModule._invokeQueue).toEqual([
<add> ['$injector', 'invoke', ['config'] ],
<ide> ['$provide', 'service', ['sk', 'sv'] ],
<ide> ['$provide', 'factory', ['fk', 'fv'] ],
<ide> ['$provide', 'value', ['k', 'v'] ],
<ide> ['$filterProvider', 'register', ['f', 'ff'] ],
<ide> ['$injector', 'invoke', ['init2'] ]
<ide> ]);
<add> expect(myModule._runBlocks).toEqual(['runBlock']);
<ide> });
<ide>
<ide> | 5 |
Python | Python | remove asunicode where a u prefix would suffice | 09a21de418ef8df60a0121637cb213e0fa778e5d | <ide><path>numpy/core/tests/test_numerictypes.py
<ide> import sys
<ide>
<ide> import numpy as np
<del>from numpy.compat import asunicode
<ide> from numpy.testing import (
<ide> TestCase, run_module_suite, assert_, assert_equal
<ide> )
<ide> # x Info color info y z
<ide> # value y2 Info2 name z2 Name Value
<ide> # name value y3 z3
<del> ([3, 2], (6j, 6., (b'nn', [6j, 4j], [6., 4.], [1, 2]), b'NN', True), b'cc', (asunicode('NN'), 6j), [[6., 4.], [6., 4.]], 8),
<del> ([4, 3], (7j, 7., (b'oo', [7j, 5j], [7., 5.], [2, 1]), b'OO', False), b'dd', (asunicode('OO'), 7j), [[7., 5.], [7., 5.]], 9),
<add> ([3, 2], (6j, 6., (b'nn', [6j, 4j], [6., 4.], [1, 2]), b'NN', True), b'cc', (u'NN', 6j), [[6., 4.], [6., 4.]], 8),
<add> ([4, 3], (7j, 7., (b'oo', [7j, 5j], [7., 5.], [2, 1]), b'OO', False), b'dd', (u'OO', 7j), [[7., 5.], [7., 5.]], 9),
<ide> ]
<ide>
<ide> | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.