content_type
stringclasses 8
values | main_lang
stringclasses 7
values | message
stringlengths 1
50
| sha
stringlengths 40
40
| patch
stringlengths 52
962k
| file_count
int64 1
300
|
---|---|---|---|---|---|
Go | Go | fix post config verification without flags | cd3446972e968639684f2b65bfc11c099a25f1b0 | <ide><path>daemon/config.go
<ide> type CommonConfig struct {
<ide> TLSOptions CommonTLSOptions `json:"tls-opts,omitempty"`
<ide>
<ide> reloadLock sync.Mutex
<add> valuesSet map[string]interface{}
<ide> }
<ide>
<ide> // InstallCommonFlags adds command-line options to the top-level flag parser for
<ide> func (config *Config) InstallCommonFlags(cmd *flag.FlagSet, usageFn func(string)
<ide> cmd.Var(opts.NewNamedMapOpts("cluster-store-opts", config.ClusterOpts, nil), []string{"-cluster-store-opt"}, usageFn("Set cluster store options"))
<ide> }
<ide>
<add>// IsValueSet returns true if a configuration value
<add>// was explicitly set in the configuration file.
<add>func (config *Config) IsValueSet(name string) bool {
<add> if config.valuesSet == nil {
<add> return false
<add> }
<add> _, ok := config.valuesSet[name]
<add> return ok
<add>}
<add>
<ide> func parseClusterAdvertiseSettings(clusterStore, clusterAdvertise string) (string, error) {
<ide> if clusterAdvertise == "" {
<ide> return "", errDiscoveryDisabled
<ide> func getConflictFreeConfiguration(configFile string, flags *flag.FlagSet) (*Conf
<ide> return nil, err
<ide> }
<ide>
<add> var config Config
<ide> var reader io.Reader
<ide> if flags != nil {
<ide> var jsonConfig map[string]interface{}
<ide> func getConflictFreeConfiguration(configFile string, flags *flag.FlagSet) (*Conf
<ide> return nil, err
<ide> }
<ide>
<del> if err := findConfigurationConflicts(jsonConfig, flags); err != nil {
<add> configSet := configValuesSet(jsonConfig)
<add>
<add> if err := findConfigurationConflicts(configSet, flags); err != nil {
<ide> return nil, err
<ide> }
<add>
<add> config.valuesSet = configSet
<ide> }
<ide>
<del> var config Config
<ide> reader = bytes.NewReader(b)
<ide> err = json.NewDecoder(reader).Decode(&config)
<ide> return &config, err
<ide> }
<ide>
<del>// findConfigurationConflicts iterates over the provided flags searching for
<del>// duplicated configurations. It returns an error with all the conflicts if
<del>// it finds any.
<del>func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagSet) error {
<del> var conflicts []string
<add>// configValuesSet returns the configuration values explicitly set in the file.
<add>func configValuesSet(config map[string]interface{}) map[string]interface{} {
<ide> flatten := make(map[string]interface{})
<ide> for k, v := range config {
<ide> if m, ok := v.(map[string]interface{}); ok {
<ide> func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagS
<ide> flatten[k] = v
<ide> }
<ide> }
<add> return flatten
<add>}
<add>
<add>// findConfigurationConflicts iterates over the provided flags searching for
<add>// duplicated configurations. It returns an error with all the conflicts if
<add>// it finds any.
<add>func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagSet) error {
<add> var conflicts []string
<ide>
<ide> printConflict := func(name string, flagValue, fileValue interface{}) string {
<ide> return fmt.Sprintf("%s: (from flag: %v, from file: %v)", name, flagValue, fileValue)
<ide> func findConfigurationConflicts(config map[string]interface{}, flags *flag.FlagS
<ide> collectConflicts := func(f *flag.Flag) {
<ide> // search option name in the json configuration payload if the value is a named option
<ide> if namedOption, ok := f.Value.(opts.NamedOption); ok {
<del> if optsValue, ok := flatten[namedOption.Name()]; ok {
<add> if optsValue, ok := config[namedOption.Name()]; ok {
<ide> conflicts = append(conflicts, printConflict(namedOption.Name(), f.Value.String(), optsValue))
<ide> }
<ide> } else {
<ide> // search flag name in the json configuration payload without trailing dashes
<ide> for _, name := range f.Names {
<ide> name = strings.TrimLeft(name, "-")
<ide>
<del> if value, ok := flatten[name]; ok {
<add> if value, ok := config[name]; ok {
<ide> conflicts = append(conflicts, printConflict(name, f.Value.String(), value))
<ide> break
<ide> }
<ide><path>docker/common.go
<ide> func init() {
<ide> func postParseCommon() {
<ide> cmd := commonFlags.FlagSet
<ide>
<del> if commonFlags.LogLevel != "" {
<del> lvl, err := logrus.ParseLevel(commonFlags.LogLevel)
<del> if err != nil {
<del> fmt.Fprintf(os.Stderr, "Unable to parse logging level: %s\n", commonFlags.LogLevel)
<del> os.Exit(1)
<del> }
<del> logrus.SetLevel(lvl)
<del> } else {
<del> logrus.SetLevel(logrus.InfoLevel)
<del> }
<add> setDaemonLogLevel(commonFlags.LogLevel)
<ide>
<ide> // Regardless of whether the user sets it to true or false, if they
<ide> // specify --tlsverify at all then we need to turn on tls
<ide> func postParseCommon() {
<ide> }
<ide> }
<ide> }
<add>
<add>func setDaemonLogLevel(logLevel string) {
<add> if logLevel != "" {
<add> lvl, err := logrus.ParseLevel(logLevel)
<add> if err != nil {
<add> fmt.Fprintf(os.Stderr, "Unable to parse logging level: %s\n", logLevel)
<add> os.Exit(1)
<add> }
<add> logrus.SetLevel(lvl)
<add> } else {
<add> logrus.SetLevel(logrus.InfoLevel)
<add> }
<add>}
<ide><path>docker/daemon.go
<ide> func loadDaemonCliConfig(config *daemon.Config, daemonFlags *flag.FlagSet, commo
<ide> }
<ide> }
<ide>
<add> // Regardless of whether the user sets it to true or false, if they
<add> // specify TLSVerify at all then we need to turn on TLS
<add> if config.IsValueSet("tls-verify") {
<add> config.TLS = true
<add> }
<add>
<add> // ensure that the log level is the one set after merging configurations
<add> setDaemonLogLevel(config.LogLevel)
<add>
<ide> return config, nil
<ide> }
<ide><path>docker/daemon_test.go
<ide> import (
<ide> "strings"
<ide> "testing"
<ide>
<add> "github.com/Sirupsen/logrus"
<ide> "github.com/docker/docker/cli"
<ide> "github.com/docker/docker/daemon"
<ide> "github.com/docker/docker/opts"
<ide> func TestLoadDaemonCliConfigWithConflicts(t *testing.T) {
<ide> t.Fatalf("expected labels conflict, got %v", err)
<ide> }
<ide> }
<add>
<add>func TestLoadDaemonCliConfigWithTLSVerify(t *testing.T) {
<add> c := &daemon.Config{}
<add> common := &cli.CommonFlags{
<add> TLSOptions: &tlsconfig.Options{
<add> CAFile: "/tmp/ca.pem",
<add> },
<add> }
<add>
<add> f, err := ioutil.TempFile("", "docker-config-")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> configFile := f.Name()
<add> f.Write([]byte(`{"tls-verify": true}`))
<add> f.Close()
<add>
<add> flags := mflag.NewFlagSet("test", mflag.ContinueOnError)
<add> loadedConfig, err := loadDaemonCliConfig(c, flags, common, configFile)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if loadedConfig == nil {
<add> t.Fatalf("expected configuration %v, got nil", c)
<add> }
<add>
<add> if !loadedConfig.TLS {
<add> t.Fatalf("expected TLS enabled, got %q", loadedConfig)
<add> }
<add>}
<add>
<add>func TestLoadDaemonCliConfigWithExplicitTLSVerifyFalse(t *testing.T) {
<add> c := &daemon.Config{}
<add> common := &cli.CommonFlags{
<add> TLSOptions: &tlsconfig.Options{
<add> CAFile: "/tmp/ca.pem",
<add> },
<add> }
<add>
<add> f, err := ioutil.TempFile("", "docker-config-")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> configFile := f.Name()
<add> f.Write([]byte(`{"tls-verify": false}`))
<add> f.Close()
<add>
<add> flags := mflag.NewFlagSet("test", mflag.ContinueOnError)
<add> loadedConfig, err := loadDaemonCliConfig(c, flags, common, configFile)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if loadedConfig == nil {
<add> t.Fatalf("expected configuration %v, got nil", c)
<add> }
<add>
<add> if !loadedConfig.TLS {
<add> t.Fatalf("expected TLS enabled, got %q", loadedConfig)
<add> }
<add>}
<add>
<add>func TestLoadDaemonCliConfigWithoutTLSVerify(t *testing.T) {
<add> c := &daemon.Config{}
<add> common := &cli.CommonFlags{
<add> TLSOptions: &tlsconfig.Options{
<add> CAFile: "/tmp/ca.pem",
<add> },
<add> }
<add>
<add> f, err := ioutil.TempFile("", "docker-config-")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> configFile := f.Name()
<add> f.Write([]byte(`{}`))
<add> f.Close()
<add>
<add> flags := mflag.NewFlagSet("test", mflag.ContinueOnError)
<add> loadedConfig, err := loadDaemonCliConfig(c, flags, common, configFile)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if loadedConfig == nil {
<add> t.Fatalf("expected configuration %v, got nil", c)
<add> }
<add>
<add> if loadedConfig.TLS {
<add> t.Fatalf("expected TLS disabled, got %q", loadedConfig)
<add> }
<add>}
<add>
<add>func TestLoadDaemonCliConfigWithLogLevel(t *testing.T) {
<add> c := &daemon.Config{}
<add> common := &cli.CommonFlags{}
<add>
<add> f, err := ioutil.TempFile("", "docker-config-")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> configFile := f.Name()
<add> f.Write([]byte(`{"log-level": "warn"}`))
<add> f.Close()
<add>
<add> flags := mflag.NewFlagSet("test", mflag.ContinueOnError)
<add> loadedConfig, err := loadDaemonCliConfig(c, flags, common, configFile)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if loadedConfig == nil {
<add> t.Fatalf("expected configuration %v, got nil", c)
<add> }
<add> if loadedConfig.LogLevel != "warn" {
<add> t.Fatalf("expected warn log level, got %v", loadedConfig.LogLevel)
<add> }
<add>
<add> if logrus.GetLevel() != logrus.WarnLevel {
<add> t.Fatalf("expected warn log level, got %v", logrus.GetLevel())
<add> }
<add>} | 4 |
PHP | PHP | fix comment on default session driver | 5e2d8843d8f3e651fa5f6f5252d64fd71c26fc9b | <ide><path>app/config/session.php
<ide> |--------------------------------------------------------------------------
<ide> |
<ide> | This option controls the default session "driver" that will be used on
<del> | requests. By default we will use the light-weight cookie driver but
<add> | requests. By default we will use the light-weight native driver but
<ide> | you may specify any of the other wonderful drivers provided here.
<ide> |
<ide> | Supported: "native", "cookie", "database", "apc", | 1 |
PHP | PHP | update csrfprotectionmiddleware to psr 15 standard | bb7d60711e176e978c4cffa279b71773516439d7 | <ide><path>src/Http/Middleware/CsrfProtectionMiddleware.php
<ide> use Cake\I18n\Time;
<ide> use Cake\Utility\Hash;
<ide> use Cake\Utility\Security;
<add>use Psr\Http\Message\ResponseInterface;
<add>use Psr\Http\Message\ServerRequestInterface;
<add>use Psr\Http\Server\MiddlewareInterface;
<add>use Psr\Http\Server\RequestHandlerInterface;
<ide>
<ide> /**
<ide> * Provides CSRF protection & validation.
<ide> * used together your forms will have CSRF tokens automatically added
<ide> * when `$this->Form->create(...)` is used in a view.
<ide> */
<del>class CsrfProtectionMiddleware
<add>class CsrfProtectionMiddleware implements MiddlewareInterface
<ide> {
<ide> /**
<ide> * Default config for the CSRF handling.
<ide> public function __construct(array $config = [])
<ide> /**
<ide> * Checks and sets the CSRF token depending on the HTTP verb.
<ide> *
<del> * @param \Cake\Http\ServerRequest $request The request.
<del> * @param \Cake\Http\Response $response The response.
<del> * @param callable $next Callback to invoke the next middleware.
<del> * @return \Cake\Http\Response A response
<add> * @param \Psr\Http\Message\ServerRequestInterface $request The request.
<add> * @param \Psr\Http\Server\RequestHandlerInterface $handler The request handler.
<add> * @return \Cake\Http\ResponseInterface A response.
<ide> */
<del> public function __invoke(ServerRequest $request, Response $response, callable $next): Response
<add> public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
<ide> {
<ide> $cookies = $request->getCookieParams();
<ide> $cookieData = Hash::get($cookies, $this->_config['cookieName']);
<ide> public function __invoke(ServerRequest $request, Response $response, callable $n
<ide> if ($method === 'GET' && $cookieData === null) {
<ide> $token = $this->_createToken();
<ide> $request = $this->_addTokenToRequest($token, $request);
<del> $response = $this->_addTokenCookie($token, $request, $response);
<add> $response = $handler->handle($request);
<ide>
<del> return $next($request, $response);
<add> return $this->_addTokenCookie($token, $request, $response);
<ide> }
<ide> $request = $this->_validateAndUnsetTokenField($request);
<ide>
<del> return $next($request, $response);
<add> return $handler->handle($request);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Http/Middleware/CsrfProtectionMiddlewareTest.php
<ide> use Cake\Http\ServerRequest;
<ide> use Cake\I18n\Time;
<ide> use Cake\TestSuite\TestCase;
<add>use TestApp\Http\TestRequestHandler;
<ide>
<ide> /**
<ide> * Test for CsrfProtection
<ide> public static function httpMethodProvider()
<ide> }
<ide>
<ide> /**
<del> * Provides the callback for the next middleware
<add> * Provides the request handler
<ide> *
<del> * @return callable
<add> * @return \Psr\Server\RequestHandlerInterface
<ide> */
<del> protected function _getNextClosure()
<add> protected function _getRequestHandler()
<ide> {
<del> return function ($request, $response) {
<del> return $response;
<del> };
<add> return new TestRequestHandler(function ($request) {
<add> return new Response();
<add> });
<ide> }
<ide>
<ide> /**
<ide> public function testSettingCookie()
<ide> 'environment' => ['REQUEST_METHOD' => 'GET'],
<ide> 'webroot' => '/dir/',
<ide> ]);
<del> $response = new Response();
<ide>
<del> $closure = function ($request, $response) {
<del> $cookie = $response->getCookie('csrfToken');
<del> $this->assertNotEmpty($cookie, 'Should set a token.');
<del> $this->assertRegExp('/^[a-f0-9]+$/', $cookie['value'], 'Should look like a hash.');
<del> $this->assertEquals(0, $cookie['expire'], 'session duration.');
<del> $this->assertEquals('/dir/', $cookie['path'], 'session path.');
<del> $this->assertEquals($cookie['value'], $request->getParam('_csrfToken'));
<add> $updatedRequest = null;
<add> $handler = new TestRequestHandler(function ($request) use (&$updatedRequest) {
<add> $updatedRequest = $request;
<ide>
<del> return $response;
<del> };
<add> return new Response();
<add> });
<ide>
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $closure);
<add> $response = $middleware->process($request, $handler);
<add>
<add> $cookie = $response->getCookie('csrfToken');
<add> $this->assertNotEmpty($cookie, 'Should set a token.');
<add> $this->assertRegExp('/^[a-f0-9]+$/', $cookie['value'], 'Should look like a hash.');
<add> $this->assertEquals(0, $cookie['expire'], 'session duration.');
<add> $this->assertEquals('/dir/', $cookie['path'], 'session path.');
<add> $this->assertEquals($cookie['value'], $updatedRequest->getParam('_csrfToken'));
<ide> }
<ide>
<ide> /**
<ide> public function testSafeMethodNoCsrfRequired($method)
<ide> ],
<ide> 'cookies' => ['csrfToken' => 'testing123'],
<ide> ]);
<del> $response = new Response();
<ide>
<ide> // No exception means the test is valid
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $response = $middleware($request, $response, $this->_getNextClosure());
<add> $response = $middleware->process($request, $this->_getRequestHandler());
<ide> $this->assertInstanceOf(Response::class, $response);
<ide> }
<ide>
<ide> public function testValidTokenInHeader($method)
<ide>
<ide> // No exception means the test is valid
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $response = $middleware($request, $response, $this->_getNextClosure());
<add> $response = $middleware->process($request, $this->_getRequestHandler());
<ide> $this->assertInstanceOf(Response::class, $response);
<ide> }
<ide>
<ide> public function testInvalidTokenInHeader($method)
<ide> $response = new Response();
<ide>
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $this->_getNextClosure());
<add> $middleware->process($request, $this->_getRequestHandler());
<ide> }
<ide>
<ide> /**
<ide> public function testValidTokenRequestData($method)
<ide> ]);
<ide> $response = new Response();
<ide>
<del> $closure = function ($request, $response) {
<add> $handler = new TestRequestHandler(function ($request) {
<ide> $this->assertNull($request->getData('_csrfToken'));
<ide>
<del> return $response;
<del> };
<add> return new Response();
<add> });
<ide>
<ide> // No exception means everything is OK
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $closure);
<add> $middleware->process($request, $handler);
<ide> }
<ide>
<ide> /**
<ide> public function testInvalidTokenRequestData($method)
<ide> $response = new Response();
<ide>
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $this->_getNextClosure());
<add> $middleware->process($request, $this->_getRequestHandler());
<ide> }
<ide>
<ide> /**
<ide> public function testInvalidTokenRequestDataMissing()
<ide> $response = new Response();
<ide>
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $this->_getNextClosure());
<add> $middleware->process($request, $this->_getRequestHandler());
<ide> }
<ide>
<ide> /**
<ide> public function testInvalidTokenMissingCookie($method)
<ide> $response = new Response();
<ide>
<ide> $middleware = new CsrfProtectionMiddleware();
<del> $middleware($request, $response, $this->_getNextClosure());
<add> $middleware->process($request, $this->_getRequestHandler());
<ide> }
<ide>
<ide> /**
<ide> public function testConfigurationCookieCreate()
<ide> 'environment' => ['REQUEST_METHOD' => 'GET'],
<ide> 'webroot' => '/dir/',
<ide> ]);
<del> $response = new Response();
<del>
<del> $closure = function ($request, $response) {
<del> $this->assertEmpty($response->getCookie('csrfToken'));
<del> $cookie = $response->getCookie('token');
<del> $this->assertNotEmpty($cookie, 'Should set a token.');
<del> $this->assertRegExp('/^[a-f0-9]+$/', $cookie['value'], 'Should look like a hash.');
<del> $this->assertWithinRange((new Time('+1 hour'))->format('U'), $cookie['expire'], 1, 'session duration.');
<del> $this->assertEquals('/dir/', $cookie['path'], 'session path.');
<del> $this->assertTrue($cookie['secure'], 'cookie security flag missing');
<del> $this->assertTrue($cookie['httpOnly'], 'cookie httpOnly flag missing');
<del>
<del> return $response;
<del> };
<ide>
<ide> $middleware = new CsrfProtectionMiddleware([
<ide> 'cookieName' => 'token',
<ide> 'expiry' => '+1 hour',
<ide> 'secure' => true,
<ide> 'httpOnly' => true,
<ide> ]);
<del> $middleware($request, $response, $closure);
<add> $response = $middleware->process($request, $this->_getRequestHandler());
<add>
<add> $this->assertEmpty($response->getCookie('csrfToken'));
<add> $cookie = $response->getCookie('token');
<add> $this->assertNotEmpty($cookie, 'Should set a token.');
<add> $this->assertRegExp('/^[a-f0-9]+$/', $cookie['value'], 'Should look like a hash.');
<add> $this->assertWithinRange((new Time('+1 hour'))->format('U'), $cookie['expire'], 1, 'session duration.');
<add> $this->assertEquals('/dir/', $cookie['path'], 'session path.');
<add> $this->assertTrue($cookie['secure'], 'cookie security flag missing');
<add> $this->assertTrue($cookie['httpOnly'], 'cookie httpOnly flag missing');
<ide> }
<ide>
<ide> /**
<ide> public function testConfigurationValidate()
<ide> 'field' => 'token',
<ide> 'expiry' => 90,
<ide> ]);
<del> $response = $middleware($request, $response, $this->_getNextClosure());
<add> $response = $middleware->process($request, $this->_getRequestHandler());
<ide> $this->assertInstanceOf(Response::class, $response);
<ide> }
<ide> } | 2 |
PHP | PHP | add str() and string() to request object | c9d34b7be0611d26f3e46669934cf542cc5e9e21 | <ide><path>src/Illuminate/Http/Concerns/InteractsWithInput.php
<ide> public function dump($keys = [])
<ide>
<ide> return $this;
<ide> }
<add>
<add> /**
<add> * Retrieve input from the request as a stringable.
<add> *
<add> * @param string $key
<add> * @param mixed $default
<add> * @return \Illuminate\Support\Stringable
<add> */
<add> public function str($key, $default = null)
<add> {
<add> return $this->string($key, $default);
<add> }
<add>
<add> /**
<add> * Retrieve input from the request as a stringable.
<add> *
<add> * @param string $key
<add> * @param mixed $default
<add> * @return \Illuminate\Support\Stringable
<add> */
<add> public function string($key, $default = null)
<add> {
<add> return str($this->input($key, $default));
<add> }
<ide> } | 1 |
Javascript | Javascript | use attributes to set .type | b447b409585a673e0259f51a46523b845ae12e25 | <ide><path>src/renderers/dom/shared/HTMLDOMPropertyConfig.js
<ide> var HTMLDOMPropertyConfig = {
<ide> tabIndex: null,
<ide> target: null,
<ide> title: null,
<del> type: null,
<add> // Setting .type throws on non-<input> tags
<add> type: MUST_USE_ATTRIBUTE,
<ide> useMap: null,
<ide> value: MUST_USE_PROPERTY | HAS_SIDE_EFFECTS,
<ide> width: MUST_USE_ATTRIBUTE, | 1 |
PHP | PHP | use single line | 7d9c4feec50429dd3151065dbfdd45317805681d | <ide><path>src/Illuminate/View/Compilers/BladeCompiler.php
<ide> public function compile($path = null)
<ide> }
<ide>
<ide> if (! is_null($this->cachePath)) {
<del> $contents = $this->compileString(
<del> $this->files->get($this->getPath())
<del> );
<add> $contents = $this->compileString($this->files->get($this->getPath()));
<ide>
<ide> if (! empty($this->getPath())) {
<ide> $contents = $this->appendFilePath($contents); | 1 |
Text | Text | add redux-resource to ecosystem | a74b9bfa635bed432813afefc9cfb19ec29f79d4 | <ide><path>docs/introduction/Ecosystem.md
<ide> On this page we will only feature a few of them that the Redux maintainers have
<ide>
<ide> * [redux-form](https://github.com/erikras/redux-form) — Keep React form state in Redux
<ide> * [react-redux-form](https://github.com/davidkpiano/react-redux-form) — Create forms easily in React with Redux
<add>* [redux-resource](https://github.com/jmeas/redux-resource) — Manage remote resources with Redux
<ide>
<ide> ### Enhancers
<ide> | 1 |
Javascript | Javascript | add a test for ssr stack traces | 377e1a049e03cbbf78a63ecadf31c3460602fe06 | <ide><path>packages/react-dom/src/__tests__/ReactServerRendering-test.js
<ide> describe('ReactDOMServer', () => {
<ide> require('react-dom');
<ide> }).not.toThrow();
<ide> });
<add>
<add> it('includes a useful stack in warnings', () => {
<add> function A() {
<add> return null;
<add> }
<add>
<add> function B() {
<add> return (
<add> <font>
<add> <C>
<add> <span ariaTypo="no" />
<add> </C>
<add> </font>
<add> );
<add> }
<add>
<add> class C extends React.Component {
<add> render() {
<add> return <b>{this.props.children}</b>;
<add> }
<add> }
<add>
<add> function Child() {
<add> return [<A key="1" />, <B key="2" />, <span ariaTypo2="no" />];
<add> }
<add>
<add> function App() {
<add> return (
<add> <div>
<add> <section />
<add> <span>
<add> <Child />
<add> </span>
<add> </div>
<add> );
<add> }
<add>
<add> expect(() => ReactDOMServer.renderToString(<App />)).toWarnDev([
<add> 'Invalid ARIA attribute `ariaTypo`. ARIA attributes follow the pattern aria-* and must be lowercase.\n' +
<add> ' in span (at **)\n' +
<add> ' in b (at **)\n' +
<add> ' in C (at **)\n' +
<add> ' in font (at **)\n' +
<add> ' in B (at **)\n' +
<add> ' in Child (at **)\n' +
<add> ' in span (at **)\n' +
<add> ' in div (at **)\n' +
<add> ' in App (at **)',
<add> 'Invalid ARIA attribute `ariaTypo2`. ARIA attributes follow the pattern aria-* and must be lowercase.\n' +
<add> ' in span (at **)\n' +
<add> ' in Child (at **)\n' +
<add> ' in span (at **)\n' +
<add> ' in div (at **)\n' +
<add> ' in App (at **)',
<add> ]);
<add> });
<ide> }); | 1 |
Mixed | Javascript | use keep-alive by default in global agents | 4267b92604ad78584244488e7f7508a690cb80d0 | <ide><path>doc/api/http.md
<ide> type other than {net.Socket}.
<ide>
<ide> <!-- YAML
<ide> added: v0.1.90
<add>changes:
<add> - version:
<add> - REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/43522
<add> description: The method closes idle connections before returning.
<add>
<ide> -->
<ide>
<ide> * `callback` {Function}
<ide>
<del>Stops the server from accepting new connections. See [`net.Server.close()`][].
<add>Stops the server from accepting new connections and closes all connections
<add>connected to this server which are not sending a request or waiting for
<add>a response.
<add>See [`net.Server.close()`][].
<ide>
<ide> ### `server.closeAllConnections()`
<ide>
<ide> server.listen(8000);
<ide>
<ide> <!-- YAML
<ide> added: v0.5.9
<add>changes:
<add> - version:
<add> - REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/43522
<add> description: The agent now uses HTTP Keep-Alive by default.
<ide> -->
<ide>
<ide> * {http.Agent}
<ide><path>doc/api/https.md
<ide> https.get('https://encrypted.google.com/', (res) => {
<ide>
<ide> <!-- YAML
<ide> added: v0.5.9
<add>changes:
<add> - version:
<add> - REPLACEME
<add> pr-url: https://github.com/nodejs/node/pull/43522
<add> description: The agent now uses HTTP Keep-Alive by default.
<ide> -->
<ide>
<ide> Global instance of [`https.Agent`][] for all HTTPS client requests.
<ide><path>lib/_http_agent.js
<ide> const {
<ide> ArrayPrototypeSplice,
<ide> FunctionPrototypeCall,
<ide> NumberIsNaN,
<add> NumberParseInt,
<ide> ObjectCreate,
<ide> ObjectKeys,
<ide> ObjectSetPrototypeOf,
<ide> ObjectValues,
<add> RegExpPrototypeExec,
<ide> StringPrototypeIndexOf,
<ide> StringPrototypeSplit,
<ide> StringPrototypeStartsWith,
<ide> Agent.prototype.keepSocketAlive = function keepSocketAlive(socket) {
<ide> socket.setKeepAlive(true, this.keepAliveMsecs);
<ide> socket.unref();
<ide>
<del> const agentTimeout = this.options.timeout || 0;
<add> let agentTimeout = this.options.timeout || 0;
<add>
<add> if (socket._httpMessage?.res) {
<add> const keepAliveHint = socket._httpMessage.res.headers['keep-alive'];
<add>
<add> if (keepAliveHint) {
<add> const hint = RegExpPrototypeExec(/^timeout=(\d+)/, keepAliveHint)?.[1];
<add>
<add> if (hint) {
<add> const serverHintTimeout = NumberParseInt(hint) * 1000;
<add>
<add> if (serverHintTimeout < agentTimeout) {
<add> agentTimeout = serverHintTimeout;
<add> }
<add> }
<add> }
<add> }
<add>
<ide> if (socket.timeout !== agentTimeout) {
<ide> socket.setTimeout(agentTimeout);
<ide> }
<ide> function asyncResetHandle(socket) {
<ide>
<ide> module.exports = {
<ide> Agent,
<del> globalAgent: new Agent()
<add> globalAgent: new Agent({ keepAlive: true, scheduling: 'lifo', timeout: 5000 })
<ide> };
<ide><path>lib/_http_server.js
<ide> ObjectSetPrototypeOf(Server.prototype, net.Server.prototype);
<ide> ObjectSetPrototypeOf(Server, net.Server);
<ide>
<ide> Server.prototype.close = function() {
<add> this.closeIdleConnections();
<ide> clearInterval(this[kConnectionsCheckingInterval]);
<ide> ReflectApply(net.Server.prototype.close, this, arguments);
<ide> };
<ide><path>lib/https.js
<ide> Agent.prototype._evictSession = function _evictSession(key) {
<ide> delete this._sessionCache.map[key];
<ide> };
<ide>
<del>const globalAgent = new Agent();
<add>const globalAgent = new Agent({ keepAlive: true, scheduling: 'lifo', timeout: 5000 });
<ide>
<ide> /**
<ide> * Makes a request to a secure web server.
<ide><path>test/async-hooks/test-graph.http.js
<ide> const hooks = initHooks();
<ide> hooks.enable();
<ide>
<ide> const server = http.createServer(common.mustCall((req, res) => {
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> res.end();
<ide> server.close(common.mustCall());
<ide> }));
<ide><path>test/parallel/test-http-agent-no-wait.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const http = require('http');
<add>
<add>const server = http.createServer(function(req, res) {
<add> res.writeHead(200);
<add> res.end();
<add>});
<add>
<add>server.listen(0, common.mustCall(() => {
<add> const req = http.get({ port: server.address().port }, (res) => {
<add> assert.strictEqual(res.statusCode, 200);
<add>
<add> res.resume();
<add> server.close();
<add> });
<add>
<add> req.end();
<add>}));
<add>
<add>// This timer should never go off as the server will close the socket
<add>setTimeout(common.mustNotCall(), 1000).unref();
<ide><path>test/parallel/test-http-client-agent.js
<ide> const Countdown = require('../common/countdown');
<ide>
<ide> let name;
<ide> const max = 3;
<add>const agent = new http.Agent();
<ide>
<ide> const server = http.Server(common.mustCall((req, res) => {
<ide> if (req.url === '/0') {
<ide> const server = http.Server(common.mustCall((req, res) => {
<ide> }
<ide> }, max));
<ide> server.listen(0, common.mustCall(() => {
<del> name = http.globalAgent.getName({ port: server.address().port });
<add> name = agent.getName({ port: server.address().port });
<ide> for (let i = 0; i < max; ++i)
<ide> request(i);
<ide> }));
<ide>
<ide> const countdown = new Countdown(max, () => {
<del> assert(!(name in http.globalAgent.sockets));
<del> assert(!(name in http.globalAgent.requests));
<add> assert(!(name in agent.sockets));
<add> assert(!(name in agent.requests));
<ide> server.close();
<ide> });
<ide>
<ide> function request(i) {
<ide> const req = http.get({
<ide> port: server.address().port,
<del> path: `/${i}`
<add> path: `/${i}`,
<add> agent
<ide> }, function(res) {
<ide> const socket = req.socket;
<ide> socket.on('close', common.mustCall(() => {
<ide> countdown.dec();
<ide> if (countdown.remaining > 0) {
<del> assert.strictEqual(http.globalAgent.sockets[name].includes(socket),
<add> assert.strictEqual(agent.sockets[name].includes(socket),
<ide> false);
<ide> }
<ide> }));
<ide><path>test/parallel/test-http-client-close-with-default-agent.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const http = require('http');
<add>
<add>const server = http.createServer(function(req, res) {
<add> res.writeHead(200);
<add> res.end();
<add>});
<add>
<add>server.listen(0, common.mustCall(() => {
<add> const req = http.get({ port: server.address().port }, (res) => {
<add> assert.strictEqual(res.statusCode, 200);
<add> res.resume();
<add> server.close();
<add> });
<add>
<add> req.end();
<add>}));
<add>
<add>// This timer should never go off as the server will close the socket
<add>setTimeout(common.mustNotCall(), common.platformTimeout(10000)).unref();
<ide><path>test/parallel/test-http-client-headers-array.js
<ide> function execute(options) {
<ide> const expectHeaders = {
<ide> 'x-foo': 'boom',
<ide> 'cookie': 'a=1; b=2; c=3',
<del> 'connection': 'close'
<add> 'connection': 'keep-alive'
<ide> };
<ide>
<ide> // no Host header when you set headers an array
<ide> function execute(options) {
<ide>
<ide> assert.deepStrictEqual(req.headers, expectHeaders);
<ide>
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> res.end();
<ide> }).listen(0, function() {
<ide> options = Object.assign(options, {
<ide><path>test/parallel/test-http-client-keep-alive-hint.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const http = require('http');
<add>
<add>const server = http.createServer(
<add> { keepAliveTimeout: common.platformTimeout(60000) },
<add> function(req, res) {
<add> req.resume();
<add> res.writeHead(200, { 'Connection': 'keep-alive', 'Keep-Alive': 'timeout=1' });
<add> res.end('FOO');
<add> }
<add>);
<add>
<add>server.listen(0, common.mustCall(() => {
<add> http.get({ port: server.address().port }, (res) => {
<add> assert.strictEqual(res.statusCode, 200);
<add>
<add> res.resume();
<add> server.close();
<add> });
<add>}));
<add>
<add>
<add>// This timer should never go off as the agent will parse the hint and terminate earlier
<add>setTimeout(common.mustNotCall(), common.platformTimeout(3000)).unref();
<ide><path>test/parallel/test-http-client-spurious-aborted.js
<ide> const N = 2;
<ide> let abortRequest = true;
<ide>
<ide> const server = http.Server(common.mustCall((req, res) => {
<del> const headers = { 'Content-Type': 'text/plain' };
<add> const headers = { 'Content-Type': 'text/plain', 'Connection': 'close' };
<ide> headers['Content-Length'] = 50;
<ide> const socket = res.socket;
<ide> res.writeHead(200, headers);
<ide><path>test/parallel/test-http-client-timeout-on-connect.js
<ide> const server = http.createServer((req, res) => {
<ide>
<ide> server.listen(0, common.localhostIPv4, common.mustCall(() => {
<ide> const port = server.address().port;
<del> const req = http.get(`http://${common.localhostIPv4}:${port}`);
<add> const req = http.get(
<add> `http://${common.localhostIPv4}:${port}`,
<add> { agent: new http.Agent() }
<add> );
<ide>
<ide> req.setTimeout(1);
<ide> req.on('socket', common.mustCall((socket) => {
<ide><path>test/parallel/test-http-content-length.js
<ide> const http = require('http');
<ide> const Countdown = require('../common/countdown');
<ide>
<ide> const expectedHeadersMultipleWrites = {
<del> 'connection': 'close',
<add> 'connection': 'keep-alive',
<ide> 'transfer-encoding': 'chunked',
<ide> };
<ide>
<ide> const expectedHeadersEndWithData = {
<del> 'connection': 'close',
<del> 'content-length': String('hello world'.length)
<add> 'connection': 'keep-alive',
<add> 'content-length': String('hello world'.length),
<ide> };
<ide>
<ide> const expectedHeadersEndNoData = {
<del> 'connection': 'close',
<add> 'connection': 'keep-alive',
<ide> 'content-length': '0',
<ide> };
<ide>
<ide> const countdown = new Countdown(3, () => server.close());
<ide>
<ide> const server = http.createServer(function(req, res) {
<ide> res.removeHeader('Date');
<add> res.setHeader('Keep-Alive', 'timeout=1');
<ide>
<ide> switch (req.url.substr(1)) {
<ide> case 'multiple-writes':
<ide> server.listen(0, function() {
<ide> req.write('hello ');
<ide> req.end('world');
<ide> req.on('response', function(res) {
<del> assert.deepStrictEqual(res.headers, expectedHeadersMultipleWrites);
<add> assert.deepStrictEqual(res.headers, { ...expectedHeadersMultipleWrites, 'keep-alive': 'timeout=1' });
<add> res.resume();
<ide> });
<ide>
<ide> req = http.request({
<ide> server.listen(0, function() {
<ide> req.removeHeader('Host');
<ide> req.end('hello world');
<ide> req.on('response', function(res) {
<del> assert.deepStrictEqual(res.headers, expectedHeadersEndWithData);
<add> assert.deepStrictEqual(res.headers, { ...expectedHeadersEndWithData, 'keep-alive': 'timeout=1' });
<add> res.resume();
<ide> });
<ide>
<ide> req = http.request({
<ide> server.listen(0, function() {
<ide> req.removeHeader('Host');
<ide> req.end();
<ide> req.on('response', function(res) {
<del> assert.deepStrictEqual(res.headers, expectedHeadersEndNoData);
<add> assert.deepStrictEqual(res.headers, { ...expectedHeadersEndNoData, 'keep-alive': 'timeout=1' });
<add> res.resume();
<ide> });
<ide>
<ide> });
<ide><path>test/parallel/test-http-default-encoding.js
<ide> const server = http.Server((req, res) => {
<ide> req.on('data', (chunk) => {
<ide> result += chunk;
<ide> }).on('end', () => {
<del> server.close();
<ide> res.writeHead(200);
<ide> res.end('hello world\n');
<add> server.close();
<ide> });
<ide>
<ide> });
<ide><path>test/parallel/test-http-max-headers-count.js
<ide> const server = http.createServer(function(req, res) {
<ide> expected = maxAndExpected[requests][1];
<ide> server.maxHeadersCount = max;
<ide> }
<del> res.writeHead(200, headers);
<add> res.writeHead(200, { ...headers, 'Connection': 'close' });
<ide> res.end();
<ide> });
<ide> server.maxHeadersCount = max;
<ide><path>test/parallel/test-http-outgoing-message-capture-rejection.js
<ide> events.captureRejections = true;
<ide>
<ide> res.socket.on('error', common.mustCall((err) => {
<ide> assert.strictEqual(err, _err);
<add> server.close();
<ide> }));
<ide>
<ide> // Write until there is space in the buffer
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> while (res.write('hello'));
<ide> }));
<ide>
<ide> events.captureRejections = true;
<ide> code: 'ECONNRESET'
<ide> }));
<ide> res.resume();
<del> server.close();
<ide> }));
<ide> }));
<ide> }
<ide><path>test/parallel/test-http-raw-headers.js
<ide> http.createServer(function(req, res) {
<ide> 'x-BaR',
<ide> 'yoyoyo',
<ide> 'Connection',
<del> 'close',
<add> 'keep-alive',
<ide> ];
<ide> const expectHeaders = {
<ide> 'host': `localhost:${this.address().port}`,
<ide> 'transfer-encoding': 'CHUNKED',
<ide> 'x-bar': 'yoyoyo',
<del> 'connection': 'close'
<add> 'connection': 'keep-alive'
<ide> };
<ide> const expectRawTrailers = [
<ide> 'x-bAr',
<ide> http.createServer(function(req, res) {
<ide> });
<ide>
<ide> req.resume();
<add> res.setHeader('Keep-Alive', 'timeout=1');
<ide> res.setHeader('Trailer', 'x-foo');
<ide> res.addTrailers([
<ide> ['x-fOo', 'xOxOxOx'],
<ide> http.createServer(function(req, res) {
<ide> req.end('y b a r');
<ide> req.on('response', function(res) {
<ide> const expectRawHeaders = [
<add> 'Keep-Alive',
<add> 'timeout=1',
<ide> 'Trailer',
<ide> 'x-foo',
<ide> 'Date',
<ide> null,
<ide> 'Connection',
<del> 'close',
<add> 'keep-alive',
<ide> 'Transfer-Encoding',
<ide> 'chunked',
<ide> ];
<ide> const expectHeaders = {
<add> 'keep-alive': 'timeout=1',
<ide> 'trailer': 'x-foo',
<ide> 'date': null,
<del> 'connection': 'close',
<add> 'connection': 'keep-alive',
<ide> 'transfer-encoding': 'chunked'
<ide> };
<del> res.rawHeaders[3] = null;
<add> res.rawHeaders[5] = null;
<ide> res.headers.date = null;
<ide> assert.deepStrictEqual(res.rawHeaders, expectRawHeaders);
<ide> assert.deepStrictEqual(res.headers, expectHeaders);
<ide><path>test/parallel/test-http-request-end.js
<ide> const server = http.Server(function(req, res) {
<ide>
<ide> req.on('end', function() {
<ide> assert.strictEqual(result, expected);
<del> server.close();
<ide> res.writeHead(200);
<ide> res.end('hello world\n');
<add> server.close();
<ide> });
<ide>
<ide> });
<ide><path>test/parallel/test-http-should-keep-alive.js
<ide> const getCountdownIndex = () => SERVER_RESPONSES.length - countdown.remaining;
<ide>
<ide> const server = net.createServer(function(socket) {
<ide> socket.write(SERVER_RESPONSES[getCountdownIndex()]);
<add>
<add> if (SHOULD_KEEP_ALIVE[getCountdownIndex()]) {
<add> socket.end();
<add> }
<ide> }).listen(0, function() {
<ide> function makeRequest() {
<ide> const req = http.get({ port: server.address().port }, function(res) {
<ide><path>test/parallel/test-http-unix-socket-keep-alive.js
<ide> server.listen(common.PIPE, common.mustCall(() =>
<ide> function asyncLoop(fn, times, cb) {
<ide> fn(function handler() {
<ide> if (--times) {
<del> fn(handler);
<add> setTimeout(() => fn(handler), common.platformTimeout(10));
<ide> } else {
<ide> cb();
<ide> }
<ide><path>test/parallel/test-https-agent-session-eviction.js
<ide> const options = {
<ide>
<ide> // Create TLS1.2 server
<ide> https.createServer(options, function(req, res) {
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> res.end('ohai');
<ide> }).listen(0, function() {
<ide> first(this);
<ide> function first(server) {
<ide> function faultyServer(port) {
<ide> options.secureProtocol = 'TLSv1_method';
<ide> https.createServer(options, function(req, res) {
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> res.end('hello faulty');
<ide> }).listen(port, function() {
<ide> second(this);
<ide><path>test/parallel/test-https-max-headers-count.js
<ide> const server = https.createServer(serverOptions, common.mustCall((req, res) => {
<ide> expected = maxAndExpected[requests][1];
<ide> server.maxHeadersCount = max;
<ide> }
<del> res.writeHead(200, headers);
<add> res.writeHead(200, { ...headers, 'Connection': 'close' });
<ide> res.end();
<ide> }, 3));
<ide> server.maxHeadersCount = max;
<ide><path>test/parallel/test-stream-destroy.js
<ide> const http = require('http');
<ide>
<ide> server.listen(0, () => {
<ide> const req = http.request({
<del> port: server.address().port
<add> port: server.address().port,
<add> agent: new http.Agent()
<ide> });
<ide>
<ide> req.write('asd');
<ide> const http = require('http');
<ide>
<ide> server.listen(0, () => {
<ide> const req = http.request({
<del> port: server.address().port
<add> port: server.address().port,
<add> agent: new http.Agent()
<ide> });
<ide>
<ide> req.write('asd');
<ide><path>test/parallel/test-tls-over-http-tunnel.js
<ide> const proxy = net.createServer((clientSocket) => {
<ide> 'HTTP/1.1\r\n' +
<ide> 'Proxy-Connections: keep-alive\r\n' +
<ide> `Host: localhost:${proxy.address().port}\r\n` +
<del> 'Connection: close\r\n\r\n');
<add> 'Connection: keep-alive\r\n\r\n');
<ide>
<ide> console.log('PROXY: got CONNECT request');
<ide> console.log('PROXY: creating a tunnel');
<ide><path>test/parallel/test-tls-set-secure-context.js
<ide> function makeRequest(port, id) {
<ide> rejectUnauthorized: true,
<ide> ca: credentialOptions[0].ca,
<ide> servername: 'agent1',
<del> headers: { id }
<add> headers: { id },
<add> agent: new https.Agent()
<ide> };
<ide>
<ide> let errored = false;
<ide><path>test/sequential/test-http-econnrefused.js
<ide> const server = http.createServer(function(req, res) {
<ide>
<ide> req.on('end', function() {
<ide> assert.strictEqual(body, 'PING');
<del> res.writeHead(200);
<add> res.writeHead(200, { 'Connection': 'close' });
<ide> res.end('PONG');
<ide> });
<ide> }); | 27 |
PHP | PHP | remove reference to jshelper | 52a19bb072bf2e689e8a43131537f3ab21c6eadd | <ide><path>lib/Cake/View/View.php
<ide> * @property CacheHelper $Cache
<ide> * @property FormHelper $Form
<ide> * @property HtmlHelper $Html
<del> * @property JsHelper $Js
<ide> * @property NumberHelper $Number
<ide> * @property PaginatorHelper $Paginator
<ide> * @property RssHelper $Rss | 1 |
Javascript | Javascript | add support for /-separated namespace lookup | f78d6a1b4986b8d846d78b21fe7a8aa844ed83d5 | <ide><path>packages/ember-application/lib/system/application.js
<ide>
<ide> var get = Ember.get, set = Ember.set,
<ide> classify = Ember.String.classify,
<add> capitalize = Ember.String.capitalize,
<ide> decamelize = Ember.String.decamelize;
<ide>
<ide> /**
<ide> Ember.Application.reopenClass({
<ide> function resolverFor(namespace) {
<ide> return function(fullName) {
<ide> var nameParts = fullName.split(":"),
<del> type = nameParts[0], name = nameParts[1];
<add> type = nameParts[0], name = nameParts[1],
<add> root = namespace;
<ide>
<ide> if (type === 'template') {
<ide> var templateName = name.replace(/\./g, '/');
<ide> function resolverFor(namespace) {
<ide> name = name.replace(/\./g, '_');
<ide> }
<ide>
<add> if (type !== 'template' && name.indexOf('/') !== -1) {
<add> var parts = name.split('/');
<add> name = parts[parts.length - 1];
<add> var namespaceName = capitalize(parts.slice(0, -1).join('.'));
<add> root = Ember.Namespace.byName(namespaceName);
<add>
<add> Ember.assert('You are looking for a ' + name + ' ' + type + ' in the ' + namespaceName + ' namespace, but it could not be found', root);
<add> }
<add>
<ide> var className = classify(name) + classify(type);
<del> var factory = get(namespace, className);
<add> var factory = get(root, className);
<ide>
<ide> if (factory) { return factory; }
<ide> };
<ide><path>packages/ember-application/tests/system/application_test.js
<ide> test("Minimal Application initialized with just an application template", functi
<ide> equal(trim(Ember.$('#qunit-fixture').text()), 'Hello World');
<ide> });
<ide>
<del>var locator;
<add>var locator, originalLookup = Ember.lookup, lookup;
<add>
<ide> module("Ember.Application Depedency Injection", {
<ide> setup: function(){
<ide> Ember.run(function(){
<ide> module("Ember.Application Depedency Injection", {
<ide> application.register('communication:main', application.Email, {singleton: false});
<ide>
<ide> locator = application.__container__;
<add>
<add> lookup = Ember.lookup = {};
<ide> },
<ide> teardown: function() {
<ide> Ember.run(function(){
<ide> application.destroy();
<ide> });
<ide> application = locator = null;
<add> Ember.lookup = originalLookup;
<ide> }
<ide> });
<ide>
<ide> test('injections', function(){
<ide>
<ide> ok(application.Email.detectInstance(user.get('communication')));
<ide> });
<add>
<add>test('the default resolver hook can look things up in other namespaces', function() {
<add> var UserInterface = lookup.UserInterface = Ember.Namespace.create();
<add> UserInterface.NavigationController = Ember.Controller.extend();
<add>
<add> var nav = locator.lookup('controller:userInterface/navigation');
<add>
<add> ok(nav instanceof UserInterface.NavigationController, "the result should be an instance of the specified class");
<add>}); | 2 |
PHP | PHP | fix bug in postgres processor | c3b19a2ed40ecb36c07ed74898b63fb92fa480cd | <ide><path>src/Illuminate/Database/Query/Processors/PostgresProcessor.php
<ide> public function processInsertGetId(Builder $query, $sql, $values, $sequence = nu
<ide>
<ide> $result = (array) $results[0];
<ide>
<del> return (int) $row[$sequence];
<add> return (int) $result[$sequence];
<ide> }
<ide>
<ide> }
<ide>\ No newline at end of file | 1 |
Text | Text | add changelogs for assert | a0af30533c43fe6939b8a25733a69a137b3a472d | <ide><path>doc/api/assert.md
<ide> assert(false, 'it\'s false');
<ide> ## assert.deepEqual(actual, expected[, message])
<ide> <!-- YAML
<ide> added: v0.1.21
<add>changes:
<add> - version: v6.4.0, v4.7.1
<add> pr-url: https://github.com/nodejs/node/pull/8002
<add> description: Typed array slices are handled correctly now.
<add> - version: v6.1.0, v4.5.0
<add> pr-url: https://github.com/nodejs/node/pull/6432
<add> description: Objects with circular references can be used as inputs now.
<add> - version: v5.10.1, v4.4.3
<add> pr-url: https://github.com/nodejs/node/pull/5910
<add> description: Handle non-`Uint8Array` typed arrays correctly.
<ide> -->
<ide>
<ide> Tests for deep equality between the `actual` and `expected` parameters.
<ide> parameter is undefined, a default error message is assigned.
<ide> ## assert.deepStrictEqual(actual, expected[, message])
<ide> <!-- YAML
<ide> added: v1.2.0
<add>changes:
<add> - version: v6.4.0, v4.7.1
<add> pr-url: https://github.com/nodejs/node/pull/8002
<add> description: Typed array slices are handled correctly now.
<add> - version: v6.1.0
<add> pr-url: https://github.com/nodejs/node/pull/6432
<add> description: Objects with circular references can be used as inputs now.
<add> - version: v5.10.1, v4.4.3
<add> pr-url: https://github.com/nodejs/node/pull/5910
<add> description: Handle non-`Uint8Array` typed arrays correctly.
<ide> -->
<ide>
<ide> Generally identical to `assert.deepEqual()` with two exceptions. First,
<ide> parameter is undefined, a default error message is assigned.
<ide> ## assert.doesNotThrow(block[, error][, message])
<ide> <!-- YAML
<ide> added: v0.1.21
<add>changes:
<add> - version: v5.11.0, v4.4.5
<add> pr-url: https://github.com/nodejs/node/pull/2407
<add> description: The `message` parameter is respected now.
<add> - version: v4.2.0
<add> pr-url: https://github.com/nodejs/node/pull/3276
<add> description: The `error` parameter can now be an arrow function.
<ide> -->
<ide>
<ide> Asserts that the function `block` does not throw an error. See
<ide> If the values are not strictly equal, an `AssertionError` is thrown with a
<ide> ## assert.throws(block[, error][, message])
<ide> <!-- YAML
<ide> added: v0.1.21
<add>changes:
<add> - version: v4.2.0
<add> pr-url: https://github.com/nodejs/node/pull/3276
<add> description: The `error` parameter can now be an arrow function.
<ide> -->
<ide>
<ide> Expects the function `block` to throw an error. | 1 |
Go | Go | improve ipam test coverage | 4cb0d27cf97ccb6da4698b27c25b20ebb1b2894d | <ide><path>libnetwork/ipam/allocator.go
<ide> func (a *Allocator) getAddress(nw *net.IPNet, bitmask *bitseq.Handle, prefAddres
<ide> // DumpDatabase dumps the internal info
<ide> func (a *Allocator) DumpDatabase() string {
<ide> a.Lock()
<del> defer a.Unlock()
<add> aspaces := make(map[string]*addrSpace, len(a.addrSpaces))
<add> for as, aSpace := range a.addrSpaces {
<add> aspaces[as] = aSpace
<add> }
<add> a.Unlock()
<ide>
<ide> var s string
<del> for as, aSpace := range a.addrSpaces {
<add> for as, aSpace := range aspaces {
<ide> s = fmt.Sprintf("\n\n%s Config", as)
<ide> aSpace.Lock()
<ide> for k, config := range aSpace.subnets {
<ide> s = fmt.Sprintf("%s%s", s, fmt.Sprintf("\n%v: %v", k, config))
<add> if config.Range == nil {
<add> a.retrieveBitmask(k, config.Pool)
<add> }
<ide> }
<ide> aSpace.Unlock()
<ide> }
<ide>
<ide> s = fmt.Sprintf("%s\n\nBitmasks", s)
<ide> for k, bm := range a.addresses {
<del> s = fmt.Sprintf("%s%s", s, fmt.Sprintf("\n\t%s: %s\n\t%d", k, bm, bm.Unselected()))
<add> s = fmt.Sprintf("%s%s", s, fmt.Sprintf("\n%s: %s", k, bm))
<ide> }
<ide>
<ide> return s
<ide><path>libnetwork/ipam/allocator_test.go
<ide> import (
<ide> "encoding/json"
<ide> "fmt"
<ide> "io/ioutil"
<add> "math/rand"
<ide> "net"
<del> "os"
<ide> "testing"
<ide> "time"
<ide>
<ide> func randomLocalStore() (datastore.DataStore, error) {
<ide> })
<ide> }
<ide>
<del>// enable w/ upper case
<del>func TestMain(m *testing.M) {
<del> var err error
<del> if err != nil {
<del> fmt.Println(err)
<del> }
<del>
<del> os.Exit(m.Run())
<del>}
<del>
<ide> func getAllocator() (*Allocator, error) {
<ide> ds, err := randomLocalStore()
<ide> if err != nil {
<ide> func BenchmarkRequest_8(b *testing.B) {
<ide> a, _ := getAllocator()
<ide> benchmarkRequest(b, a, "10.0.0.0/8")
<ide> }
<add>
<add>func TestAllocateRandomDeallocate(t *testing.T) {
<add> testAllocateRandomDeallocate(t, "172.25.0.0/16", "", 384)
<add> testAllocateRandomDeallocate(t, "172.25.0.0/16", "172.25.252.0/22", 384)
<add>}
<add>
<add>func testAllocateRandomDeallocate(t *testing.T, pool, subPool string, num int) {
<add> ds, err := randomLocalStore()
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> a, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> pid, _, _, err := a.RequestPool(localAddressSpace, pool, subPool, nil, false)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add>
<add> // Allocate num ip addresses
<add> indices := make(map[int]*net.IPNet, num)
<add> allocated := make(map[string]bool, num)
<add> for i := 0; i < num; i++ {
<add> ip, _, err := a.RequestAddress(pid, nil, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> ips := ip.String()
<add> if _, ok := allocated[ips]; ok {
<add> t.Fatalf("Address %s is already allocated", ips)
<add> }
<add> allocated[ips] = true
<add> indices[i] = ip
<add> }
<add> if len(indices) != len(allocated) || len(indices) != num {
<add> t.Fatalf("Unexpected number of allocated addresses: (%d,%d).", len(indices), len(allocated))
<add> }
<add>
<add> seed := time.Now().Unix()
<add> rand.Seed(seed)
<add>
<add> // Deallocate half of the allocated addresses following a random pattern
<add> pattern := rand.Perm(num)
<add> for i := 0; i < num/2; i++ {
<add> idx := pattern[i]
<add> ip := indices[idx]
<add> err := a.ReleaseAddress(pid, ip.IP)
<add> if err != nil {
<add> t.Fatalf("Unexpected failure on deallocation of %s: %v.\nSeed: %d.", ip, err, seed)
<add> }
<add> delete(indices, idx)
<add> delete(allocated, ip.String())
<add> }
<add>
<add> // Request a quarter of addresses
<add> for i := 0; i < num/2; i++ {
<add> ip, _, err := a.RequestAddress(pid, nil, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> ips := ip.String()
<add> if _, ok := allocated[ips]; ok {
<add> t.Fatalf("\nAddress %s is already allocated.\nSeed: %d.", ips, seed)
<add> }
<add> allocated[ips] = true
<add> }
<add> if len(allocated) != num {
<add> t.Fatalf("Unexpected number of allocated addresses: %d.\nSeed: %d.", len(allocated), seed)
<add> }
<add>}
<add>
<add>func TestRetrieveFromStore(t *testing.T) {
<add> num := 200
<add> ds, err := randomLocalStore()
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> a, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> pid, _, _, err := a.RequestPool(localAddressSpace, "172.25.0.0/16", "", nil, false)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i := 0; i < num; i++ {
<add> if _, _, err := a.RequestAddress(pid, nil, nil); err != nil {
<add> t.Fatal(err)
<add> }
<add> }
<add>
<add> // Restore
<add> a1, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> a1.refresh(localAddressSpace)
<add> db := a.DumpDatabase()
<add> db1 := a1.DumpDatabase()
<add> if db != db1 {
<add> t.Fatalf("Unexpected db change.\nExpected:%s\nGot:%s", db, db1)
<add> }
<add> checkDBEquality(a, a1, t)
<add> pid, _, _, err = a1.RequestPool(localAddressSpace, "172.25.0.0/16", "172.25.1.0/24", nil, false)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i := 0; i < num/2; i++ {
<add> if _, _, err := a1.RequestAddress(pid, nil, nil); err != nil {
<add> t.Fatal(err)
<add> }
<add> }
<add>
<add> // Restore
<add> a2, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> a2.refresh(localAddressSpace)
<add> checkDBEquality(a1, a2, t)
<add> pid, _, _, err = a2.RequestPool(localAddressSpace, "172.25.0.0/16", "172.25.2.0/24", nil, false)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i := 0; i < num/2; i++ {
<add> if _, _, err := a2.RequestAddress(pid, nil, nil); err != nil {
<add> t.Fatal(err)
<add> }
<add> }
<add>
<add> // Restore
<add> a3, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> a3.refresh(localAddressSpace)
<add> checkDBEquality(a2, a3, t)
<add> pid, _, _, err = a3.RequestPool(localAddressSpace, "172.26.0.0/16", "", nil, false)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> for i := 0; i < num/2; i++ {
<add> if _, _, err := a3.RequestAddress(pid, nil, nil); err != nil {
<add> t.Fatal(err)
<add> }
<add> }
<add>
<add> // Restore
<add> a4, err := NewAllocator(ds, nil)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> a4.refresh(localAddressSpace)
<add> checkDBEquality(a3, a4, t)
<add>}
<add>
<add>func checkDBEquality(a1, a2 *Allocator, t *testing.T) {
<add> for k, cnf1 := range a1.addrSpaces[localAddressSpace].subnets {
<add> cnf2 := a2.addrSpaces[localAddressSpace].subnets[k]
<add> if cnf1.String() != cnf2.String() {
<add> t.Fatalf("%s\n%s", cnf1, cnf2)
<add> }
<add> if cnf1.Range == nil {
<add> a2.retrieveBitmask(k, cnf1.Pool)
<add> }
<add> }
<add>
<add> for k, bm1 := range a1.addresses {
<add> bm2 := a2.addresses[k]
<add> if bm1.String() != bm2.String() {
<add> t.Fatalf("%s\n%s", bm1, bm2)
<add> }
<add> }
<add>} | 2 |
Javascript | Javascript | extract scheduler into its own module | 986c63c6d4a808f0f3108bb9610803c11f04ed47 | <ide><path>src/renderers/shared/fiber/ReactFiberReconciler.js
<ide> 'use strict';
<ide>
<ide> import type { Fiber } from 'ReactFiber';
<del>import type { PriorityLevel } from 'ReactPriorityLevel';
<ide> import type { FiberRoot } from 'ReactFiberRoot';
<ide>
<del>var ReactFiber = require('ReactFiber');
<del>var { beginWork } = require('ReactFiberBeginWork');
<del>var { completeWork } = require('ReactFiberCompleteWork');
<ide> var { createFiberRoot } = require('ReactFiberRoot');
<add>var ReactFiberScheduler = require('ReactFiberScheduler');
<ide>
<ide> var {
<del> NoWork,
<del> HighPriority,
<ide> LowPriority,
<del> OffscreenPriority,
<ide> } = require('ReactPriorityLevel');
<ide>
<ide> type ReactHostElement<T, P> = {
<ide> type Deadline = {
<ide> timeRemaining : () => number
<ide> };
<ide>
<del>var timeHeuristicForUnitOfWork = 1;
<del>
<ide> export type HostConfig<T, P, I> = {
<ide>
<ide> createHostInstance(element : ReactHostElement<T, P>) : I,
<ide> export type Reconciler = {
<ide>
<ide> module.exports = function<T, P, I>(config : HostConfig<T, P, I>) : Reconciler {
<ide>
<del> // const scheduleHighPriCallback = config.scheduleHighPriCallback;
<del> const scheduleLowPriCallback = config.scheduleLowPriCallback;
<del>
<del> // The next work in progress fiber that we're currently working on.
<del> let nextUnitOfWork : ?Fiber = null;
<del>
<del> // Linked list of roots with scheduled work on them.
<del> let nextScheduledRoot : ?FiberRoot = null;
<del> let lastScheduledRoot : ?FiberRoot = null;
<del>
<del> function findNextUnitOfWorkAtPriority(root : FiberRoot, priorityLevel : PriorityLevel) : ?Fiber {
<del> let current = root.current;
<del> while (current) {
<del> if (current.pendingWorkPriority !== 0 &&
<del> current.pendingWorkPriority <= priorityLevel) {
<del> // This node has work to do that fits our priority level criteria.
<del> if (current.pendingProps !== null) {
<del> // We found some work to do. We need to return the "work in progress"
<del> // of this node which will be the alternate.
<del> const clone = ReactFiber.cloneFiber(current, current.pendingWorkPriority);
<del> clone.pendingProps = current.pendingProps;
<del> return clone;
<del> }
<del> // If we have a child let's see if any of our children has work to do.
<del> // Only bother doing this at all if the current priority level matches
<del> // because it is the highest priority for the whole subtree.
<del> // TODO: Coroutines can have work in their stateNode which is another
<del> // type of child that needs to be searched for work.
<del> if (current.child) {
<del> // Ensure we have a work in progress copy to backtrack through.
<del> ReactFiber.cloneFiber(current, NoWork);
<del> current = current.child;
<del> continue;
<del> }
<del> // If we match the priority but has no child and no work to do,
<del> // then we can safely reset the flag.
<del> current.pendingWorkPriority = NoWork;
<del> }
<del> while (!current.sibling) {
<del> // TODO: Stop using parent here. See below.
<del> // $FlowFixMe: This downcast is not safe. It is intentionally an error.
<del> current = current.parent;
<del> if (!current) {
<del> return null;
<del> }
<del> if (current.pendingWorkPriority <= priorityLevel) {
<del> // If this subtree had work left to do, we would have returned it by
<del> // now. This could happen if a child with pending work gets cleaned up
<del> // but we don't clear the flag then. It is safe to reset it now.
<del> current.pendingWorkPriority = NoWork;
<del> }
<del> }
<del> current = current.sibling;
<del> }
<del> return null;
<del> }
<del>
<del> function findNextUnitOfWork() {
<del> // Clear out roots with no more work on them.
<del> while (nextScheduledRoot && nextScheduledRoot.current.pendingWorkPriority === NoWork) {
<del> nextScheduledRoot.isScheduled = false;
<del> if (nextScheduledRoot === lastScheduledRoot) {
<del> nextScheduledRoot = null;
<del> lastScheduledRoot = null;
<del> return null;
<del> }
<del> nextScheduledRoot = nextScheduledRoot.nextScheduledRoot;
<del> }
<del> let root = nextScheduledRoot;
<del> while (root) {
<del> // Find the highest possible priority work to do.
<del> // This loop is unrolled just to satisfy Flow's enum constraint.
<del> // We could make arbitrary many idle priority levels but having
<del> // too many just means flushing changes too often.
<del> let work = findNextUnitOfWorkAtPriority(root, HighPriority);
<del> if (work) {
<del> return work;
<del> }
<del> work = findNextUnitOfWorkAtPriority(root, LowPriority);
<del> if (work) {
<del> return work;
<del> }
<del> work = findNextUnitOfWorkAtPriority(root, OffscreenPriority);
<del> if (work) {
<del> return work;
<del> }
<del> // We didn't find anything to do in this root, so let's try the next one.
<del> root = root.nextScheduledRoot;
<del> }
<del> return null;
<del> }
<del>
<del> function completeUnitOfWork(workInProgress : Fiber) : ?Fiber {
<del> while (true) {
<del> // The current, flushed, state of this fiber is the alternate.
<del> // Ideally nothing should rely on this, but relying on it here
<del> // means that we don't need an additional field on the work in
<del> // progress.
<del> const current = workInProgress.alternate;
<del> const next = completeWork(current, workInProgress);
<del>
<del> // The work is now done. We don't need this anymore. This flags
<del> // to the system not to redo any work here.
<del> workInProgress.pendingProps = null;
<del>
<del> // TODO: Stop using the parent for this purpose. I think this will break
<del> // down in edge cases because when nodes are reused during bailouts, we
<del> // don't know which of two parents was used. Instead we should maintain
<del> // a temporary manual stack.
<del> // $FlowFixMe: This downcast is not safe. It is intentionally an error.
<del> const parent = workInProgress.parent;
<del>
<del> // Ensure that remaining work priority bubbles up.
<del> if (parent && workInProgress.pendingWorkPriority !== NoWork &&
<del> (parent.pendingWorkPriority === NoWork ||
<del> parent.pendingWorkPriority > workInProgress.pendingWorkPriority)) {
<del> parent.pendingWorkPriority = workInProgress.pendingWorkPriority;
<del> }
<del>
<del> if (next) {
<del> // If completing this work spawned new work, do that next.
<del> return next;
<del> } else if (workInProgress.sibling) {
<del> // If there is more work to do in this parent, do that next.
<del> return workInProgress.sibling;
<del> } else if (parent) {
<del> // If there's no more work in this parent. Complete the parent.
<del> workInProgress = parent;
<del> } else {
<del> // If we're at the root, there's no more work to do. We can flush it.
<del> const root : FiberRoot = (workInProgress.stateNode : any);
<del> root.current = workInProgress;
<del> console.log('completed one root flush with remaining work at priority', workInProgress.pendingWorkPriority);
<del> const hasMoreWork = workInProgress.pendingWorkPriority !== NoWork;
<del> // TODO: We can be smarter here and only look for more work in the
<del> // "next" scheduled work since we've already scanned passed. That
<del> // also ensures that work scheduled during reconciliation gets deferred.
<del> const nextWork = findNextUnitOfWork();
<del> if (!nextWork && hasMoreWork) {
<del> throw new Error('FiberRoots should not have flagged more work if there is none.');
<del> }
<del> return nextWork;
<del> }
<del> }
<del> }
<del>
<del> function performUnitOfWork(workInProgress : Fiber) : ?Fiber {
<del> // Ignore work if there is nothing to do.
<del> if (workInProgress.pendingProps === null) {
<del> return null;
<del> }
<del> // The current, flushed, state of this fiber is the alternate.
<del> // Ideally nothing should rely on this, but relying on it here
<del> // means that we don't need an additional field on the work in
<del> // progress.
<del> const current = workInProgress.alternate;
<del> const next = beginWork(current, workInProgress);
<del> if (next) {
<del> // If this spawns new work, do that next.
<del> return next;
<del> } else {
<del> // Otherwise, complete the current work.
<del> return completeUnitOfWork(workInProgress);
<del> }
<del> }
<del>
<del> function performLowPriWork(deadline : Deadline) {
<del> if (!nextUnitOfWork) {
<del> nextUnitOfWork = findNextUnitOfWork();
<del> }
<del> while (nextUnitOfWork) {
<del> if (deadline.timeRemaining() > timeHeuristicForUnitOfWork) {
<del> nextUnitOfWork = performUnitOfWork(nextUnitOfWork);
<del> if (!nextUnitOfWork) {
<del> // Find more work. We might have time to complete some more.
<del> nextUnitOfWork = findNextUnitOfWork();
<del> }
<del> } else {
<del> scheduleLowPriCallback(performLowPriWork);
<del> return;
<del> }
<del> }
<del> }
<del>
<del> function scheduleLowPriWork(root : FiberRoot) {
<del> // We must reset the current unit of work pointer so that we restart the
<del> // search from the root during the next tick, in case there is now higher
<del> // priority work somewhere earlier than before.
<del> nextUnitOfWork = null;
<del>
<del> if (root.isScheduled) {
<del> // If we're already scheduled, we can bail out.
<del> return;
<del> }
<del> root.isScheduled = true;
<del> if (lastScheduledRoot) {
<del> // Schedule ourselves to the end.
<del> lastScheduledRoot.nextScheduledRoot = root;
<del> lastScheduledRoot = root;
<del> } else {
<del> // We're the only work scheduled.
<del> nextScheduledRoot = root;
<del> lastScheduledRoot = root;
<del> scheduleLowPriCallback(performLowPriWork);
<del> }
<del> }
<del>
<del> /*
<del> function performHighPriWork() {
<del> // There is no such thing as high pri work yet.
<del> }
<del>
<del> function ensureHighPriIsScheduled() {
<del> scheduleHighPriCallback(performHighPriWork);
<del> }
<del> */
<add> var { scheduleLowPriWork } = ReactFiberScheduler(config);
<ide>
<ide> return {
<ide>
<ide><path>src/renderers/shared/fiber/ReactFiberScheduler.js
<add>/**
<add> * Copyright 2013-present, Facebook, Inc.
<add> * All rights reserved.
<add> *
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<add> *
<add> * @providesModule ReactFiberScheduler
<add> * @flow
<add> */
<add>
<add>'use strict';
<add>
<add>import type { Fiber } from 'ReactFiber';
<add>import type { PriorityLevel } from 'ReactPriorityLevel';
<add>import type { FiberRoot } from 'ReactFiberRoot';
<add>import type { HostConfig } from 'ReactFiberReconciler';
<add>
<add>var ReactFiber = require('ReactFiber');
<add>var { beginWork } = require('ReactFiberBeginWork');
<add>var { completeWork } = require('ReactFiberCompleteWork');
<add>
<add>var {
<add> NoWork,
<add> HighPriority,
<add> LowPriority,
<add> OffscreenPriority,
<add>} = require('ReactPriorityLevel');
<add>
<add>var timeHeuristicForUnitOfWork = 1;
<add>
<add>module.exports = function<T, P, I>(config : HostConfig<T, P, I>) {
<add>
<add> // const scheduleHighPriCallback = config.scheduleHighPriCallback;
<add> const scheduleLowPriCallback = config.scheduleLowPriCallback;
<add>
<add> // The next work in progress fiber that we're currently working on.
<add> let nextUnitOfWork : ?Fiber = null;
<add>
<add> // Linked list of roots with scheduled work on them.
<add> let nextScheduledRoot : ?FiberRoot = null;
<add> let lastScheduledRoot : ?FiberRoot = null;
<add>
<add> function findNextUnitOfWorkAtPriority(root : FiberRoot, priorityLevel : PriorityLevel) : ?Fiber {
<add> let current = root.current;
<add> while (current) {
<add> if (current.pendingWorkPriority !== 0 &&
<add> current.pendingWorkPriority <= priorityLevel) {
<add> // This node has work to do that fits our priority level criteria.
<add> if (current.pendingProps !== null) {
<add> // We found some work to do. We need to return the "work in progress"
<add> // of this node which will be the alternate.
<add> const clone = ReactFiber.cloneFiber(current, current.pendingWorkPriority);
<add> clone.pendingProps = current.pendingProps;
<add> return clone;
<add> }
<add> // If we have a child let's see if any of our children has work to do.
<add> // Only bother doing this at all if the current priority level matches
<add> // because it is the highest priority for the whole subtree.
<add> // TODO: Coroutines can have work in their stateNode which is another
<add> // type of child that needs to be searched for work.
<add> if (current.child) {
<add> // Ensure we have a work in progress copy to backtrack through.
<add> ReactFiber.cloneFiber(current, NoWork);
<add> current = current.child;
<add> continue;
<add> }
<add> // If we match the priority but has no child and no work to do,
<add> // then we can safely reset the flag.
<add> current.pendingWorkPriority = NoWork;
<add> }
<add> while (!current.sibling) {
<add> // TODO: Stop using parent here. See below.
<add> // $FlowFixMe: This downcast is not safe. It is intentionally an error.
<add> current = current.parent;
<add> if (!current) {
<add> return null;
<add> }
<add> if (current.pendingWorkPriority <= priorityLevel) {
<add> // If this subtree had work left to do, we would have returned it by
<add> // now. This could happen if a child with pending work gets cleaned up
<add> // but we don't clear the flag then. It is safe to reset it now.
<add> current.pendingWorkPriority = NoWork;
<add> }
<add> }
<add> current = current.sibling;
<add> }
<add> return null;
<add> }
<add>
<add> function findNextUnitOfWork() {
<add> // Clear out roots with no more work on them.
<add> while (nextScheduledRoot && nextScheduledRoot.current.pendingWorkPriority === NoWork) {
<add> nextScheduledRoot.isScheduled = false;
<add> if (nextScheduledRoot === lastScheduledRoot) {
<add> nextScheduledRoot = null;
<add> lastScheduledRoot = null;
<add> return null;
<add> }
<add> nextScheduledRoot = nextScheduledRoot.nextScheduledRoot;
<add> }
<add> let root = nextScheduledRoot;
<add> while (root) {
<add> // Find the highest possible priority work to do.
<add> // This loop is unrolled just to satisfy Flow's enum constraint.
<add> // We could make arbitrary many idle priority levels but having
<add> // too many just means flushing changes too often.
<add> let work = findNextUnitOfWorkAtPriority(root, HighPriority);
<add> if (work) {
<add> return work;
<add> }
<add> work = findNextUnitOfWorkAtPriority(root, LowPriority);
<add> if (work) {
<add> return work;
<add> }
<add> work = findNextUnitOfWorkAtPriority(root, OffscreenPriority);
<add> if (work) {
<add> return work;
<add> }
<add> // We didn't find anything to do in this root, so let's try the next one.
<add> root = root.nextScheduledRoot;
<add> }
<add> return null;
<add> }
<add>
<add> function completeUnitOfWork(workInProgress : Fiber) : ?Fiber {
<add> while (true) {
<add> // The current, flushed, state of this fiber is the alternate.
<add> // Ideally nothing should rely on this, but relying on it here
<add> // means that we don't need an additional field on the work in
<add> // progress.
<add> const current = workInProgress.alternate;
<add> const next = completeWork(current, workInProgress);
<add>
<add> // The work is now done. We don't need this anymore. This flags
<add> // to the system not to redo any work here.
<add> workInProgress.pendingProps = null;
<add>
<add> // TODO: Stop using the parent for this purpose. I think this will break
<add> // down in edge cases because when nodes are reused during bailouts, we
<add> // don't know which of two parents was used. Instead we should maintain
<add> // a temporary manual stack.
<add> // $FlowFixMe: This downcast is not safe. It is intentionally an error.
<add> const parent = workInProgress.parent;
<add>
<add> // Ensure that remaining work priority bubbles up.
<add> if (parent && workInProgress.pendingWorkPriority !== NoWork &&
<add> (parent.pendingWorkPriority === NoWork ||
<add> parent.pendingWorkPriority > workInProgress.pendingWorkPriority)) {
<add> parent.pendingWorkPriority = workInProgress.pendingWorkPriority;
<add> }
<add>
<add> if (next) {
<add> // If completing this work spawned new work, do that next.
<add> return next;
<add> } else if (workInProgress.sibling) {
<add> // If there is more work to do in this parent, do that next.
<add> return workInProgress.sibling;
<add> } else if (parent) {
<add> // If there's no more work in this parent. Complete the parent.
<add> workInProgress = parent;
<add> } else {
<add> // If we're at the root, there's no more work to do. We can flush it.
<add> const root : FiberRoot = (workInProgress.stateNode : any);
<add> root.current = workInProgress;
<add> console.log('completed one root flush with remaining work at priority', workInProgress.pendingWorkPriority);
<add> const hasMoreWork = workInProgress.pendingWorkPriority !== NoWork;
<add> // TODO: We can be smarter here and only look for more work in the
<add> // "next" scheduled work since we've already scanned passed. That
<add> // also ensures that work scheduled during reconciliation gets deferred.
<add> const nextWork = findNextUnitOfWork();
<add> if (!nextWork && hasMoreWork) {
<add> throw new Error('FiberRoots should not have flagged more work if there is none.');
<add> }
<add> return nextWork;
<add> }
<add> }
<add> }
<add>
<add> function performUnitOfWork(workInProgress : Fiber) : ?Fiber {
<add> // Ignore work if there is nothing to do.
<add> if (workInProgress.pendingProps === null) {
<add> return null;
<add> }
<add> // The current, flushed, state of this fiber is the alternate.
<add> // Ideally nothing should rely on this, but relying on it here
<add> // means that we don't need an additional field on the work in
<add> // progress.
<add> const current = workInProgress.alternate;
<add> const next = beginWork(current, workInProgress);
<add> if (next) {
<add> // If this spawns new work, do that next.
<add> return next;
<add> } else {
<add> // Otherwise, complete the current work.
<add> return completeUnitOfWork(workInProgress);
<add> }
<add> }
<add>
<add> function performLowPriWork(deadline) {
<add> if (!nextUnitOfWork) {
<add> nextUnitOfWork = findNextUnitOfWork();
<add> }
<add> while (nextUnitOfWork) {
<add> if (deadline.timeRemaining() > timeHeuristicForUnitOfWork) {
<add> nextUnitOfWork = performUnitOfWork(nextUnitOfWork);
<add> if (!nextUnitOfWork) {
<add> // Find more work. We might have time to complete some more.
<add> nextUnitOfWork = findNextUnitOfWork();
<add> }
<add> } else {
<add> scheduleLowPriCallback(performLowPriWork);
<add> return;
<add> }
<add> }
<add> }
<add>
<add> function scheduleLowPriWork(root : FiberRoot) {
<add> // We must reset the current unit of work pointer so that we restart the
<add> // search from the root during the next tick, in case there is now higher
<add> // priority work somewhere earlier than before.
<add> nextUnitOfWork = null;
<add>
<add> if (root.isScheduled) {
<add> // If we're already scheduled, we can bail out.
<add> return;
<add> }
<add> root.isScheduled = true;
<add> if (lastScheduledRoot) {
<add> // Schedule ourselves to the end.
<add> lastScheduledRoot.nextScheduledRoot = root;
<add> lastScheduledRoot = root;
<add> } else {
<add> // We're the only work scheduled.
<add> nextScheduledRoot = root;
<add> lastScheduledRoot = root;
<add> scheduleLowPriCallback(performLowPriWork);
<add> }
<add> }
<add>
<add> /*
<add> function performHighPriWork() {
<add> // There is no such thing as high pri work yet.
<add> }
<add>
<add> function ensureHighPriIsScheduled() {
<add> scheduleHighPriCallback(performHighPriWork);
<add> }
<add> */
<add>
<add> return {
<add> scheduleLowPriWork: scheduleLowPriWork,
<add> };
<add>}; | 2 |
Ruby | Ruby | remove roflscaling constants | 6fe624f50d2572c6f7ac3062e9317abead07863e | <ide><path>activerecord/lib/arel/visitors/postgresql.rb
<ide> module Arel # :nodoc: all
<ide> module Visitors
<ide> class PostgreSQL < Arel::Visitors::ToSql
<del> CUBE = "CUBE"
<del> ROLLUP = "ROLLUP"
<del> GROUPING_SETS = "GROUPING SETS"
<del> LATERAL = "LATERAL"
<del>
<ide> private
<ide>
<ide> def visit_Arel_Nodes_Matches(o, collector)
<ide><path>activerecord/lib/arel/visitors/to_sql.rb
<ide> def initialize(object)
<ide> end
<ide>
<ide> class ToSql < Arel::Visitors::Visitor
<del> WHERE = " WHERE " # :nodoc:
<del> SPACE = " " # :nodoc:
<del> COMMA = ", " # :nodoc:
<del> GROUP_BY = " GROUP BY " # :nodoc:
<del> ORDER_BY = " ORDER BY " # :nodoc:
<del> WINDOW = " WINDOW " # :nodoc:
<del> AND = " AND " # :nodoc:
<del>
<del> DISTINCT = "DISTINCT" # :nodoc:
<del>
<ide> def initialize(connection)
<ide> super()
<ide> @connection = connection | 2 |
Python | Python | fix t5/mt5 tests | 5ae087cf8ec080b121c9cdc9bafdc2b35b6e110e | <ide><path>tests/models/mt5/test_modeling_tf_mt5.py
<ide> def test_small_integration_test(self):
<ide> labels = tokenizer("Hi I am", return_tensors="tf").input_ids
<ide>
<ide> loss = model(input_ids, labels=labels).loss
<del> mtf_score = -tf.math.reduce_sum(loss).numpy()
<add> mtf_score = -tf.math.reduce_mean(loss).numpy()
<ide>
<del> EXPECTED_SCORE = -84.9127
<add> EXPECTED_SCORE = -21.210594
<ide> self.assertTrue(abs(mtf_score - EXPECTED_SCORE) < 2e-4)
<ide><path>tests/models/t5/test_modeling_tf_t5.py
<ide> def test_small_integration_test(self):
<ide> labels = tokenizer("Hi I am", return_tensors="tf").input_ids
<ide>
<ide> loss = model(input_ids, labels=labels).loss
<del> mtf_score = -tf.math.reduce_sum(loss).numpy()
<add> mtf_score = -tf.math.reduce_mean(loss).numpy()
<ide>
<del> EXPECTED_SCORE = -19.0845
<add> EXPECTED_SCORE = -4.7710114
<ide> self.assertTrue(abs(mtf_score - EXPECTED_SCORE) < 1e-4)
<ide>
<ide> @slow
<ide> def test_small_v1_1_integration_test(self):
<ide> labels = tokenizer("Hi I am", return_tensors="tf").input_ids
<ide>
<ide> loss = model(input_ids, labels=labels).loss
<del> mtf_score = -tf.math.reduce_sum(loss).numpy()
<add> mtf_score = -tf.math.reduce_mean(loss).numpy()
<ide>
<del> EXPECTED_SCORE = -59.0293
<add> EXPECTED_SCORE = -14.759922
<ide> self.assertTrue(abs(mtf_score - EXPECTED_SCORE) < 1e-4)
<ide>
<ide> @slow
<ide> def test_small_byt5_integration_test(self):
<ide> labels = tokenizer("Hi I am", return_tensors="tf").input_ids
<ide>
<ide> loss = model(input_ids, labels=labels).loss
<del> mtf_score = -tf.math.reduce_sum(loss).numpy()
<add> mtf_score = -tf.math.reduce_mean(loss).numpy()
<ide>
<del> EXPECTED_SCORE = -60.7397
<add> EXPECTED_SCORE = -7.594554
<ide> self.assertTrue(abs(mtf_score - EXPECTED_SCORE) < 1e-4)
<ide>
<ide> @slow | 2 |
PHP | PHP | add mysql fulltext support | aaefbf1c2f927b0a7f7a2a1c487c2cffa6bc795f | <ide><path>lib/Cake/Model/Datasource/Database/Mysql.php
<ide> public function index($model) {
<ide> $table = $this->fullTableName($model);
<ide> $old = version_compare($this->getVersion(), '4.1', '<=');
<ide> if ($table) {
<del> $indices = $this->_execute('SHOW INDEX FROM ' . $table);
<add> $indexes = $this->_execute('SHOW INDEX FROM ' . $table);
<ide> // @codingStandardsIgnoreStart
<ide> // MySQL columns don't match the cakephp conventions.
<del> while ($idx = $indices->fetch(PDO::FETCH_OBJ)) {
<add> while ($idx = $indexes->fetch(PDO::FETCH_OBJ)) {
<ide> if ($old) {
<ide> $idx = (object)current((array)$idx);
<ide> }
<ide> if (!isset($index[$idx->Key_name]['column'])) {
<ide> $col = array();
<ide> $index[$idx->Key_name]['column'] = $idx->Column_name;
<del> $index[$idx->Key_name]['unique'] = intval($idx->Non_unique == 0);
<add> if ($idx->Index_type === 'FULLTEXT') {
<add> $index[$idx->Key_name]['type'] = strtolower($idx->Index_type);
<add> } else {
<add> $index[$idx->Key_name]['unique'] = intval($idx->Non_unique == 0);
<add> }
<ide> } else {
<ide> if (!empty($index[$idx->Key_name]['column']) && !is_array($index[$idx->Key_name]['column'])) {
<ide> $col[] = $index[$idx->Key_name]['column'];
<ide> public function index($model) {
<ide> }
<ide> }
<ide> // @codingStandardsIgnoreEnd
<del> $indices->closeCursor();
<add> $indexes->closeCursor();
<ide> }
<ide> return $index;
<ide> }
<ide> protected function _alterIndexes($table, $indexes) {
<ide> if (isset($indexes['drop'])) {
<ide> foreach ($indexes['drop'] as $name => $value) {
<ide> $out = 'DROP ';
<del> if ($name == 'PRIMARY') {
<add> if ($name === 'PRIMARY') {
<ide> $out .= 'PRIMARY KEY';
<ide> } else {
<del> $out .= 'KEY ' . $name;
<add> $out .= 'KEY ' . $this->startQuote . $name . $this->endQuote;
<ide> }
<ide> $alter[] = $out;
<ide> }
<ide> }
<ide> if (isset($indexes['add'])) {
<del> foreach ($indexes['add'] as $name => $value) {
<del> $out = 'ADD ';
<del> if ($name == 'PRIMARY') {
<del> $out .= 'PRIMARY ';
<del> $name = null;
<del> } else {
<del> if (!empty($value['unique'])) {
<del> $out .= 'UNIQUE ';
<del> }
<del> }
<del> if (is_array($value['column'])) {
<del> $out .= 'KEY ' . $name . ' (' . implode(', ', array_map(array(&$this, 'name'), $value['column'])) . ')';
<del> } else {
<del> $out .= 'KEY ' . $name . ' (' . $this->name($value['column']) . ')';
<del> }
<del> $alter[] = $out;
<add> $add = $this->buildIndex($indexes['add']);
<add> foreach ($add as $index) {
<add> $alter[] = 'ADD ' . $index;
<ide> }
<ide> }
<ide> return $alter;
<ide><path>lib/Cake/Model/Datasource/DboSource.php
<ide> protected function _buildFieldParameters($columnString, $columnData, $position)
<ide> }
<ide>
<ide> /**
<del> * Format indexes for create table
<add> * Format indexes for create table.
<ide> *
<ide> * @param array $indexes
<ide> * @param string $table
<ide> public function buildIndex($indexes, $table = null) {
<ide> } else {
<ide> if (!empty($value['unique'])) {
<ide> $out .= 'UNIQUE ';
<add> } elseif (!empty($value['type']) && strtoupper($value['type']) === 'FULLTEXT') {
<add> $out .= 'FULLTEXT ';
<ide> }
<ide> $name = $this->startQuote . $name . $this->endQuote;
<ide> }
<ide><path>lib/Cake/Test/Case/Model/CakeSchemaTest.php
<ide> public function testGenerateTable() {
<ide> );
<ide> $result = $this->Schema->generateTable('posts', $posts);
<ide> $this->assertRegExp('/public \$posts/', $result);
<add>
<add> $posts = array(
<add> 'id' => array('type' => 'integer', 'null' => false, 'default' => 0, 'key' => 'primary'),
<add> 'author_id' => array('type' => 'integer', 'null' => false),
<add> 'title' => array('type' => 'string', 'null' => false),
<add> 'body' => array('type' => 'text', 'null' => true, 'default' => null),
<add> 'published' => array('type' => 'string', 'null' => true, 'default' => 'N', 'length' => 1),
<add> 'created' => array('type' => 'datetime', 'null' => true, 'default' => null),
<add> 'updated' => array('type' => 'datetime', 'null' => true, 'default' => null),
<add> 'indexes' => array(
<add> 'PRIMARY' => array('column' => 'id', 'unique' => true),
<add> 'MyFtIndex' => array('column' => array('title', 'body'), 'type' => 'fulltext')
<add> )
<add> );
<add> $result = $this->Schema->generateTable('fields', $posts);
<add> $this->assertRegExp('/public \$fields/', $result);
<add> $this->assertPattern('/\'type\' \=\> \'fulltext\'/', $result);
<ide> }
<ide>
<ide> /**
<ide><path>lib/Cake/Test/Case/Model/Datasource/Database/MysqlTest.php
<ide> public function testIndexDetection() {
<ide> $result = $this->Dbo->index('with_multiple_compound_keys', false);
<ide> $this->Dbo->rawQuery('DROP TABLE ' . $name);
<ide> $this->assertEquals($expected, $result);
<add>
<add> $name = $this->Dbo->fullTableName('with_fulltext');
<add> $this->Dbo->rawQuery('CREATE TABLE ' . $name . ' (id int(11) AUTO_INCREMENT, name varchar(255), description text, primary key(id), FULLTEXT KEY `MyFtIndex` ( `name`, `description` )) ENGINE=MyISAM;');
<add> $expected = array(
<add> 'PRIMARY' => array('column' => 'id', 'unique' => 1),
<add> 'MyFtIndex' => array('column' => array('name', 'description'), 'type' => 'fulltext')
<add> );
<add> $result = $this->Dbo->index('with_fulltext', false);
<add> $this->Dbo->rawQuery('DROP TABLE ' . $name);
<add> $this->assertEquals($expected, $result);
<ide> }
<ide>
<ide> /**
<ide> public function testAlterSchemaIndexes() {
<ide>
<ide> $result = $this->Dbo->alterSchema($schemaB->compare($schemaA));
<ide> $this->assertContains("ALTER TABLE $table", $result);
<del> $this->assertContains('ADD KEY name_idx (`name`),', $result);
<del> $this->assertContains('ADD KEY group_idx (`group1`),', $result);
<del> $this->assertContains('ADD KEY compound_idx (`group1`, `group2`),', $result);
<add> $this->assertContains('ADD KEY `name_idx` (`name`),', $result);
<add> $this->assertContains('ADD KEY `group_idx` (`group1`),', $result);
<add> $this->assertContains('ADD KEY `compound_idx` (`group1`, `group2`),', $result);
<ide> $this->assertContains('ADD PRIMARY KEY (`id`);', $result);
<ide>
<ide> //Test that the string is syntactically correct
<ide> public function testAlterSchemaIndexes() {
<ide> $result = $this->Dbo->alterSchema($schemaC->compare($schemaB));
<ide> $this->assertContains("ALTER TABLE $table", $result);
<ide> $this->assertContains('DROP PRIMARY KEY,', $result);
<del> $this->assertContains('DROP KEY name_idx,', $result);
<del> $this->assertContains('DROP KEY group_idx,', $result);
<del> $this->assertContains('DROP KEY compound_idx,', $result);
<del> $this->assertContains('ADD KEY id_name_idx (`id`, `name`),', $result);
<del> $this->assertContains('ADD UNIQUE KEY name_idx (`name`),', $result);
<del> $this->assertContains('ADD KEY group_idx (`group2`),', $result);
<del> $this->assertContains('ADD KEY compound_idx (`group2`, `group1`);', $result);
<add> $this->assertContains('DROP KEY `name_idx`,', $result);
<add> $this->assertContains('DROP KEY `group_idx`,', $result);
<add> $this->assertContains('DROP KEY `compound_idx`,', $result);
<add> $this->assertContains('ADD KEY `id_name_idx` (`id`, `name`),', $result);
<add> $this->assertContains('ADD UNIQUE KEY `name_idx` (`name`),', $result);
<add> $this->assertContains('ADD KEY `group_idx` (`group2`),', $result);
<add> $this->assertContains('ADD KEY `compound_idx` (`group2`, `group1`);', $result);
<ide>
<ide> $query = $this->Dbo->getConnection()->prepare($result);
<ide> $this->assertEquals($query->queryString, $result);
<ide> public function testAlterSchemaIndexes() {
<ide> $result = $this->Dbo->alterSchema($schemaA->compare($schemaC));
<ide>
<ide> $this->assertContains("ALTER TABLE $table", $result);
<del> $this->assertContains('DROP KEY name_idx,', $result);
<del> $this->assertContains('DROP KEY group_idx,', $result);
<del> $this->assertContains('DROP KEY compound_idx,', $result);
<del> $this->assertContains('DROP KEY id_name_idx;', $result);
<add> $this->assertContains('DROP KEY `name_idx`,', $result);
<add> $this->assertContains('DROP KEY `group_idx`,', $result);
<add> $this->assertContains('DROP KEY `compound_idx`,', $result);
<add> $this->assertContains('DROP KEY `id_name_idx`;', $result);
<ide>
<ide> $query = $this->Dbo->getConnection()->prepare($result);
<ide> $this->assertEquals($query->queryString, $result);
<ide> public function testBuildIndex() {
<ide> $result = $this->Dbo->buildIndex($data);
<ide> $expected = array('UNIQUE KEY `MyIndex` (`id`, `name`)');
<ide> $this->assertEquals($expected, $result);
<add>
<add> $data = array(
<add> 'MyFtIndex' => array('column' => array('name', 'description'), 'type' => 'fulltext')
<add> );
<add> $result = $this->Dbo->buildIndex($data);
<add> $expected = array('FULLTEXT KEY `MyFtIndex` (`name`, `description`)');
<add> $this->assertEquals($expected, $result);
<ide> }
<ide>
<ide> /** | 4 |
Javascript | Javascript | add relevant placeholder to link submission | 21ca4e3f67096e4827c94773e2f232c49c7564ca | <add><path>client/src/templates/Challenges/projects/SolutionForm.js
<del><path>client/src/templates/Challenges/projects/ProjectForm.js
<ide> import { Form } from '../../../components/formHelpers';
<ide>
<ide> const propTypes = {
<ide> isFrontEnd: PropTypes.bool,
<add> isProject: PropTypes.bool,
<ide> isSubmitting: PropTypes.bool,
<ide> onSubmit: PropTypes.func.isRequired,
<del> updateProjectForm: PropTypes.func.isRequired
<add> updateSolutionForm: PropTypes.func.isRequired
<ide> };
<ide>
<del>const frontEndFields = ['solution'];
<del>const backEndFields = ['solution', 'githubLink'];
<add>const challengeFields = ['solution'];
<add>const backEndProjectFields = ['solution', 'githubLink'];
<ide>
<ide> const options = {
<ide> types: {
<ide> const options = {
<ide> required: ['solution']
<ide> };
<ide>
<del>export class ProjectForm extends Component {
<add>export class SolutionForm extends Component {
<ide> constructor(props) {
<ide> super(props);
<ide> this.handleSubmit = this.handleSubmit.bind(this);
<ide> }
<ide> componentDidMount() {
<del> this.props.updateProjectForm({});
<add> this.props.updateSolutionForm({});
<ide> }
<ide> handleSubmit(values) {
<del> this.props.updateProjectForm(values);
<add> this.props.updateSolutionForm(values);
<ide> this.props.onSubmit();
<ide> }
<ide> render() {
<del> const { isSubmitting, isFrontEnd } = this.props;
<add> const { isSubmitting, isFrontEnd, isProject } = this.props;
<ide> const buttonCopy = isSubmitting
<ide> ? 'Submit and go to my next challenge'
<ide> : "I've completed this challenge";
<ide> return (
<ide> <Form
<ide> buttonText={`${buttonCopy}`}
<del> formFields={isFrontEnd ? frontEndFields : backEndFields}
<add> formFields={
<add> isProject && !isFrontEnd ? backEndProjectFields : challengeFields
<add> }
<ide> id={isFrontEnd ? 'front-end-form' : 'back-end-form'}
<ide> options={{
<ide> ...options,
<ide> placeholders: {
<ide> solution:
<del> 'Link to solution, ex: ' +
<add> 'Link, ex: ' +
<ide> (isFrontEnd
<ide> ? 'https://codepen.io/camperbot/full/oNvPqqo'
<ide> : 'https://camperbot.glitch.me'),
<del> githubLink:
<del> 'Link to GitHub repo, ex: https://github.com/camperbot/hello'
<add> githubLink: 'ex: https://github.com/camperbot/hello'
<ide> }
<ide> }}
<ide> submit={this.handleSubmit}
<ide> export class ProjectForm extends Component {
<ide> }
<ide> }
<ide>
<del>ProjectForm.propTypes = propTypes;
<add>SolutionForm.propTypes = propTypes;
<ide>
<del>export default ProjectForm;
<add>export default SolutionForm;
<ide><path>client/src/templates/Challenges/projects/backend/Show.js
<ide> import {
<ide> initTests,
<ide> updateBackendFormValues,
<ide> updateChallengeMeta,
<del> updateProjectFormValues,
<del> backendNS
<add> updateSolutionFormValues
<ide> } from '../../redux';
<ide> import { getGuideUrl } from '../../utils';
<ide>
<ide> import Output from '../../components/Output';
<ide> import CompletionModal from '../../components/CompletionModal';
<ide> import HelpModal from '../../components/HelpModal';
<ide> import ProjectToolPanel from '../Tool-Panel';
<del>import ProjectForm from '../ProjectForm';
<del>import { Form } from '../../../../components/formHelpers';
<add>import SolutionForm from '../SolutionForm';
<ide> import Spacer from '../../../../components/helpers/Spacer';
<ide> import { ChallengeNode } from '../../../../redux/propTypes';
<ide> import { isSignedInSelector } from '../../../../redux';
<ide> import Hotkeys from '../../components/Hotkeys';
<ide>
<del>import { backend } from '../../../../../utils/challengeTypes';
<del>
<ide> import '../../components/test-frame.css';
<add>import { backEndProject } from '../../../../../utils/challengeTypes';
<ide>
<ide> const propTypes = {
<ide> challengeMounted: PropTypes.func.isRequired,
<ide> const propTypes = {
<ide> title: PropTypes.string,
<ide> updateBackendFormValues: PropTypes.func.isRequired,
<ide> updateChallengeMeta: PropTypes.func.isRequired,
<del> updateProjectFormValues: PropTypes.func.isRequired
<add> updateSolutionFormValues: PropTypes.func.isRequired
<ide> };
<ide>
<ide> const mapStateToProps = createSelector(
<ide> const mapDispatchToActions = {
<ide> initTests,
<ide> updateBackendFormValues,
<ide> updateChallengeMeta,
<del> updateProjectFormValues
<del>};
<del>
<del>const formFields = ['solution'];
<del>const options = {
<del> required: ['solution'],
<del> types: {
<del> solution: 'url'
<del> },
<del> placeholders: {
<del> solution: 'Link to solution, ex: https://codepen.io/camperbot/full/oNvPqqo'
<del> }
<add> updateSolutionFormValues
<ide> };
<ide>
<ide> export class BackEnd extends Component {
<ide> constructor(props) {
<ide> super(props);
<ide> this.state = {};
<ide> this.updateDimensions = this.updateDimensions.bind(this);
<del> this.handleSubmit = this.handleSubmit.bind(this);
<ide> }
<ide>
<ide> componentDidMount() {
<ide> export class BackEnd extends Component {
<ide> challengeMounted(challengeMeta.id);
<ide> }
<ide>
<del> handleSubmit(values) {
<del> const { updateBackendFormValues, executeChallenge } = this.props;
<del> updateBackendFormValues(values);
<del> executeChallenge();
<del> }
<del>
<ide> render() {
<ide> const {
<ide> data: {
<ide> export class BackEnd extends Component {
<ide> challengeMeta: { introPath, nextChallengePath, prevChallengePath }
<ide> },
<ide> tests,
<del> isSignedIn,
<ide> executeChallenge,
<del> updateProjectFormValues
<add> updateSolutionFormValues,
<add> updateBackendFormValues
<ide> } = this.props;
<ide>
<del> const buttonCopy = isSignedIn
<del> ? 'Submit and go to my next challenge'
<del> : "I've completed this challenge";
<ide> const blockNameTitle = `${blockName} - ${title}`;
<add> const isBackEndProject = challengeType === backEndProject;
<ide>
<ide> return (
<ide> <Hotkeys
<ide> export class BackEnd extends Component {
<ide> description={description}
<ide> instructions={instructions}
<ide> />
<del> {challengeType === backend ? (
<del> <Form
<del> buttonText={`${buttonCopy}`}
<del> formFields={formFields}
<del> id={backendNS}
<del> options={options}
<del> submit={this.handleSubmit}
<del> />
<del> ) : (
<del> <ProjectForm
<del> isFrontEnd={false}
<del> onSubmit={executeChallenge}
<del> updateProjectForm={updateProjectFormValues}
<del> />
<del> )}
<add> <SolutionForm
<add> isFrontEnd={false}
<add> isProject={isBackEndProject}
<add> onSubmit={executeChallenge}
<add> updateSolutionForm={values =>
<add> isBackEndProject
<add> ? updateSolutionFormValues(values)
<add> : updateBackendFormValues(values)
<add> }
<add> />
<ide> <ProjectToolPanel
<ide> guideUrl={getGuideUrl({ forumTopicId, title })}
<ide> />
<ide><path>client/src/templates/Challenges/projects/frontend/Show.js
<ide> import {
<ide> challengeMounted,
<ide> updateChallengeMeta,
<ide> openModal,
<del> updateProjectFormValues
<add> updateSolutionFormValues
<ide> } from '../../redux';
<ide> import { frontEndProject } from '../../../../../utils/challengeTypes';
<ide> import { getGuideUrl } from '../../utils';
<ide> import LearnLayout from '../../../../components/layouts/Learn';
<ide> import ChallengeTitle from '../../components/Challenge-Title';
<ide> import ChallengeDescription from '../../components/Challenge-Description';
<ide> import Spacer from '../../../../components/helpers/Spacer';
<del>import ProjectForm from '../ProjectForm';
<add>import SolutionForm from '../SolutionForm';
<ide> import ProjectToolPanel from '../Tool-Panel';
<ide> import CompletionModal from '../../components/CompletionModal';
<ide> import HelpModal from '../../components/HelpModal';
<ide> const mapDispatchToProps = dispatch =>
<ide> {
<ide> updateChallengeMeta,
<ide> challengeMounted,
<del> updateProjectFormValues,
<add> updateSolutionFormValues,
<ide> openCompletionModal: () => openModal('completion')
<ide> },
<ide> dispatch
<ide> const propTypes = {
<ide> challengeMeta: PropTypes.object
<ide> }),
<ide> updateChallengeMeta: PropTypes.func.isRequired,
<del> updateProjectFormValues: PropTypes.func.isRequired
<add> updateSolutionFormValues: PropTypes.func.isRequired
<ide> };
<ide>
<ide> export class Project extends Component {
<ide> export class Project extends Component {
<ide> pageContext: {
<ide> challengeMeta: { introPath, nextChallengePath, prevChallengePath }
<ide> },
<del> updateProjectFormValues
<add> updateSolutionFormValues
<ide> } = this.props;
<del> const isFrontEnd = challengeType === frontEndProject;
<ide>
<add> const isFrontEndProject = challengeType === frontEndProject;
<ide> const blockNameTitle = `${blockName} - ${title}`;
<add>
<ide> return (
<ide> <Hotkeys
<ide> innerRef={c => (this._container = c)}
<ide> export class Project extends Component {
<ide> <Spacer />
<ide> <ChallengeTitle>{blockNameTitle}</ChallengeTitle>
<ide> <ChallengeDescription description={description} />
<del> <ProjectForm
<del> isFrontEnd={isFrontEnd}
<add> <SolutionForm
<add> isFrontEnd={true}
<add> isProject={isFrontEndProject}
<ide> onSubmit={openCompletionModal}
<del> updateProjectForm={updateProjectFormValues}
<add> updateSolutionForm={updateSolutionFormValues}
<ide> />
<ide> <ProjectToolPanel
<ide> guideUrl={getGuideUrl({ forumTopicId, title })}
<ide><path>client/src/templates/Challenges/redux/completion-epic.js
<ide> import {
<ide> challengeTestsSelector,
<ide> closeModal,
<ide> challengeFilesSelector,
<del> updateProjectFormValues
<add> updateSolutionFormValues
<ide> } from './';
<ide> import {
<ide> userSelector,
<ide> function submitProject(type, state) {
<ide> payload: challengeInfo
<ide> };
<ide> return postChallenge(update, username).pipe(
<del> concat(of(updateProjectFormValues({})))
<add> concat(of(updateSolutionFormValues({})))
<ide> );
<ide> }
<ide>
<ide><path>client/src/templates/Challenges/redux/index.js
<ide> export const types = createTypes(
<ide> 'updateChallengeMeta',
<ide> 'updateFile',
<ide> 'updateJSEnabled',
<del> 'updateProjectFormValues',
<add> 'updateSolutionFormValues',
<ide> 'updateSuccessMessage',
<ide> 'updateTests',
<ide> 'updateLogs',
<ide> export const updateFile = createAction(types.updateFile);
<ide> export const updateConsole = createAction(types.updateConsole);
<ide> export const updateLogs = createAction(types.updateLogs);
<ide> export const updateJSEnabled = createAction(types.updateJSEnabled);
<del>export const updateProjectFormValues = createAction(
<del> types.updateProjectFormValues
<add>export const updateSolutionFormValues = createAction(
<add> types.updateSolutionFormValues
<ide> );
<ide> export const updateSuccessMessage = createAction(types.updateSuccessMessage);
<ide>
<ide> export const reducer = handleActions(
<ide> ...state,
<ide> backendFormValues: payload
<ide> }),
<del> [types.updateProjectFormValues]: (state, { payload }) => ({
<add> [types.updateSolutionFormValues]: (state, { payload }) => ({
<ide> ...state,
<ide> projectFormValues: payload
<ide> }), | 5 |
Javascript | Javascript | fix bug with date picker ios | 446ce49e9b097d2a5e95b0f17aa23756733c27ec | <ide><path>Libraries/Components/DatePicker/DatePickerIOS.ios.js
<ide>
<ide> const NativeMethodsMixin = require('NativeMethodsMixin');
<ide> const React = require('React');
<add>const invariant = require('fbjs/lib/invariant');
<ide> const PropTypes = require('prop-types');
<ide> const StyleSheet = require('StyleSheet');
<ide> const View = require('View');
<ide> const DatePickerIOS = createReactClass({
<ide> /**
<ide> * The currently selected date.
<ide> */
<del> date: PropTypes.instanceOf(Date).isRequired,
<add> date: PropTypes.instanceOf(Date),
<add>
<add> /**
<add> * Provides an initial value that will change when the user starts selecting
<add> * a date. It is useful for simple use-cases where you do not want to deal
<add> * with listening to events and updating the date prop to keep the
<add> * controlled state in sync. The controlled state has known bugs which
<add> * causes it to go out of sync with native. The initialDate prop is intended
<add> * to allow you to have native be source of truth.
<add> */
<add> initialDate: PropTypes.instanceOf(Date),
<ide>
<ide> /**
<ide> * Date change handler.
<ide> const DatePickerIOS = createReactClass({
<ide> };
<ide> },
<ide>
<add> componentDidUpdate: function() {
<add> if (this.props.date) {
<add> const propsTimeStamp = this.props.date.getTime();
<add> if (this._picker) {
<add> this._picker.setNativeProps({
<add> date: propsTimeStamp,
<add> });
<add> }
<add> }
<add> },
<add>
<ide> _onChange: function(event: Event) {
<ide> const nativeTimeStamp = event.nativeEvent.timestamp;
<ide> this.props.onDateChange && this.props.onDateChange(
<ide> new Date(nativeTimeStamp)
<ide> );
<ide> // $FlowFixMe(>=0.41.0)
<ide> this.props.onChange && this.props.onChange(event);
<del>
<del> // We expect the onChange* handlers to be in charge of updating our `date`
<del> // prop. That way they can also disallow/undo/mutate the selection of
<del> // certain values. In other words, the embedder of this component should
<del> // be the source of truth, not the native component.
<del> const propsTimeStamp = this.props.date.getTime();
<del> if (this._picker && nativeTimeStamp !== propsTimeStamp) {
<del> this._picker.setNativeProps({
<del> date: propsTimeStamp,
<del> });
<del> }
<ide> },
<ide>
<ide> render: function() {
<ide> const props = this.props;
<add> invariant(
<add> props.date || props.initialDate,
<add> 'A selected date or initial date should be specified.',
<add> );
<ide> return (
<ide> <View style={props.style}>
<ide> <RCTDatePickerIOS
<ide> ref={ picker => { this._picker = picker; } }
<ide> style={styles.datePickerIOS}
<del> date={props.date.getTime()}
<add> date={props.date ? props.date.getTime() : props.initialDate ? props.initialDate.getTime() : undefined}
<ide> locale={props.locale ? props.locale : undefined}
<ide> maximumDate={
<ide> props.maximumDate ? props.maximumDate.getTime() : undefined | 1 |
Java | Java | require ehcache 2.10+ | e8b5374d8776f040c9133ae341b17e780b1e2323 | <ide><path>spring-context-support/src/main/java/org/springframework/cache/ehcache/EhCacheFactoryBean.java
<ide> * <p>Note: If the named Cache instance is found, the properties will be ignored and the
<ide> * Cache instance will be retrieved from the CacheManager.
<ide> *
<del> * <p>Note: As of Spring 4.1, Spring's EhCache support requires EhCache 2.5 or higher.
<add> * <p>Note: As of Spring 5.0, Spring's EhCache support requires EhCache 2.10 or higher.
<ide> *
<ide> * @author Juergen Hoeller
<ide> * @author Dmitriy Kopylenko
<ide> public class EhCacheFactoryBean extends CacheConfiguration implements FactoryBea
<ide> private Ehcache cache;
<ide>
<ide>
<del> @SuppressWarnings("deprecation")
<ide> public EhCacheFactoryBean() {
<ide> setMaxEntriesLocalHeap(10000);
<del> setMaxElementsOnDisk(10000000);
<add> setMaxEntriesLocalDisk(10000000);
<ide> setTimeToLiveSeconds(120);
<ide> setTimeToIdleSeconds(120);
<ide> } | 1 |
Text | Text | fix undici version in changelog | 10f93679c5317bf9665279faf5593b1d63d4822d | <ide><path>doc/changelogs/CHANGELOG_V16.md
<ide> More detailed information on each of the vulnerabilities can be found in [Septem
<ide> * \[[`67cbbcc902`](https://github.com/nodejs/node/commit/67cbbcc902)] - **deps**: update corepack to 0.14.0 (Node.js GitHub Bot) [#44509](https://github.com/nodejs/node/pull/44509)
<ide> * \[[`9f14dc1a8f`](https://github.com/nodejs/node/commit/9f14dc1a8f)] - **deps**: update Acorn to v8.8.0 (Michaël Zasso) [#44437](https://github.com/nodejs/node/pull/44437)
<ide> * \[[`1811a6aaa8`](https://github.com/nodejs/node/commit/1811a6aaa8)] - **deps**: update icu tzdata to 2022b (Matías Zúñiga) [#44283](https://github.com/nodejs/node/pull/44283)
<del>* \[[`0c4953cbd1`](https://github.com/nodejs/node/commit/0c4953cbd1)] - **deps**: update undici to 5.9.1 (Node.js GitHub Bot) [#44319](https://github.com/nodejs/node/pull/44319)
<add>* \[[`0c4953cbd1`](https://github.com/nodejs/node/commit/0c4953cbd1)] - **deps**: update undici to 5.10.0(Node.js GitHub Bot) [#44319](https://github.com/nodejs/node/pull/44319)
<ide> * \[[`8a921fea74`](https://github.com/nodejs/node/commit/8a921fea74)] - **deps**: upgrade npm to 8.19.1 (npm team) [#44486](https://github.com/nodejs/node/pull/44486)
<ide> * \[[`763a63c14b`](https://github.com/nodejs/node/commit/763a63c14b)] - **deps**: update corepack to 0.13.0 (Node.js GitHub Bot) [#44318](https://github.com/nodejs/node/pull/44318)
<ide> * \[[`fdb699c84a`](https://github.com/nodejs/node/commit/fdb699c84a)] - **deps**: upgrade npm to 8.18.0 (npm team) [#44263](https://github.com/nodejs/node/pull/44263)
<ide><path>doc/changelogs/CHANGELOG_V18.md
<ide> More detailed information on each of the vulnerabilities can be found in [Septem
<ide> * \[[`754d26a53e`](https://github.com/nodejs/node/commit/754d26a53e)] - **deps**: patch V8 to 10.2.154.15 (Michaël Zasso) [#44294](https://github.com/nodejs/node/pull/44294)
<ide> * \[[`1b50ff2600`](https://github.com/nodejs/node/commit/1b50ff2600)] - **deps**: update icu tzdata to 2022b (Matías Zúñiga) [#44283](https://github.com/nodejs/node/pull/44283)
<ide> * \[[`1e451dca99`](https://github.com/nodejs/node/commit/1e451dca99)] - **deps**: upgrade llhttp to 6.0.9 (Paolo Insogna) [#44344](https://github.com/nodejs/node/pull/44344)
<del>* \[[`57da3db522`](https://github.com/nodejs/node/commit/57da3db522)] - **deps**: update undici to 5.9.1 (Node.js GitHub Bot) [#44319](https://github.com/nodejs/node/pull/44319)
<add>* \[[`57da3db522`](https://github.com/nodejs/node/commit/57da3db522)] - **deps**: update undici to 5.10.0 (Node.js GitHub Bot) [#44319](https://github.com/nodejs/node/pull/44319)
<ide> * \[[`1c87a7e8f6`](https://github.com/nodejs/node/commit/1c87a7e8f6)] - **doc**: add missing parenthesis in TLSSocket section (Tobias Nießen) [#44512](https://github.com/nodejs/node/pull/44512)
<ide> * \[[`05006eddb2`](https://github.com/nodejs/node/commit/05006eddb2)] - **doc**: do not use "Returns:" for crypto.constants (Tobias Nießen) [#44481](https://github.com/nodejs/node/pull/44481)
<ide> * \[[`54b6ed58bc`](https://github.com/nodejs/node/commit/54b6ed58bc)] - **doc**: use serial comma in addons docs (Tobias Nießen) [#44482](https://github.com/nodejs/node/pull/44482) | 2 |
Javascript | Javascript | add backfacevisibility to imagestyleproptypes | 484fe9155b9653959e0bccd9df76b34155b6679b | <ide><path>Libraries/Image/ImageStylePropTypes.js
<ide> var ImageStylePropTypes = {
<ide> ...LayoutPropTypes,
<ide> ...TransformPropTypes,
<ide> resizeMode: ReactPropTypes.oneOf(Object.keys(ImageResizeMode)),
<add> backfaceVisibility: ReactPropTypes.oneOf(['visible', 'hidden']),
<ide> backgroundColor: ReactPropTypes.string,
<ide> borderColor: ReactPropTypes.string,
<ide> borderWidth: ReactPropTypes.number, | 1 |
Javascript | Javascript | use string#repeat instead of array#join | 50125e27b763aa49a93788c4605addf3c5966f44 | <ide><path>lib/repl.js
<ide> REPLServer.prototype.displayPrompt = function(preserveCursor) {
<ide> var prompt = this._initialPrompt;
<ide> if (this.bufferedCommand.length) {
<ide> prompt = '...';
<del> var levelInd = new Array(this.lines.level.length).join('..');
<add> const len = this.lines.level.length ? this.lines.level.length - 1 : 0;
<add> const levelInd = '..'.repeat(len);
<ide> prompt += levelInd + ' ';
<ide> }
<ide>
<ide> REPLServer.prototype.memory = function memory(cmd) {
<ide> // save the line so I can do magic later
<ide> if (cmd) {
<ide> // TODO should I tab the level?
<del> self.lines.push(new Array(self.lines.level.length).join(' ') + cmd);
<add> const len = self.lines.level.length ? self.lines.level.length - 1 : 0;
<add> self.lines.push(' '.repeat(len) + cmd);
<ide> } else {
<ide> // I don't want to not change the format too much...
<ide> self.lines.push(''); | 1 |
Javascript | Javascript | remove superfluous regex in tools/doc/json.js | d408ee1be31f94c20397baeb0a1740b4a0d8167c | <ide><path>tools/doc/json.js
<ide> const maybeAncestors = r`(?:${id}\.?)*`;
<ide>
<ide> const callWithParams = r`\([^)]*\)`;
<ide>
<del>const noCallOrProp = '(?![.[(])';
<del>
<ide> const maybeExtends = `(?: +extends +${maybeAncestors}${classId})?`;
<ide>
<ide> /* eslint-disable max-len */
<ide> const headingExpressions = [
<ide> `^${maybeBacktick}${maybeAncestors}(${id})${callWithParams}${maybeBacktick}$`, 'i') },
<ide>
<ide> { type: 'property', re: RegExp(
<del> `^${maybeClassPropertyPrefix}${maybeBacktick}${ancestors}(${id})${maybeBacktick}${noCallOrProp}$`, 'i') },
<add> `^${maybeClassPropertyPrefix}${maybeBacktick}${ancestors}(${id})${maybeBacktick}$`, 'i') },
<ide> ];
<ide> /* eslint-enable max-len */
<ide> | 1 |
Ruby | Ruby | remove custom $home on --cleanup | 83ab0acfc0fae4968309d2ce248ec7478436c476 | <ide><path>Library/Homebrew/cmd/test-bot.rb
<ide> def cleanup_after
<ide> git "stash", "pop"
<ide> test "brew", "cleanup", "--prune=7"
<ide> git "gc", "--auto"
<add> if ARGV.include? "--local"
<add> FileUtils.rm_rf ENV["HOMEBREW_HOME"]
<add> FileUtils.rm_rf ENV["HOMEBREW_LOGS"]
<add> end
<ide> end
<ide>
<ide> FileUtils.rm_rf @brewbot_root unless ARGV.include? "--keep-logs"
<ide> def test_bot
<ide> end
<ide>
<ide> if ARGV.include? "--local"
<del> ENV["HOME"] = "#{Dir.pwd}/home"
<add> ENV["HOMEBREW_HOME"] = ENV["HOME"] = "#{Dir.pwd}/home"
<ide> mkdir_p ENV["HOME"]
<ide> ENV["HOMEBREW_LOGS"] = "#{Dir.pwd}/logs"
<ide> end | 1 |
Mixed | Go | add network interal mode | b70954e60a15d09756bd6b00a6fadedc64829477 | <ide><path>api/client/network.go
<ide> func (cli *DockerCli) CmdNetworkCreate(args ...string) error {
<ide> cmd.Var(flIpamAux, []string{"-aux-address"}, "auxiliary ipv4 or ipv6 addresses used by Network driver")
<ide> cmd.Var(flOpts, []string{"o", "-opt"}, "set driver specific options")
<ide>
<add> flInternal := cmd.Bool([]string{"-internal"}, false, "restricts external access to the network")
<add>
<ide> cmd.Require(flag.Exact, 1)
<ide> err := cmd.ParseFlags(args, true)
<ide> if err != nil {
<ide> func (cli *DockerCli) CmdNetworkCreate(args ...string) error {
<ide> IPAM: network.IPAM{Driver: *flIpamDriver, Config: ipamCfg},
<ide> Options: flOpts.GetAll(),
<ide> CheckDuplicate: true,
<add> Internal: *flInternal,
<ide> }
<ide>
<ide> resp, err := cli.client.NetworkCreate(nc)
<ide><path>api/server/router/network/backend.go
<ide> type Backend interface {
<ide> GetNetworksByID(partialID string) []libnetwork.Network
<ide> GetAllNetworks() []libnetwork.Network
<ide> CreateNetwork(name, driver string, ipam network.IPAM,
<del> options map[string]string) (libnetwork.Network, error)
<add> options map[string]string, internal bool) (libnetwork.Network, error)
<ide> ConnectContainerToNetwork(containerName, networkName string, endpointConfig *network.EndpointSettings) error
<ide> DisconnectContainerFromNetwork(containerName string,
<ide> network libnetwork.Network) error
<ide><path>api/server/router/network/network_routes.go
<ide> func (n *networkRouter) postNetworkCreate(ctx context.Context, w http.ResponseWr
<ide> warning = fmt.Sprintf("Network with name %s (id : %s) already exists", nw.Name(), nw.ID())
<ide> }
<ide>
<del> nw, err = n.backend.CreateNetwork(create.Name, create.Driver, create.IPAM, create.Options)
<add> nw, err = n.backend.CreateNetwork(create.Name, create.Driver, create.IPAM, create.Options, create.Internal)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>daemon/network.go
<ide> func (daemon *Daemon) GetAllNetworks() []libnetwork.Network {
<ide> }
<ide>
<ide> // CreateNetwork creates a network with the given name, driver and other optional parameters
<del>func (daemon *Daemon) CreateNetwork(name, driver string, ipam network.IPAM, options map[string]string) (libnetwork.Network, error) {
<add>func (daemon *Daemon) CreateNetwork(name, driver string, ipam network.IPAM, options map[string]string, internal bool) (libnetwork.Network, error) {
<ide> c := daemon.netController
<ide> if driver == "" {
<ide> driver = c.Config().Daemon.DefaultDriver
<ide> func (daemon *Daemon) CreateNetwork(name, driver string, ipam network.IPAM, opti
<ide>
<ide> nwOptions = append(nwOptions, libnetwork.NetworkOptionIpam(ipam.Driver, "", v4Conf, v6Conf, nil))
<ide> nwOptions = append(nwOptions, libnetwork.NetworkOptionDriverOpts(options))
<add> if internal {
<add> nwOptions = append(nwOptions, libnetwork.NetworkOptionInternalNetwork())
<add> }
<ide> n, err := c.NewNetwork(driver, name, nwOptions...)
<ide> if err != nil {
<ide> return nil, err
<ide><path>docs/reference/api/docker_remote_api.md
<ide> This section lists each version from latest to oldest. Each listing includes a
<ide> * `POST /containers/create` now allows you to set the static IPv4 and/or IPv6 address for the container.
<ide> * `POST /networks/(id)/connect` now allows you to set the static IPv4 and/or IPv6 address for the container.
<ide> * `GET /info` now includes the number of containers running, stopped, and paused.
<add>* `POST /networks/create` now supports restricting external access to the network by setting the `internal` field.
<ide>
<ide> ### v1.21 API changes
<ide>
<ide><path>docs/reference/api/docker_remote_api_v1.22.md
<ide> Content-Type: application/json
<ide>
<ide> {
<ide> "Name":"isolated_nw",
<del> "Driver":"bridge"
<add> "Driver":"bridge",
<ide> "IPAM":{
<ide> "Config":[{
<ide> "Subnet":"172.20.0.0/16",
<ide> "IPRange":"172.20.10.0/24",
<ide> "Gateway":"172.20.10.11"
<ide> }]
<add> },
<add> "Internal":true
<ide> }
<ide> ```
<ide>
<ide><path>docs/reference/commandline/network_create.md
<ide> parent = "smn_cli"
<ide> -d --driver=DRIVER Driver to manage the Network bridge or overlay. The default is bridge.
<ide> --gateway=[] ipv4 or ipv6 Gateway for the master subnet
<ide> --help Print usage
<add> --internal Restricts external access to the network
<ide> --ip-range=[] Allocate container ip from a sub-range
<ide> --ipam-driver=default IP Address Management Driver
<ide> -o --opt=map[] Set custom network plugin options
<ide> docker network create -d overlay
<ide> ```
<ide> Be sure that your subnetworks do not overlap. If they do, the network create fails and Engine returns an error.
<ide>
<add>### Network internal mode
<add>
<add>By default, when you connect a container to an `overlay` network, Docker also connects a bridge network to it to provide external connectivity.
<add>If you want to create an externally isolated `overlay` network, you can specify the `--internal` option.
<add>
<ide> ## Related information
<ide>
<ide> * [network inspect](network_inspect.md)
<ide><path>man/docker-network-create.1.md
<ide> docker-network-create - create a new network
<ide> [**-d**|**--driver**=*DRIVER*]
<ide> [**--gateway**=*[]*]
<ide> [**--help**]
<add>[**--internal**]
<ide> [**--ip-range**=*[]*]
<ide> [**--ipam-driver**=*default*]
<ide> [**-o**|**--opt**=*map[]*]
<ide> docker network create -d overlay
<ide> ```
<ide> Be sure that your subnetworks do not overlap. If they do, the network create fails and Engine returns an error.
<ide>
<add>### Network internal mode
<add>
<add>By default, when you connect a container to an `overlay` network, Docker also connects a bridge network to it to provide external connectivity.
<add>If you want to create an externally isolated `overlay` network, you can specify the `--internal` option.
<add>
<ide> # OPTIONS
<ide> **--aux-address**=map[]
<ide> Auxiliary ipv4 or ipv6 addresses used by network driver
<ide> Be sure that your subnetworks do not overlap. If they do, the network create fai
<ide> **--help**
<ide> Print usage
<ide>
<add>**--internal**
<add> Restricts external access to the network
<add>
<ide> **--ip-range**=[]
<ide> Allocate container ip from a sub-range
<ide> | 8 |
PHP | PHP | restore previous return value on write() | feaf13b59d54e4b5ff6243a74c98acd5ef3d546e | <ide><path>src/Core/Configure.php
<ide> class Configure
<ide> * @param string|array $config The key to write, can be a dot notation value.
<ide> * Alternatively can be an array containing key(s) and value(s).
<ide> * @param mixed $value Value to set for var
<del> * @return void
<add> * @return bool
<ide> * @link https://book.cakephp.org/3.0/en/development/configuration.html#writing-configuration-data
<ide> */
<del> public static function write($config, $value = null): void
<add> public static function write($config, $value = null): bool
<ide> {
<ide> if (!is_array($config)) {
<ide> $config = [$config => $value];
<ide> public static function write($config, $value = null): void
<ide> ini_set('display_errors', $config['debug'] ? '1' : '0');
<ide> }
<ide> }
<add>
<add> return true;
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/Core/ConfigureTest.php
<ide> public function testRead()
<ide> */
<ide> public function testWrite()
<ide> {
<del> Configure::write('SomeName.someKey', 'myvalue');
<add> $this->assertTrue(Configure::write('SomeName.someKey', 'myvalue'));
<ide> $result = Configure::read('SomeName.someKey');
<ide> $this->assertEquals('myvalue', $result);
<ide>
<del> Configure::write('SomeName.someKey', null);
<add> $this->assertTrue(Configure::write('SomeName.someKey', null));
<ide> $result = Configure::read('SomeName.someKey');
<ide> $this->assertNull($result);
<ide> | 2 |
Go | Go | fix parser directive refactoring | c97170d618b7a1f35f47672143bd287666802287 | <ide><path>builder/dockerfile/parser/parser.go
<ide> func (d *Directive) setEscapeToken(s string) error {
<ide> return nil
<ide> }
<ide>
<del>// processLine looks for a parser directive '# escapeToken=<char>. Parser
<del>// directives must precede any builder instruction or other comments, and cannot
<del>// be repeated.
<del>func (d *Directive) processLine(line string) error {
<add>// possibleParserDirective looks for one or more parser directives '# escapeToken=<char>' and
<add>// '# platform=<string>'. Parser directives must precede any builder instruction
<add>// or other comments, and cannot be repeated.
<add>func (d *Directive) possibleParserDirective(line string) error {
<ide> if d.processingComplete {
<ide> return nil
<ide> }
<del> // Processing is finished after the first call
<del> defer func() { d.processingComplete = true }()
<ide>
<ide> tecMatch := tokenEscapeCommand.FindStringSubmatch(strings.ToLower(line))
<del> if len(tecMatch) == 0 {
<del> return nil
<del> }
<del> if d.escapeSeen == true {
<del> return errors.New("only one escape parser directive can be used")
<del> }
<del> for i, n := range tokenEscapeCommand.SubexpNames() {
<del> if n == "escapechar" {
<del> d.escapeSeen = true
<del> return d.setEscapeToken(tecMatch[i])
<add> if len(tecMatch) != 0 {
<add> for i, n := range tokenEscapeCommand.SubexpNames() {
<add> if n == "escapechar" {
<add> if d.escapeSeen == true {
<add> return errors.New("only one escape parser directive can be used")
<add> }
<add> d.escapeSeen = true
<add> return d.setEscapeToken(tecMatch[i])
<add> }
<ide> }
<ide> }
<add>
<add> d.processingComplete = true
<ide> return nil
<ide> }
<ide>
<ide> func Parse(rwc io.Reader) (*Result, error) {
<ide>
<ide> var err error
<ide> for scanner.Scan() {
<del> bytes := scanner.Bytes()
<del> switch currentLine {
<del> case 0:
<del> bytes, err = processFirstLine(d, bytes)
<del> if err != nil {
<del> return nil, err
<del> }
<del> default:
<del> bytes = processLine(bytes, true)
<add> bytesRead := scanner.Bytes()
<add> if currentLine == 0 {
<add> // First line, strip the byte-order-marker if present
<add> bytesRead = bytes.TrimPrefix(bytesRead, utf8bom)
<add> }
<add> bytesRead, err = processLine(d, bytesRead, true)
<add> if err != nil {
<add> return nil, err
<ide> }
<ide> currentLine++
<ide>
<ide> startLine := currentLine
<del> line, isEndOfLine := trimContinuationCharacter(string(bytes), d)
<add> line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d)
<ide> if isEndOfLine && line == "" {
<ide> continue
<ide> }
<ide>
<ide> for !isEndOfLine && scanner.Scan() {
<del> bytes := processLine(scanner.Bytes(), false)
<add> bytesRead, err := processLine(d, scanner.Bytes(), false)
<add> if err != nil {
<add> return nil, err
<add> }
<ide> currentLine++
<ide>
<ide> // TODO: warn this is being deprecated/removed
<del> if isEmptyContinuationLine(bytes) {
<add> if isEmptyContinuationLine(bytesRead) {
<ide> continue
<ide> }
<ide>
<del> continuationLine := string(bytes)
<add> continuationLine := string(bytesRead)
<ide> continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d)
<ide> line += continuationLine
<ide> }
<ide> func Parse(rwc io.Reader) (*Result, error) {
<ide> }
<ide> root.AddChild(child, startLine, currentLine)
<ide> }
<del>
<ide> return &Result{AST: root, EscapeToken: d.escapeToken}, nil
<ide> }
<ide>
<ide> func trimContinuationCharacter(line string, d *Directive) (string, bool) {
<ide>
<ide> // TODO: remove stripLeftWhitespace after deprecation period. It seems silly
<ide> // to preserve whitespace on continuation lines. Why is that done?
<del>func processLine(token []byte, stripLeftWhitespace bool) []byte {
<add>func processLine(d *Directive, token []byte, stripLeftWhitespace bool) ([]byte, error) {
<ide> if stripLeftWhitespace {
<ide> token = trimWhitespace(token)
<ide> }
<del> return trimComments(token)
<del>}
<del>
<del>func processFirstLine(d *Directive, token []byte) ([]byte, error) {
<del> token = bytes.TrimPrefix(token, utf8bom)
<del> token = trimWhitespace(token)
<del> err := d.processLine(string(token))
<add> err := d.possibleParserDirective(string(token))
<ide> return trimComments(token), err
<ide> } | 1 |
Javascript | Javascript | remove unused www shims | 8e251c5416b58fee3a45d39948a6288dd5d2129c | <ide><path>scripts/rollup/shims/facebook-www/ReactDOMInputSelection.js
<del>/**
<del> * Copyright 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * @providesModule ReactDOMInputSelection
<del> */
<del>
<del>'use strict';
<del>
<del>const {
<del> __SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,
<del>} = require('ReactDOM-fb');
<del>
<del>module.exports =
<del> __SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.ReactDOMInputSelection;
<ide><path>scripts/rollup/shims/facebook-www/ReactElement.js
<del>/**
<del> * Copyright 2014-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * @providesModule ReactElement
<del> */
<del>'use strict';
<del>
<del>const {createElement, createFactory, isValidElement} = require('React');
<del>
<del>module.exports = {
<del> createElement,
<del> createFactory,
<del> isValidElement,
<del>}; | 2 |
Python | Python | fix merge tests | 8da47b078d92bee2de3e5fb50a37483d8cb02f13 | <ide><path>transformers/modeling_ctrl.py
<ide> def forward(self, input_ids=None, past=None, attention_mask=None, token_type_ids
<ide> if inputs_embeds is None:
<ide> inputs_embeds = self.w(input_ids)
<ide> # inputs_embeds = embedded.unsqueeze(0) if len(input_ids.shape)<2 else embedded
<del> seq_len = input_shape.shape[-1]
<add> seq_len = input_shape[-1]
<ide> mask = torch.triu(torch.ones(seq_len + past_length, seq_len + past_length), 1).to(inputs_embeds.device)
<ide>
<ide> inputs_embeds *= np.sqrt(self.d_model_size) | 1 |
Text | Text | use backticks around file names in readme.md | f4235794981fa71f700e04d88d4cab015fc0143b | <ide><path>README.md
<ide> _docs_ subdirectory. Version-specific documentation is also at
<ide>
<ide> ### Verifying Binaries
<ide>
<del>Download directories contain a SHASUMS256.txt file with SHA checksums for the
<add>Download directories contain a `SHASUMS256.txt` file with SHA checksums for the
<ide> files.
<ide>
<del>To download SHASUMS256.txt using `curl`:
<add>To download `SHASUMS256.txt` using `curl`:
<ide>
<ide> ```console
<ide> $ curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt
<ide> it through `sha256sum` with a command such as:
<ide> $ grep node-vx.y.z.tar.gz SHASUMS256.txt | sha256sum -c -
<ide> ```
<ide>
<del>For Current and LTS, the GPG detached signature of SHASUMS256.txt is in
<del>SHASUMS256.txt.sig. You can use it with `gpg` to verify the integrity of
<del>SHASUM256.txt. You will first need to import all the GPG keys of individuals
<add>For Current and LTS, the GPG detached signature of `SHASUMS256.txt` is in
<add>`SHASUMS256.txt.sig`. You can use it with `gpg` to verify the integrity of
<add>`SHASUM256.txt`. You will first need to import all the GPG keys of individuals
<ide> authorized to create releases. They are at the bottom of this README under
<ide> [Release Team](#release-team). To import the keys:
<ide>
<ide> $ gpg --keyserver pool.sks-keyservers.net --recv-keys DD8F2338BAE7501E3DD5AC78C2
<ide>
<ide> See the bottom of this README for a full script to import active release keys.
<ide>
<del>Next, download the SHASUMS256.txt.sig for the release:
<add>Next, download the `SHASUMS256.txt.sig` for the release:
<ide>
<ide> ```console
<ide> $ curl -O https://nodejs.org/dist/vx.y.z/SHASUMS256.txt.sig | 1 |
Python | Python | add docstrings for simple_ner | da6e59519e8fcac4ff40e8199b5182f25769f03d | <ide><path>spacy/ml/models/simple_ner.py
<ide> def BiluoTagger(
<ide> tok2vec: Model[List[Doc], List[Floats2d]]
<ide> ) -> Model[List[Doc], List[Floats2d]]:
<add> """Construct a simple NER tagger, that predicts BILUO tag scores for each
<add> token and uses greedy decoding with transition-constraints to return a valid
<add> BILUO tag sequence.
<add>
<add> A BILUO tag sequence encodes a sequence of non-overlapping labelled spans
<add> into tags assigned to each token. The first token of a span is given the
<add> tag B-LABEL, the last token of the span is given the tag L-LABEL, and tokens
<add> within the span are given the tag U-LABEL. Single-token spans are given
<add> the tag U-LABEL. All other tokens are assigned the tag O.
<add>
<add> The BILUO tag scheme generally results in better linear separation between
<add> classes, especially for non-CRF models, because there are more distinct classes
<add> for the different situations (Ratinov et al., 2009).
<add> """
<ide> biluo = BILUO()
<ide> linear = Linear(
<ide> nO=None, nI=tok2vec.get_dim("nO"), init_W=configure_normal_init(mean=0.02)
<ide> def BiluoTagger(
<ide> def IOBTagger(
<ide> tok2vec: Model[List[Doc], List[Floats2d]]
<ide> ) -> Model[List[Doc], List[Floats2d]]:
<add> """Construct a simple NER tagger, that predicts IOB tag scores for each
<add> token and uses greedy decoding with transition-constraints to return a valid
<add> IOB tag sequence.
<add>
<add> A IOB tag sequence encodes a sequence of non-overlapping labelled spans
<add> into tags assigned to each token. The first token of a span is given the
<add> tag B-LABEL, and subsequent tokens are given the tag I-LABEL.
<add> All other tokens are assigned the tag O.
<add> """
<ide> biluo = IOB()
<ide> linear = Linear(nO=None, nI=tok2vec.get_dim("nO"))
<ide> model = chain( | 1 |
Python | Python | remove duplicate imports | 46083ffa0e8e6ee3985db4e37a1498d73e6e719b | <ide><path>numpy/core/tests/test_numeric.py
<ide> from numpy.testing import (
<ide> assert_, assert_equal, assert_raises, assert_raises_regex,
<ide> assert_array_equal, assert_almost_equal, assert_array_almost_equal,
<del> assert_raises, suppress_warnings, HAS_REFCOUNT
<add> suppress_warnings, HAS_REFCOUNT
<ide> )
<ide>
<ide>
<ide><path>numpy/f2py/tests/test_array_from_pyobj.py
<ide> import copy
<ide> import pytest
<ide>
<del>import pytest
<del>
<ide> from numpy import (
<ide> array, alltrue, ndarray, zeros, dtype, intp, clongdouble
<ide> ) | 2 |
Go | Go | move engine-api to docker/docker/api | ad10ae9b3bd03bf0073876ab0e792fbb82273cd7 | <ide><path>libnetwork/cluster/provider.go
<ide> package cluster
<ide>
<ide> import (
<del> "github.com/docker/engine-api/types/network"
<add> "github.com/docker/docker/api/types/network"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<ide><path>libnetwork/cmd/dnet/dnet.go
<ide> import (
<ide> "github.com/docker/docker/pkg/reexec"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> "github.com/docker/docker/api/types/network"
<ide> "github.com/docker/docker/pkg/term"
<del> "github.com/docker/engine-api/types/network"
<ide> "github.com/docker/libnetwork"
<ide> "github.com/docker/libnetwork/api"
<ide> "github.com/docker/libnetwork/config"
<ide><path>libnetwork/cmd/dnet/dnet_linux.go
<ide> func setupDumpStackTrap() {
<ide> signal.Notify(c, syscall.SIGUSR1)
<ide> go func() {
<ide> for range c {
<del> psignal.DumpStacks()
<add> psignal.DumpStacks("")
<ide> }
<ide> }()
<ide> }
<ide><path>libnetwork/cmd/dnet/dnet_windows.go
<ide> func setupDumpStackTrap() {
<ide> logrus.Debugf("Stackdump - waiting signal at %s", ev)
<ide> for {
<ide> syscall.WaitForSingleObject(h, syscall.INFINITE)
<del> signal.DumpStacks()
<add> signal.DumpStacks("")
<ide> }
<ide> }
<ide> }()
<ide><path>libnetwork/drivers/remote/driver_test.go
<ide> func TestGetEmptyCapabilities(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newDriver(plugin, p.Client)
<add> d := newDriver(plugin, p.Client())
<ide> if d.Type() != plugin {
<ide> t.Fatal("Driver type does not match that given")
<ide> }
<ide> func TestGetExtraCapabilities(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newDriver(plugin, p.Client)
<add> d := newDriver(plugin, p.Client())
<ide> if d.Type() != plugin {
<ide> t.Fatal("Driver type does not match that given")
<ide> }
<ide> func TestGetInvalidCapabilities(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newDriver(plugin, p.Client)
<add> d := newDriver(plugin, p.Client())
<ide> if d.Type() != plugin {
<ide> t.Fatal("Driver type does not match that given")
<ide> }
<ide> func TestRemoteDriver(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newDriver(plugin, p.Client)
<add> d := newDriver(plugin, p.Client())
<ide> if d.Type() != plugin {
<ide> t.Fatal("Driver type does not match that given")
<ide> }
<ide> func TestDriverError(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> driver := newDriver(plugin, p.Client)
<add> driver := newDriver(plugin, p.Client())
<ide>
<ide> if err := driver.CreateEndpoint("dummy", "dummy", &testEndpoint{t: t}, map[string]interface{}{}); err == nil {
<ide> t.Fatalf("Expected error from driver")
<ide> func TestMissingValues(t *testing.T) {
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> }
<del> driver := newDriver(plugin, p.Client)
<add> driver := newDriver(plugin, p.Client())
<ide>
<ide> if err := driver.CreateEndpoint("dummy", "dummy", ep, map[string]interface{}{}); err != nil {
<ide> t.Fatal(err)
<ide> func TestRollback(t *testing.T) {
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> }
<del> driver := newDriver(plugin, p.Client)
<add> driver := newDriver(plugin, p.Client())
<ide>
<ide> ep := &rollbackEndpoint{}
<ide>
<ide><path>libnetwork/ipams/remote/remote_test.go
<ide> func TestGetCapabilities(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newAllocator(plugin, p.Client)
<add> d := newAllocator(plugin, p.Client())
<ide>
<ide> caps, err := d.(*allocator).getCapabilities()
<ide> if err != nil {
<ide> func TestGetCapabilitiesFromLegacyDriver(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newAllocator(plugin, p.Client)
<add> d := newAllocator(plugin, p.Client())
<ide>
<ide> if _, err := d.(*allocator).getCapabilities(); err == nil {
<ide> t.Fatalf("Expected error, but got Success %v", err)
<ide> func TestGetDefaultAddressSpaces(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newAllocator(plugin, p.Client)
<add> d := newAllocator(plugin, p.Client())
<ide>
<ide> l, g, err := d.(*allocator).GetDefaultAddressSpaces()
<ide> if err != nil {
<ide> func TestRemoteDriver(t *testing.T) {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> d := newAllocator(plugin, p.Client)
<add> d := newAllocator(plugin, p.Client())
<ide>
<ide> l, g, err := d.(*allocator).GetDefaultAddressSpaces()
<ide> if err != nil { | 6 |
Python | Python | refactor the password management | f7b94229b1228b69b6f4adbb141ebb30e3689a0b | <ide><path>glances/core/glances_main.py
<ide> def init_args(self):
<ide> # Server option
<ide> parser.add_argument('-p', '--port', default=self.server_port, type=int, dest='port',
<ide> help=_('define the client/server TCP port [default: %d]') % self.server_port)
<del> parser.add_argument('-P', '--password', dest='password_arg',
<del> help=_('old method to define a client/server password'))
<add> parser.add_argument('--password-badidea', dest='password_arg',
<add> help=_('Define password from the command line'))
<ide> parser.add_argument('--password', action='store_true', default=False, dest='password_prompt',
<del> help=_('define a client/server password from the prompt'))
<add> help=_('define a client/server password from the prompt or file'))
<ide> parser.add_argument('-s', '--server', action='store_true', default=False,
<ide> dest='server', help=_('run Glances in server mode'))
<ide> parser.add_argument('--snmp-community', default='public', dest='snmp_community',
<ide> def parse_args(self):
<ide> # Server or client login/password
<ide> args.username = self.username
<ide> if args.password_arg is not None:
<del> # Password is passed as an argument
<del> args.password = args.password_arg
<add> import hashlib
<add> # Password is given as an argument
<add> # Hash with MD5
<add> # Only the MD5 will be transmit on the network
<add> args.password = hashlib.md5(args.password_arg).hexdigest()
<ide> elif args.password_prompt:
<del> # Interactive password
<add> # Interactive or file password
<ide> if args.server:
<ide> args.password = self.__get_password(
<ide> description=_("Define the password for the Glances server"),
<ide> confirm=True)
<ide> elif args.client:
<ide> args.password = self.__get_password(
<ide> description=_("Enter the Glances server password"),
<del> confirm=False)
<add> clear=True)
<ide> else:
<ide> # Default is no password
<ide> args.password = self.password
<ide> def parse_args(self):
<ide> if args.client is not None:
<ide> self.client_tag = True
<ide> self.server_ip = args.client
<del>
<del> # if args.output is not None:
<del> # setattr(self, args.output.lower() + '_tag', True)
<del> # if args.file is not None:
<del> # output_file = args.file
<del> # output_folder = args.file
<ide> # /!!!
<ide>
<ide> # Interactive cmds like CLI args?
<ide> def parse_args(self):
<ide>
<ide> return args
<ide>
<del> def __get_password(self, description='', confirm=False):
<add> def __hash_password(self, plain_password):
<ide> """
<del> Read a password from the command line (with confirmation if confirm = True)
<add> Hash a plain password and return the hashed one
<ide> """
<del> import getpass
<add> from glances.core.glances_password import glancesPassword
<ide>
<del> if description != '':
<del> sys.stdout.write("%s\n" % description)
<add> password = glancesPassword()
<ide>
<del> password1 = getpass.getpass(_("Password: "))
<del> if confirm:
<del> password2 = getpass.getpass(_("Password (confirm): "))
<del> else:
<del> return password1
<add> return password.hash_password(plain_password)
<ide>
<del> if password1 == password2:
<del> return password1
<del> else:
<del> sys.stdout.write(_("[Warning] Passwords did not match, please try again...\n"))
<del> return self.__get_password(description=description, confirm=confirm)
<add> def __get_password(self, description='', confirm=False, clear=False):
<add> """
<add> Read a password from the command line
<add> - with confirmation if confirm = True
<add> - plain (clear password) if clear = True
<add> """
<add> from glances.core.glances_password import glancesPassword
<add>
<add> password = glancesPassword()
<add>
<add> return password.get_password(description, confirm, clear)
<ide>
<ide> def is_standalone(self):
<ide> """
<ide><path>glances/core/glances_password.py
<add># -*- coding: utf-8 -*-
<add>#
<add># This file is part of Glances.
<add>#
<add># Copyright (C) 2014 Nicolargo <[email protected]>
<add>#
<add># Glances is free software; you can redistribute it and/or modify
<add># it under the terms of the GNU Lesser General Public License as published by
<add># the Free Software Foundation, either version 3 of the License, or
<add># (at your option) any later version.
<add>#
<add># Glances is distributed in the hope that it will be useful,
<add># but WITHOUT ANY WARRANTY; without even the implied warranty of
<add># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<add># GNU Lesser General Public License for more details.
<add>#
<add># You should have received a copy of the GNU Lesser General Public License
<add># along with this program. If not, see <http://www.gnu.org/licenses/>.
<add>
<add># Import system libs
<add>import os
<add>import sys
<add>import uuid
<add>import hashlib
<add>import getpass
<add>
<add># Import Glances lib
<add>from glances.core.glances_globals import (
<add> __appname__,
<add> is_bsd,
<add> is_linux,
<add> is_mac,
<add> is_windows
<add>)
<add>
<add>
<add>class glancesPassword:
<add> """
<add> Manage password
<add> """
<add>
<add> def __init__(self):
<add> self.password_path = self.get_password_path()
<add> self.password_filename = __appname__ + '.pwd'
<add> self.password_filepath = os.path.join(self.password_path, self.password_filename)
<add>
<add> def get_password_path(self):
<add> """
<add> Get the path where the password file will be stored
<add> On Linux and BSD the right place should be $XDG_DATA_HOME aka $HOME/.local/share/glances/foo.
<add> On OS X: the right place is under user's Library folder aka $HOME/Library/glances/foo
<add> On Windows: os.environ['APPDATA']+'/glances/'+foo
<add> """
<add>
<add> # Get the system application data path for the current user
<add> if is_linux or is_bsd:
<add> app_path = os.environ.get('XDG_CONFIG_HOME') or os.path.expanduser('~/.config')
<add> elif is_mac:
<add> app_path = os.path.join(os.environ.get('HOME'), 'Library')
<add> elif is_windows:
<add> app_path = os.environ.get('APPDATA')
<add> else:
<add> app_path = '.'
<add>
<add> # Append the Glances folder
<add> app_path = os.path.join(app_path, __appname__)
<add>
<add> return app_path
<add>
<add> def get_hash(self, salt, plain_password):
<add> """
<add> Return the hashed password SHA265 + salt
<add> """
<add> return hashlib.sha256(salt.encode() + plain_password.encode()).hexdigest()
<add>
<add> def hash_password(self, plain_password):
<add> """
<add> Hash password with a salt based on UUID
<add> """
<add> salt = uuid.uuid4().hex
<add> encrypted_password = self.get_hash(salt, plain_password)
<add> return salt + '$' + encrypted_password
<add>
<add> def check_password(self, hashed_password, plain_password):
<add> """
<add> Encode the plain_password with the salt of the hashed_password
<add> and return the comparaison with the encrypted_password
<add> """
<add> salt, encrypted_password = hashed_password.split('$')
<add> re_encrypted_password = self.get_hash(salt, plain_password)
<add> return encrypted_password == re_encrypted_password
<add>
<add> def get_password(self, description='', confirm=False, clear=False):
<add> """
<add> For Glances server, get the password (confirm=True, clear=False)
<add> 1) from the password file (if the file exist)
<add> 2) from the CLI
<add> Optinnaly: save the password to a file (hashed with SHA256 + SALT)
<add>
<add> For Glances client, get the password (confirm=False, clear=True)
<add> 1) From the CLI
<add> 2) The password is hashed with MD5 (only MD5 string transit thrught the network)
<add> """
<add>
<add> if os.path.exists(self.password_filepath) and not clear:
<add> # If the password file exist then use it
<add> sys.stdout.write(_("[Info] Read password from file %s\n") % self.password_filepath)
<add> password = self.load_password()
<add> else:
<add> # Else enter the password from the command line
<add> if description != '':
<add> sys.stdout.write("%s\n" % description)
<add>
<add> # password_plain is the password MD5
<add> # password_hashed is the hashed password
<add> password_md5 = hashlib.md5(getpass.getpass(_("Password: "))).hexdigest()
<add> password_hashed = self.hash_password(password_md5)
<add> if confirm:
<add> # password_confirm is the clear password (only used to compare)
<add> password_confirm = hashlib.md5(getpass.getpass(_("Password (confirm): "))).hexdigest()
<add>
<add> if not self.check_password(password_hashed, password_confirm):
<add> sys.stdout.write(_("[Error] Sorry, but passwords did not match...\n"))
<add> sys.exit(1)
<add>
<add> # Return the clear or hashed password
<add> if clear:
<add> password = password_md5
<add> else:
<add> password = password_hashed
<add>
<add> # Save the hashed password to the password file
<add> if not clear:
<add> save_input = raw_input(_("Do you want to save the password (Yes|No) ? "))
<add> if len(save_input) > 0 and save_input[0].upper() == _('Y'):
<add> self.save_password(password_hashed)
<add>
<add> return password
<add>
<add> def save_password(self, hashed_password):
<add> """
<add> Save the hashed password to the Glances appdata folder
<add> """
<add>
<add> # Check if the Glances appdata folder already exist
<add> if not os.path.exists(self.password_path):
<add> # Create the Glances appdata folder
<add> try:
<add> os.mkdir(self.password_path)
<add> except Exception as e:
<add> sys.stdout.write(_("[Warning] Glances application data folder can not be created (%s)\n") % e)
<add> return
<add>
<add> # Create/overwrite the password file to the Glances application data folder
<add> try:
<add> file_pwd = open(self.password_filepath, 'w')
<add> except Exception as e:
<add> sys.stdout.write(_("[Warning] Glances wan not create the password file (%s)\n") % e)
<add> return
<add>
<add> # Generate the password file
<add> file_pwd.write(hashed_password)
<add> file_pwd.close()
<add>
<add> def load_password(self):
<add> """
<add> Load the hashed password from the Glances appdata folder
<add> """
<add>
<add> # Create/overwrite the password file to the Glances application data folder
<add> try:
<add> file_pwd = open(self.password_filepath, 'r')
<add> except Exception as e:
<add> sys.stdout.write(_("[Warning] Glances wan not read the password file (%s)\n") % e)
<add> return None
<add>
<add> # Read the password file
<add> hashed_password = file_pwd.read()
<add> file_pwd.close()
<add>
<add> return hashed_password
<ide><path>glances/core/glances_server.py
<ide> def authenticate(self, headers):
<ide> def check_user(self, username, password):
<ide> # Check username and password in the dictionnary
<ide> if username in self.server.user_dict:
<del> if self.server.user_dict[username] == md5(password).hexdigest():
<del> return True
<del> return False
<add> from glances.core.glances_password import glancesPassword
<add>
<add> pwd = glancesPassword()
<add>
<add> # print "Server password: %s" % self.server.user_dict[username]
<add> # print "Client password: %s" % password
<add> # print "MD5Cli password: %s" % md5(password).hexdigest()
<add> # print "check_password: %s" % pwd.check_password(self.server.user_dict[username], password)
<add> return pwd.check_password(self.server.user_dict[username], password)
<add> else:
<add> return False
<ide>
<ide> def parse_request(self):
<ide> if SimpleXMLRPCRequestHandler.parse_request(self):
<ide> def __init__(self, requestHandler=GlancesXMLRPCHandler,
<ide> sys.exit(2)
<ide>
<ide> # The users dict
<del> # username / MD5 password couple
<add> # username / password couple
<ide> # By default, no auth is needed
<ide> self.server.user_dict = {}
<ide> self.server.isAuth = False
<ide> def add_user(self, username, password):
<ide> """
<ide> Add an user to the dictionnary
<ide> """
<del> self.server.user_dict[username] = md5(password).hexdigest()
<add> self.server.user_dict[username] = password
<ide> self.server.isAuth = True
<ide>
<ide> def serve_forever(self): | 3 |
PHP | PHP | trim comment bloat in text class | a2406c017486019ab992d68b921d3202b7626284 | <ide><path>system/text.php
<ide> public static function words($value, $limit, $end = '…')
<ide> return $value;
<ide> }
<ide>
<del> // -----------------------------------------------------
<del> // Limit the words in the string.
<del> // -----------------------------------------------------
<ide> preg_match('/^\s*+(?:\S++\s*+){1,'.$limit.'}/', $value, $matches);
<ide>
<del> // -----------------------------------------------------
<del> // If the string did not exceed the limit, we won't
<del> // need an ending character.
<del> // -----------------------------------------------------
<ide> if (strlen($value) == strlen($matches[0]))
<ide> {
<ide> $end = '';
<ide> public static function characters($value, $limit, $end = '…')
<ide> return $value;
<ide> }
<ide>
<del> // -----------------------------------------------------
<ide> // Replace new lines and whitespace in the string.
<del> // -----------------------------------------------------
<ide> $value = preg_replace("/\s+/", ' ', str_replace(array("\r\n", "\r", "\n"), ' ', $value));
<ide>
<ide> if (strlen($value) <= $limit)
<ide> public static function characters($value, $limit, $end = '…')
<ide>
<ide> $out = '';
<ide>
<del> // -----------------------------------------------------
<del> // The string exceeds the character limit. Add each word
<del> // to the output individually until we reach the limit.
<del> // -----------------------------------------------------
<ide> foreach (explode(' ', trim($value)) as $val)
<ide> {
<ide> $out .= $val.' ';
<ide> public static function censor($value, $censored, $replacement = '####')
<ide> {
<ide> $value = ' '.$value.' ';
<ide>
<del> // -----------------------------------------------------
<ide> // Assume the word will be book-ended by the following.
<del> // -----------------------------------------------------
<ide> $delim = '[-_\'\"`(){}<>\[\]|!?@#%&,.:;^~*+=\/ 0-9\n\r\t]';
<ide>
<del> // -----------------------------------------------------
<del> // Replace the censored words.
<del> // -----------------------------------------------------
<ide> foreach ($censored as $word)
<ide> {
<ide> if ($replacement != '') | 1 |
Java | Java | expose environment configurationservice | 8770ea96b03106a06ee7ce401dc2fe39f419f816 | <ide><path>org.springframework.core/src/main/java/org/springframework/core/env/AbstractEnvironment.java
<ide> public abstract class AbstractEnvironment implements ConfigurableEnvironment {
<ide> new PropertyPlaceholderHelper(PLACEHOLDER_PREFIX, PLACEHOLDER_SUFFIX, VALUE_SEPARATOR, false);
<ide>
<ide>
<add> public ConversionService getConversionService() {
<add> return this.conversionService;
<add> }
<ide>
<add> public void setConversionService(ConversionService conversionService) {
<add> this.conversionService = conversionService;
<add> }
<ide>
<ide> public void addPropertySource(PropertySource<?> propertySource) {
<ide> propertySources.push(propertySource);
<ide><path>org.springframework.core/src/main/java/org/springframework/core/env/ConfigurableEnvironment.java
<ide>
<ide> package org.springframework.core.env;
<ide>
<add>import org.springframework.core.convert.ConversionService;
<add>
<ide> /**
<ide> * TODO SPR-7508: document
<ide> *
<ide> public interface ConfigurableEnvironment extends Environment, PropertySourceAggr
<ide> */
<ide> void setDefaultProfiles(String... profiles);
<ide>
<add> public ConversionService getConversionService();
<add>
<add> public void setConversionService(ConversionService conversionService);
<ide> } | 2 |
Mixed | Python | update codeparrot data preprocessing | e730e1256732b5dfeae2bdd427beacc3fbc20e2a | <ide><path>examples/research_projects/codeparrot/README.md
<ide> Additionally, sure you have git-lfs installed. You can find instructions for how
<ide> The source of the dataset is the GitHub dump available on Google's [BigQuery](https://cloud.google.com/blog/topics/public-datasets/github-on-bigquery-analyze-all-the-open-source-code). The database was queried for all Python files with less than 1MB in size resulting in a 180GB dataset with over 20M files. The dataset is available on the Hugging Face Hub [here](https://huggingface.co/datasets/transformersbook/codeparrot).
<ide>
<ide> ### Preprocessing
<del>The raw dataset contains many duplicates. We deduplicated and filtered the dataset using the heuristics proposed in OpenAI's Codex [paper](https://arxiv.org/abs/2107.03374):
<add>The raw dataset contains many duplicates. We deduplicated and filtered the dataset using the heuristics proposed in OpenAI's Codex [paper](https://arxiv.org/abs/2107.03374) and some new ones:
<ide>
<ide> - exact deduplication using each file's hash
<ide> - filtering files with max line length > 1000
<ide> - filtering files with mean line length > 100
<ide> - fraction of alphanumeric characters < 0.25
<ide> - containing the word "auto-generated" or similar in the first 5 lines
<add>- filtering with a probability of 0.7 of files with a mention of "test file" or "configuration file" or similar in the first 5 lines
<add>- filtering with a probability of 0.7 of files with high occurence of the keywords "test " or "config"
<add>- filtering with a probability of 0.7 of files without a mention of the keywords `def` , `for`, `while` and `class`
<add>- filtering files that use the assignment operator `=` less than 5 times
<add>- filtering files with ratio between number of characters and number of tokens after tokenization < 1.5 (the average ratio is 3.6)
<ide>
<del>The script to process the full dataset can be found in `scripts/preprocessing.py`. Executing the script on 16 vCPUs takes roughly 3h and removes 70% of the original dataset. The cleaned [train](https://huggingface.co/datasets/lvwerra/codeparrot-clean-train) and [validation](https://huggingface.co/datasets/lvwerra/codeparrot-clean-valid) splits are also available on the Hub if you want to skip this step or use the data for another project.
<add>The script to process the full dataset can be found in `scripts/preprocessing.py`. Executing the script on 16 vCPUs takes roughly 3h and removes 70% of the original dataset. The cleaned [train](https://huggingface.co/datasets/loubnabnl/codeparrot-clean-train-v2) and [validation](https://huggingface.co/datasets/loubnabnl/codeparrot-clean-valid-v2) splits are also available on the Hub if you want to skip this step or use the data for another project.
<ide>
<ide> To execute the preprocessing run the following command:
<ide> ```bash
<ide> python scripts/preprocessing.py \
<del>--dataset_name lvwerra/codeparrot \
<add>--dataset_name transformersbook/codeparrot \
<ide> --output_dir codeparrot-clean
<ide> ```
<ide> During preprocessing the dataset is downloaded and stored locally as well as caches of the computations. Make sure you have more than 500GB free disk space to execute it.
<ide><path>examples/research_projects/codeparrot/scripts/arguments.py
<ide> class PreprocessingArguments:
<ide> },
<ide> )
<ide> dataset_name: Optional[str] = field(
<del> default="codeparrot", metadata={"help": "Folder or name of dataset to process."}
<add> default="transformersbook/codeparrot", metadata={"help": "Folder or name of dataset to process."}
<ide> )
<ide> output_dir: Optional[str] = field(
<ide> default="codeparrot-clean", metadata={"help": "Folder to save processed processed dataset."}
<ide> class PreprocessingArguments:
<ide> alpha_frac: Optional[float] = field(
<ide> default=0.25, metadata={"help": "Maximum fraction of non-alphanumeric characters, otherwise file is filtered."}
<ide> )
<add> min_token_ratio: Optional[float] = field(
<add> default=1.5, metadata={"help": "Minimum character token ratio for the file, otherwise file is filtered."}
<add> )
<add> filter_proba: Optional[float] = field(
<add> default=0.7, metadata={"help": "Probability for filtering config, test and uncommon files."}
<add> )
<add> tokenizer: Optional[str] = field(
<add> default="lvwerra/codeparrot",
<add> metadata={"help": "Name or path to the tokenizer."},
<add> )
<ide>
<ide>
<ide> @dataclass
<ide><path>examples/research_projects/codeparrot/scripts/preprocessing.py
<ide> from datasets import load_dataset
<ide>
<ide> from arguments import PreprocessingArguments
<del>from transformers import HfArgumentParser
<add>from transformers import AutoTokenizer, HfArgumentParser
<ide>
<ide>
<ide> def get_hash(example):
<ide> def is_autogenerated(example, scan_width=5):
<ide> return {"autogenerated": False}
<ide>
<ide>
<add>def is_config_or_test(example, scan_width=5, coeff=0.05):
<add> """Check if file is a configuration file or a unit test by :
<add> 1- looking for keywords in the first few lines of the file.
<add> 2- counting number of occurence of the words 'config' and 'test' with respect to number of lines.
<add> """
<add>
<add> keywords = ["unit tests", "test file", "configuration file"]
<add> lines = example["content"].splitlines()
<add> count_config = 0
<add> count_test = 0
<add> # first test
<add> for _, line in zip(range(scan_width), lines):
<add> for keyword in keywords:
<add> if keyword in line.lower():
<add> return {"config_or_test": True}
<add> # second test
<add> nlines = example["content"].count("\n")
<add> threshold = int(coeff * nlines)
<add> for line in lines:
<add> count_config += line.lower().count("config")
<add> count_test += line.lower().count("test")
<add> if count_config > threshold or count_test > threshold:
<add> return {"config_or_test": True}
<add> return {"config_or_test": False}
<add>
<add>
<add>def has_no_keywords(example):
<add> """Check if a python file has none of the keywords for: funcion, class, for loop, while loop."""
<add> keywords = ["def ", "class ", "for ", "while "]
<add> lines = example["content"].splitlines()
<add> for line in lines:
<add> for keyword in keywords:
<add> if keyword in line.lower():
<add> return {"has_no_keywords": False}
<add> return {"has_no_keywords": True}
<add>
<add>
<add>def has_few_assignments(example, minimum=4):
<add> """Check if file uses symbol '=' less than `minimum` times."""
<add> lines = example["content"].splitlines()
<add> counter = 0
<add> for line in lines:
<add> counter += line.lower().count("=")
<add> if counter > minimum:
<add> return {"has_few_assignments": False}
<add> return {"has_few_assignments": True}
<add>
<add>
<add>def char_token_ratio(example):
<add> """Compute character/token ratio of the file with tokenizer."""
<add> input_ids = tokenizer(example["content"], truncation=False)["input_ids"]
<add> ratio = len(example["content"]) / len(input_ids)
<add> return {"ratio": ratio}
<add>
<add>
<ide> def preprocess(example):
<ide> """Chain all preprocessing steps into one function to not fill cache."""
<ide> results = dict()
<ide> results.update(get_hash(example))
<ide> results.update(line_stats(example))
<ide> results.update(alpha_stats(example))
<add> results.update(char_token_ratio(example))
<ide> results.update(is_autogenerated(example))
<add> results.update(is_config_or_test(example))
<add> results.update(has_no_keywords(example))
<add> results.update(has_few_assignments(example))
<ide> return results
<ide>
<ide>
<ide> def filter(example, uniques, args):
<del> """Filter dataset with heuristics."""
<add> """Filter dataset with heuristics. Config, test and has_no_keywords files are removed with a given probability."""
<ide> if not check_uniques(example, uniques):
<ide> return False
<ide> elif example["autogenerated"]:
<ide> def filter(example, uniques, args):
<ide> return False
<ide> elif example["alpha_frac"] < args.alpha_frac:
<ide> return False
<add> elif example["ratio"] < args.min_token_ratio:
<add> return False
<add> elif example["config_or_test"] and np.random.rand() <= args.filter_proba:
<add> return False
<add> elif example["has_no_keywords"] and np.random.rand() <= args.filter_proba:
<add> return False
<add> elif example["has_few_assignments"]:
<add> return False
<ide> else:
<ide> return True
<ide>
<ide> def compress_file(file_path):
<ide> args = parser.parse_args()
<ide> if args.num_workers is None:
<ide> args.num_workers = multiprocessing.cpu_count()
<add>tokenizer = AutoTokenizer.from_pretrained(args.tokenizer_dir)
<ide>
<ide> # Load dataset
<ide> t_start = time.time() | 3 |
Javascript | Javascript | fix issue with onendreached | d3658bc2b6437e858d3b3f5688277dedbca779b8 | <ide><path>Libraries/Lists/VirtualizedList.js
<ide> class VirtualizedList extends React.PureComponent<Props, State> {
<ide> componentDidUpdate(prevProps: Props) {
<ide> const {data, extraData} = this.props;
<ide> if (data !== prevProps.data || extraData !== prevProps.extraData) {
<del> this._hasDataChangedSinceEndReached = true;
<del>
<ide> // clear the viewableIndices cache to also trigger
<ide> // the onViewableItemsChanged callback with the new data
<ide> this._viewabilityTuples.forEach(tuple => {
<ide> class VirtualizedList extends React.PureComponent<Props, State> {
<ide> _fillRateHelper: FillRateHelper;
<ide> _frames = {};
<ide> _footerLength = 0;
<del> _hasDataChangedSinceEndReached = true;
<ide> _hasDoneInitialScroll = false;
<ide> _hasInteracted = false;
<ide> _hasMore = false;
<ide> class VirtualizedList extends React.PureComponent<Props, State> {
<ide> } = this.props;
<ide> const {contentLength, visibleLength, offset} = this._scrollMetrics;
<ide> const distanceFromEnd = contentLength - visibleLength - offset;
<add> const threshold = onEndReachedThreshold
<add> ? onEndReachedThreshold * visibleLength
<add> : 0;
<ide> if (
<ide> onEndReached &&
<ide> this.state.last === getItemCount(data) - 1 &&
<del> /* $FlowFixMe(>=0.63.0 site=react_native_fb) This comment suppresses an
<del> * error found when Flow v0.63 was deployed. To see the error delete this
<del> * comment and run Flow. */
<del> distanceFromEnd < onEndReachedThreshold * visibleLength &&
<del> (this._hasDataChangedSinceEndReached ||
<del> this._scrollMetrics.contentLength !== this._sentEndForContentLength)
<add> distanceFromEnd < threshold &&
<add> this._scrollMetrics.contentLength !== this._sentEndForContentLength
<ide> ) {
<del> // Only call onEndReached once for a given dataset + content length.
<del> this._hasDataChangedSinceEndReached = false;
<add> // Only call onEndReached once for a given content length
<ide> this._sentEndForContentLength = this._scrollMetrics.contentLength;
<ide> onEndReached({distanceFromEnd});
<add> } else if (distanceFromEnd > threshold) {
<add> // If the user scrolls away from the end and back again cause
<add> // an onEndReached to be triggered again
<add> this._sentEndForContentLength = 0;
<ide> }
<ide> }
<ide> | 1 |
Go | Go | improve message when pushing a non-existent image | 3a08bbc4d29835fe5b8f3d2c5bde442f6a7014a0 | <ide><path>graph/graph.go
<ide> func (graph *Graph) Exists(id string) bool {
<ide> func (graph *Graph) Get(name string) (*image.Image, error) {
<ide> id, err := graph.idIndex.Get(name)
<ide> if err != nil {
<del> return nil, err
<add> return nil, fmt.Errorf("could not find image: %v", err)
<ide> }
<ide> img, err := image.LoadImage(graph.ImageRoot(id))
<ide> if err != nil { | 1 |
Mixed | Python | add kannada examples | 4f27a24f5b78283435de85bca40b844c15b2cf4e | <ide><path>.github/contributors/nikhilsaldanha.md
<add># spaCy contributor agreement
<add>
<add>This spaCy Contributor Agreement (**"SCA"**) is based on the
<add>[Oracle Contributor Agreement](http://www.oracle.com/technetwork/oca-405177.pdf).
<add>The SCA applies to any contribution that you make to any product or project
<add>managed by us (the **"project"**), and sets out the intellectual property rights
<add>you grant to us in the contributed materials. The term **"us"** shall mean
<add>[ExplosionAI GmbH](https://explosion.ai/legal). The term
<add>**"you"** shall mean the person or entity identified below.
<add>
<add>If you agree to be bound by these terms, fill in the information requested
<add>below and include the filled-in version with your first pull request, under the
<add>folder [`.github/contributors/`](/.github/contributors/). The name of the file
<add>should be your GitHub username, with the extension `.md`. For example, the user
<add>example_user would create the file `.github/contributors/example_user.md`.
<add>
<add>Read this agreement carefully before signing. These terms and conditions
<add>constitute a binding legal agreement.
<add>
<add>## Contributor Agreement
<add>
<add>1. The term "contribution" or "contributed materials" means any source code,
<add>object code, patch, tool, sample, graphic, specification, manual,
<add>documentation, or any other material posted or submitted by you to the project.
<add>
<add>2. With respect to any worldwide copyrights, or copyright applications and
<add>registrations, in your contribution:
<add>
<add> * you hereby assign to us joint ownership, and to the extent that such
<add> assignment is or becomes invalid, ineffective or unenforceable, you hereby
<add> grant to us a perpetual, irrevocable, non-exclusive, worldwide, no-charge,
<add> royalty-free, unrestricted license to exercise all rights under those
<add> copyrights. This includes, at our option, the right to sublicense these same
<add> rights to third parties through multiple levels of sublicensees or other
<add> licensing arrangements;
<add>
<add> * you agree that each of us can do all things in relation to your
<add> contribution as if each of us were the sole owners, and if one of us makes
<add> a derivative work of your contribution, the one who makes the derivative
<add> work (or has it made) will be the sole owner of that derivative work;
<add>
<add> * you agree that you will not assert any moral rights in your contribution
<add> against us, our licensees or transferees;
<add>
<add> * you agree that we may register a copyright in your contribution and
<add> exercise all ownership rights associated with it; and
<add>
<add> * you agree that neither of us has any duty to consult with, obtain the
<add> consent of, pay or render an accounting to the other for any use or
<add> distribution of your contribution.
<add>
<add>3. With respect to any patents you own, or that you can license without payment
<add>to any third party, you hereby grant to us a perpetual, irrevocable,
<add>non-exclusive, worldwide, no-charge, royalty-free license to:
<add>
<add> * make, have made, use, sell, offer to sell, import, and otherwise transfer
<add> your contribution in whole or in part, alone or in combination with or
<add> included in any product, work or materials arising out of the project to
<add> which your contribution was submitted, and
<add>
<add> * at our option, to sublicense these same rights to third parties through
<add> multiple levels of sublicensees or other licensing arrangements.
<add>
<add>4. Except as set out above, you keep all right, title, and interest in your
<add>contribution. The rights that you grant to us under these terms are effective
<add>on the date you first submitted a contribution to us, even if your submission
<add>took place before the date you sign these terms.
<add>
<add>5. You covenant, represent, warrant and agree that:
<add>
<add> * Each contribution that you submit is and shall be an original work of
<add> authorship and you can legally grant the rights set out in this SCA;
<add>
<add> * to the best of your knowledge, each contribution will not violate any
<add> third party's copyrights, trademarks, patents, or other intellectual
<add> property rights; and
<add>
<add> * each contribution shall be in compliance with U.S. export control laws and
<add> other applicable export and import laws. You agree to notify us if you
<add> become aware of any circumstance which would make any of the foregoing
<add> representations inaccurate in any respect. We may publicly disclose your
<add> participation in the project, including the fact that you have signed the SCA.
<add>
<add>6. This SCA is governed by the laws of the State of California and applicable
<add>U.S. Federal law. Any choice of law rules will not apply.
<add>
<add>7. Please place an “x” on one of the applicable statement below. Please do NOT
<add>mark both statements:
<add>
<add> * [x] I am signing on behalf of myself as an individual and no other person
<add> or entity, including my employer, has or will have rights with respect to my
<add> contributions.
<add>
<add> * [x] I am signing on behalf of my employer or a legal entity and I have the
<add> actual authority to contractually bind that entity.
<add>
<add>## Contributor Details
<add>
<add>| Field | Entry |
<add>|------------------------------- | -------------------- |
<add>| Name | Nikhil Saldanha |
<add>| Company name (if applicable) | |
<add>| Title or role (if applicable) | |
<add>| Date | 2020-03-17 |
<add>| GitHub username | nikhilsaldanha |
<add>| Website (optional) | |
<ide><path>spacy/lang/kn/examples.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>
<add>"""
<add>Example sentences to test spaCy and its language models.
<add>
<add>>>> from spacy.lang.en.examples import sentences
<add>>>> docs = nlp.pipe(sentences)
<add>"""
<add>
<add>
<add>sentences = [
<add> "ಆಪಲ್ ಒಂದು ಯು.ಕೆ. ಸ್ಟಾರ್ಟ್ಅಪ್ ಅನ್ನು ೧ ಶತಕೋಟಿ ಡಾಲರ್ಗಳಿಗೆ ಖರೀದಿಸಲು ನೋಡುತ್ತಿದೆ.",
<add> "ಸ್ವಾಯತ್ತ ಕಾರುಗಳು ವಿಮಾ ಹೊಣೆಗಾರಿಕೆಯನ್ನು ತಯಾರಕರ ಕಡೆಗೆ ಬದಲಾಯಿಸುತ್ತವೆ.",
<add> "ಕಾಲುದಾರಿ ವಿತರಣಾ ರೋಬೋಟ್ಗಳನ್ನು ನಿಷೇಧಿಸುವುದನ್ನು ಸ್ಯಾನ್ ಫ್ರಾನ್ಸಿಸ್ಕೊ ಪರಿಗಣಿಸುತ್ತದೆ.",
<add> "ಲಂಡನ್ ಯುನೈಟೆಡ್ ಕಿಂಗ್ಡಂನ ದೊಡ್ಡ ನಗರ.",
<add> "ನೀನು ಎಲ್ಲಿದಿಯಾ?",
<add> "ಫ್ರಾನ್ಸಾದ ಅಧ್ಯಕ್ಷರು ಯಾರು?",
<add> "ಯುನೈಟೆಡ್ ಸ್ಟೇಟ್ಸ್ನ ರಾಜಧಾನಿ ಯಾವುದು?",
<add> "ಬರಾಕ್ ಒಬಾಮ ಯಾವಾಗ ಜನಿಸಿದರು?",
<add>] | 2 |
Ruby | Ruby | simplify token parsing | 44f1354d63d8c95a61bb12353838c03e9abbebb6 | <ide><path>Library/Homebrew/cask/lib/hbc/qualified_token.rb
<ide> module Hbc
<ide> module QualifiedToken
<del> REPO_PREFIX = "homebrew-".freeze
<del>
<del> # per https://github.com/Homebrew/homebrew/blob/4c7bc9ec3bca729c898ee347b6135ba692ee0274/Library/Homebrew/cmd/tap.rb#L121
<del> USER_REGEX = /[a-z0-9_\-]+/
<del>
<del> # per https://github.com/Homebrew/homebrew/blob/4c7bc9ec3bca729c898ee347b6135ba692ee0274/Library/Homebrew/cmd/tap.rb#L121
<del> REPO_REGEX = /(?:#{REPO_PREFIX})?\w+/
<del>
<del> # per https://github.com/caskroom/homebrew-cask/blob/master/CONTRIBUTING.md#generating-a-token-for-the-cask
<del> TOKEN_REGEX = /[a-z0-9\-]+/
<del>
<del> TAP_REGEX = %r{#{USER_REGEX}[/\-]#{REPO_REGEX}}
<del>
<del> QUALIFIED_TOKEN_REGEX = %r{#{TAP_REGEX}/#{TOKEN_REGEX}}
<del>
<ide> def self.parse(arg)
<del> return nil unless arg.is_a?(String) && arg.downcase =~ /^#{QUALIFIED_TOKEN_REGEX}$/
<del> path_elements = arg.downcase.split("/")
<del> if path_elements.count == 2
<del> # eg phinze-cask/google-chrome.
<del> # Not certain this form is needed, but it was supported in the past.
<del> token = path_elements[1]
<del> dash_elements = path_elements[0].split("-")
<del> repo = dash_elements.pop
<del> dash_elements.pop if dash_elements.count > 1 && dash_elements[-1] + "-" == REPO_PREFIX
<del> user = dash_elements.join("-")
<del> else
<del> # eg caskroom/cask/google-chrome
<del> # per https://github.com/Homebrew/brew/blob/master/docs/brew-tap.md
<del> user, repo, token = path_elements
<del> end
<del> repo.sub!(/^#{REPO_PREFIX}/, "")
<add> return nil unless arg.is_a?(String)
<add> return nil unless arg.downcase =~ HOMEBREW_TAP_CASK_REGEX
<add> # eg caskroom/cask/google-chrome
<add> # per https://github.com/Homebrew/brew/blob/master/docs/brew-tap.md
<add> user, repo, token = arg.downcase.split("/")
<ide> odebug "[user, repo, token] might be [#{user}, #{repo}, #{token}]"
<ide> [user, repo, token]
<ide> end
<ide><path>Library/Homebrew/tap_constants.rb
<ide> # match taps' formulae, e.g. someuser/sometap/someformula
<ide> HOMEBREW_TAP_FORMULA_REGEX = %r{^([\w-]+)/([\w-]+)/([\w+-.@]+)$}
<add># match taps' casks, e.g. someuser/sometap/somecask
<add>HOMEBREW_TAP_CASK_REGEX = %r{^([\w-]+)/([\w-]+)/([a-z0-9\-]+)$}
<ide> # match taps' directory paths, e.g. HOMEBREW_LIBRARY/Taps/someuser/sometap
<ide> HOMEBREW_TAP_DIR_REGEX = %r{#{Regexp.escape(HOMEBREW_LIBRARY.to_s)}/Taps/([\w-]+)/([\w-]+)}
<ide> # match taps' formula paths, e.g. HOMEBREW_LIBRARY/Taps/someuser/sometap/someformula | 2 |
Javascript | Javascript | fix autodetection of csp + better docs | 0113f2257415422729d5c2a9bdba76c1d0a17a13 | <ide><path>src/Angular.js
<ide> function equals(o1, o2) {
<ide> return false;
<ide> }
<ide>
<add>var csp = function() {
<add> if (isDefined(csp.isActive_)) return csp.isActive_;
<add>
<add> var active = !!(document.querySelector('[ng-csp]') ||
<add> document.querySelector('[data-ng-csp]'));
<add>
<add> if (!active) {
<add> try {
<add> /* jshint -W031, -W054 */
<add> new Function('');
<add> /* jshint +W031, +W054 */
<add> } catch (e) {
<add> active = true;
<add> }
<add> }
<add>
<add> return (csp.isActive_ = active);
<add>};
<ide>
<del>function csp() {
<del> return (document.securityPolicy && document.securityPolicy.isActive) ||
<del> (document.querySelector &&
<del> !!(document.querySelector('[ng-csp]') || document.querySelector('[data-ng-csp]')));
<del>}
<ide>
<ide>
<ide> function concat(array1, array2, index) {
<ide><path>src/ng/directive/ngCsp.js
<ide> * This is necessary when developing things like Google Chrome Extensions.
<ide> *
<ide> * CSP forbids apps to use `eval` or `Function(string)` generated functions (among other things).
<del> * For us to be compatible, we just need to implement the "getterFn" in $parse without violating
<del> * any of these restrictions.
<add> * For Angular to be CSP compatible there are only two things that we need to do differently:
<add> *
<add> * - don't use `Function` constructor to generate optimized value getters
<add> * - don't inject custom stylesheet into the document
<ide> *
<ide> * AngularJS uses `Function(string)` generated functions as a speed optimization. Applying the `ngCsp`
<ide> * directive will cause Angular to use CSP compatibility mode. When this mode is on AngularJS will
<ide> * includes some CSS rules (e.g. {@link ng.directive:ngCloak ngCloak}).
<ide> * To make those directives work in CSP mode, include the `angular-csp.css` manually.
<ide> *
<del> * In order to use this feature put the `ngCsp` directive on the root element of the application.
<add> * Angular tries to autodetect if CSP is active and automatically turn on the CSP-safe mode. This
<add> * autodetection however triggers a CSP error to be logged in the console:
<add> *
<add> * ```
<add> * Refused to evaluate a string as JavaScript because 'unsafe-eval' is not an allowed source of
<add> * script in the following Content Security Policy directive: "default-src 'self'". Note that
<add> * 'script-src' was not explicitly set, so 'default-src' is used as a fallback.
<add> * ```
<add> *
<add> * This error is harmless but annoying. To prevent the error from showing up, put the `ngCsp`
<add> * directive on the root element of the application or on the `angular.js` script tag, whichever
<add> * appears first in the html document.
<ide> *
<ide> * *Note: This directive is only available in the `ng-csp` and `data-ng-csp` attribute form.*
<ide> *
<ide> ```
<ide> */
<ide>
<del>// ngCsp is not implemented as a proper directive any more, because we need it be processed while we bootstrap
<del>// the system (before $parse is instantiated), for this reason we just have a csp() fn that looks for ng-csp attribute
<del>// anywhere in the current doc
<add>// ngCsp is not implemented as a proper directive any more, because we need it be processed while we
<add>// bootstrap the system (before $parse is instantiated), for this reason we just have
<add>// the csp.isActive() fn that looks for ng-csp attribute anywhere in the current doc
<ide><path>test/AngularSpec.js
<ide> describe('angular', function() {
<ide>
<ide>
<ide> describe('csp', function() {
<del> var originalSecurityPolicy;
<add> var originalFunction;
<ide>
<ide> beforeEach(function() {
<del> originalSecurityPolicy = document.securityPolicy;
<add> originalFunction = window.Function;
<ide> });
<ide>
<ide> afterEach(function() {
<del> document.securityPolicy = originalSecurityPolicy;
<add> window.Function = originalFunction;
<add> delete csp.isActive_;
<ide> });
<ide>
<ide>
<ide> describe('angular', function() {
<ide>
<ide>
<ide> it('should return true if CSP is autodetected via CSP v1.1 securityPolicy.isActive property', function() {
<del> document.securityPolicy = {isActive: true};
<add> window.Function = function() { throw new Error('CSP test'); };
<ide> expect(csp()).toBe(true);
<ide> });
<ide>
<add>
<ide> it('should return the true when CSP is enabled manually via [ng-csp]', function() {
<ide> spyOn(document, 'querySelector').andCallFake(function(selector) {
<ide> if (selector == '[ng-csp]') return {}; | 3 |
Javascript | Javascript | improve download button and behaviour | 88767e18612b35fa2a8dfe6e00b1e55d26e00829 | <ide><path>web/viewer.js
<ide> var PDFView = {
<ide> }
<ide> var url = this.url.split('#')[0];
<ide> //#if !(FIREFOX || MOZCENTRAL)
<add>
<add> var a = document.createElement('a');
<add>
<add> // If _parent == self, then opening an identical URL with different
<add> // location hash will only cause a navigation, not a download.
<add> if (window.top === window && !('download' in a) &&
<add> url === window.location.href.split('#')[0]) {
<add> url += url.indexOf('?') === -1 ? '?' : '&';
<add> }
<add>
<ide> url += '#pdfjs.action=download';
<del> window.open(url, '_parent');
<add> if (a.click) {
<add> // Use a.click() if available. Otherwise, Chrome might show
<add> // "Unsafe JavaScript attempt to initiate a navigation change
<add> // for frame with URL" and not open the PDF at all.
<add> // Supported by (not mentioned = untested):
<add> // - Firefox 6 - 19 (4- does not support a.click, 5 ignores a.click)
<add> // - Chrome 19 - 26 (18- does not support a.click)
<add> // - Opera 9 - 12.15
<add> // - Internet Explorer 6 - 10
<add> // - Safari 6 (5.1- does not support a.click)
<add> a.href = url;
<add> a.target = '_parent';
<add> // Use a.download if available. This increases the likelihood that
<add> // the file is downloaded instead of opened by another PDF plugin.
<add> if ('download' in a) {
<add> var filename = url.match(/([^\/?#=]+\.pdf)/i);
<add> a.download = filename ? filename[1] : 'file.pdf';
<add> }
<add> // <a> must be in the document for IE and recent Firefox versions.
<add> // (otherwise .click() is ignored)
<add> (document.body || document.documentElement).appendChild(a);
<add> a.click();
<add> a.parentNode.removeChild(a);
<add> } else {
<add> window.open(url, '_parent');
<add> }
<ide> //#else
<ide> // // Document isn't ready just try to download with the url.
<ide> // if (!this.pdfDocument) { | 1 |
Text | Text | improve declaration tests | f2f5dbc4b874f7d84c2ae587c4db0f57565dba81 | <ide><path>curriculum/challenges/english/02-javascript-algorithms-and-data-structures/basic-javascript/local-scope-and-functions.english.md
<ide> console.log(loc); // loc is not defined
<ide>
<ide> ## Instructions
<ide> <section id='instructions'>
<del>Declare a local variable <code>myVar</code> inside <code>myLocalScope</code>. Run the tests and then follow the instructions commented out in the editor.
<del><strong>Hint</strong><br>Refreshing the page may help if you get stuck.
<add>
<add>The editor has two `console.log`s to help you see what is happening. Check the console as you code to see how it changes. Declare a local variable `myVar` inside `myLocalScope` and run the tests.
<add>
<add>**Note:** The console will still have 'ReferenceError: myVar is not defined', but this will not cause the tests to fail.
<ide> </section>
<ide>
<ide> ## Tests
<ide> Declare a local variable <code>myVar</code> inside <code>myLocalScope</code>. Ru
<ide> ```yml
<ide> tests:
<ide> - text: The code should not contain a global <code>myVar</code> variable.
<del> testString: assert(typeof myVar === 'undefined');
<add> testString: |
<add> function declared(){
<add> myVar;
<add> }
<add> assert.throws(declared, ReferenceError);
<ide> - text: You should add a local <code>myVar</code> variable.
<del> testString: assert(/function\s+myLocalScope\s*\(\s*\)\s*\{\s[\s\S]+\s*var\s*myVar\s*(\s*|=[\s\S]+)\s*;[\s\S]+}/.test(code));
<add> testString: assert(/functionmyLocalScope\(\)\{.+(var|let|const)myVar.*}/s.test(code.replace(/\s/g, '')));
<ide>
<ide>
<ide> ```
<ide> function myLocalScope() {
<ide>
<ide> // Only change code below this line
<ide>
<del> console.log(myVar);
<add> console.log('inside myLocalScope', myVar);
<ide> }
<ide> myLocalScope();
<ide>
<ide> // Run and check the console
<ide> // myVar is not defined outside of myLocalScope
<del>console.log(myVar);
<del>
<del>// Now remove the console log line to pass the test
<del>
<del>```
<del>
<del></div>
<del>
<del>### Before Test
<del><div id='js-setup'>
<del>
<del>```js
<del>var logOutput = "";
<del>var originalConsole = console
<del>function capture() {
<del> var nativeLog = console.log;
<del> console.log = function (message) {
<del> logOutput = message;
<del> if(nativeLog.apply) {
<del> nativeLog.apply(originalConsole, arguments);
<del> } else {
<del> var nativeMsg = Array.prototype.slice.apply(arguments).join(' ');
<del> nativeLog(nativeMsg);
<del> }
<del> };
<del>}
<del>
<del>function uncapture() {
<del> console.log = originalConsole.log;
<del>}
<del>
<del>```
<del>
<del></div>
<del>
<del>### After Test
<del><div id='js-teardown'>
<add>console.log('outside myLocalScope', myVar);
<ide>
<del>```js
<del>typeof myLocalScope === 'function' && (capture(), myLocalScope(), uncapture());
<del>(function() { return logOutput || "console.log never called"; })();
<ide> ```
<ide>
<ide> </div>
<ide> typeof myLocalScope === 'function' && (capture(), myLocalScope(), uncapture());
<ide> function myLocalScope() {
<ide> 'use strict';
<ide>
<add> // Only change code below this line
<ide> var myVar;
<del> console.log(myVar);
<add> console.log('inside myLocalScope', myVar);
<ide> }
<ide> myLocalScope();
<add>
<add>// Run and check the console
<add>// myVar is not defined outside of myLocalScope
<add>console.log('outside myLocalScope', myVar);
<add>
<ide> ```
<ide>
<ide> </section> | 1 |
Mixed | Go | remove portmapping from container networksettings | db4f20404d2b8c4da5a15e447f52cd634445cfe3 | <ide><path>daemon/container_unix.go
<ide> func (container *Container) buildCreateEndpointOptions() ([]libnetwork.EndpointO
<ide> }
<ide> }
<ide>
<del> container.NetworkSettings.PortMapping = nil
<del>
<ide> ports := make([]nat.Port, len(portSpecs))
<ide> var i int
<ide> for p := range portSpecs {
<ide><path>daemon/daemon_test.go
<ide> func TestLoadWithVolume(t *testing.T) {
<ide> "StdinOnce":false,"Env":null,"Cmd":["top"],"Image":"ubuntu:latest","Volumes":null,"WorkingDir":"","Entrypoint":null,
<ide> "NetworkDisabled":false,"MacAddress":"","OnBuild":null,"Labels":{}},"Image":"07f8e8c5e66084bef8f848877857537ffe1c47edd01a93af27e7161672ad0e95",
<ide> "NetworkSettings":{"IPAddress":"172.17.0.1","IPPrefixLen":16,"MacAddress":"02:42:ac:11:00:01","LinkLocalIPv6Address":"fe80::42:acff:fe11:1",
<del>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","PortMapping":null,"Ports":{}},
<add>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","Ports":{}},
<ide> "ResolvConfPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/resolv.conf",
<ide> "HostnamePath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hostname",
<ide> "HostsPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hosts",
<ide> func TestLoadWithBindMount(t *testing.T) {
<ide> "StdinOnce":false,"Env":null,"Cmd":["top"],"Image":"ubuntu:latest","Volumes":null,"WorkingDir":"","Entrypoint":null,
<ide> "NetworkDisabled":false,"MacAddress":"","OnBuild":null,"Labels":{}},"Image":"07f8e8c5e66084bef8f848877857537ffe1c47edd01a93af27e7161672ad0e95",
<ide> "NetworkSettings":{"IPAddress":"172.17.0.1","IPPrefixLen":16,"MacAddress":"02:42:ac:11:00:01","LinkLocalIPv6Address":"fe80::42:acff:fe11:1",
<del>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","PortMapping":null,"Ports":{}},
<add>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","Ports":{}},
<ide> "ResolvConfPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/resolv.conf",
<ide> "HostnamePath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hostname",
<ide> "HostsPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hosts",
<ide> func TestLoadWithVolume17RC(t *testing.T) {
<ide> "StdinOnce":false,"Env":null,"Cmd":["top"],"Image":"ubuntu:latest","Volumes":null,"WorkingDir":"","Entrypoint":null,
<ide> "NetworkDisabled":false,"MacAddress":"","OnBuild":null,"Labels":{}},"Image":"07f8e8c5e66084bef8f848877857537ffe1c47edd01a93af27e7161672ad0e95",
<ide> "NetworkSettings":{"IPAddress":"172.17.0.1","IPPrefixLen":16,"MacAddress":"02:42:ac:11:00:01","LinkLocalIPv6Address":"fe80::42:acff:fe11:1",
<del>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","PortMapping":null,"Ports":{}},
<add>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","Ports":{}},
<ide> "ResolvConfPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/resolv.conf",
<ide> "HostnamePath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hostname",
<ide> "HostsPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hosts",
<ide> func TestRemoveLocalVolumesFollowingSymlinks(t *testing.T) {
<ide> "StdinOnce":false,"Env":null,"Cmd":["top"],"Image":"ubuntu:latest","Volumes":null,"WorkingDir":"","Entrypoint":null,
<ide> "NetworkDisabled":false,"MacAddress":"","OnBuild":null,"Labels":{}},"Image":"07f8e8c5e66084bef8f848877857537ffe1c47edd01a93af27e7161672ad0e95",
<ide> "NetworkSettings":{"IPAddress":"172.17.0.1","IPPrefixLen":16,"MacAddress":"02:42:ac:11:00:01","LinkLocalIPv6Address":"fe80::42:acff:fe11:1",
<del>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","PortMapping":null,"Ports":{}},
<add>"LinkLocalIPv6PrefixLen":64,"GlobalIPv6Address":"","GlobalIPv6PrefixLen":0,"Gateway":"172.17.42.1","IPv6Gateway":"","Bridge":"docker0","Ports":{}},
<ide> "ResolvConfPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/resolv.conf",
<ide> "HostnamePath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hostname",
<ide> "HostsPath":"/var/lib/docker/containers/d59df5276e7b219d510fe70565e0404bc06350e0d4b43fe961f22f339980170e/hosts",
<ide><path>daemon/network/settings.go
<ide> type Settings struct {
<ide> LinkLocalIPv6PrefixLen int
<ide> MacAddress string
<ide> NetworkID string
<del> PortMapping map[string]map[string]string // Deprecated
<ide> Ports nat.PortMap
<ide> SandboxKey string
<ide> SecondaryIPAddresses []Address
<ide><path>man/docker-inspect.1.md
<ide> To get information on a container use its ID or instance name:
<ide> "LinkLocalIPv6PrefixLen": 0,
<ide> "MacAddress": "",
<ide> "NetworkID": "",
<del> "PortMapping": null,
<ide> "Ports": null,
<ide> "SandboxKey": "",
<ide> "SecondaryIPAddresses": null,
<ide><path>pkg/parsers/parsers_test.go
<ide> func TestParseRepositoryTag(t *testing.T) {
<ide> }
<ide> }
<ide>
<del>func TestParsePortMapping(t *testing.T) {
<del> if _, err := PartParser("ip:public:private", "192.168.1.1:80"); err == nil {
<del> t.Fatalf("Expected an error, got %v", err)
<del> }
<del> data, err := PartParser("ip:public:private", "192.168.1.1:80:8080")
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> if len(data) != 3 {
<del> t.FailNow()
<del> }
<del> if data["ip"] != "192.168.1.1" {
<del> t.Fail()
<del> }
<del> if data["public"] != "80" {
<del> t.Fail()
<del> }
<del> if data["private"] != "8080" {
<del> t.Fail()
<del> }
<del>}
<del>
<ide> func TestParseKeyValueOpt(t *testing.T) {
<ide> invalids := map[string]string{
<ide> "": "Unable to parse key/value option: ", | 5 |
Ruby | Ruby | convert `brew unpack` test to spec | be498acf4fee38912a497b26e2beed97ee1f8b9a | <ide><path>Library/Homebrew/test/cmd/unpack_spec.rb
<add>describe "brew unpack", :integration_test do
<add> it "unpacks a given Formula's archive" do
<add> setup_test_formula "testball"
<add>
<add> Dir.mktmpdir do |path|
<add> path = Pathname.new(path)
<add>
<add> shutup do
<add> expect { brew "unpack", "testball", "--destdir=#{path}" }
<add> .to be_a_success
<add> end
<add>
<add> expect(path/"testball-0.1").to be_a_directory
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/test/unpack_test.rb
<del>require "testing_env"
<del>
<del>class IntegrationCommandTestUnpack < IntegrationCommandTestCase
<del> def test_unpack
<del> setup_test_formula "testball"
<del>
<del> mktmpdir do |path|
<del> cmd "unpack", "testball", "--destdir=#{path}"
<del> assert File.directory?("#{path}/testball-0.1"),
<del> "The tarball should be unpacked"
<del> end
<del> end
<del>end | 2 |
PHP | PHP | fix some docblocks. | 1315f897bc4cd18a69c449cd64b01d6f9c46d815 | <ide><path>src/Illuminate/Http/JsonResponse.php
<ide> public function __construct($data = null, $status = 200, $headers = [], $options
<ide> *
<ide> * @param string|null $callback
<ide> * @return $this
<del> *
<del> * @throws \InvalidArgumentException
<ide> */
<ide> public function withCallback($callback = null)
<ide> {
<ide><path>src/Illuminate/Notifications/Messages/NexmoMessage.php
<ide> class NexmoMessage
<ide> /**
<ide> * Create a new message instance.
<ide> *
<del> * @param string $message
<add> * @param string $content
<ide> * @return void
<ide> */
<ide> public function __construct($content = '')
<ide> public function content($content)
<ide> /**
<ide> * Set the phone number the message should be sent from.
<ide> *
<del> * @param string $number
<add> * @param string $from
<ide> * @return $this
<ide> */
<ide> public function from($from) | 2 |
Python | Python | add type hints to tfpegasusmodel | 1e6141c3d4c2b77ef51754cc21445c549947f317 | <ide><path>src/transformers/models/pegasus/modeling_tf_pegasus.py
<ide> from ...modeling_tf_utils import (
<ide> DUMMY_INPUTS,
<ide> TFCausalLanguageModelingLoss,
<add> TFModelInputType,
<ide> TFPreTrainedModel,
<ide> keras_serializable,
<ide> unpack_inputs,
<ide> def get_decoder(self):
<ide> )
<ide> def call(
<ide> self,
<del> input_ids=None,
<del> attention_mask=None,
<del> decoder_input_ids=None,
<del> decoder_attention_mask=None,
<del> decoder_position_ids=None,
<del> head_mask=None,
<del> decoder_head_mask=None,
<del> cross_attn_head_mask=None,
<add> input_ids: Optional[TFModelInputType] = None,
<add> attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_input_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> cross_attn_head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<ide> encoder_outputs: Optional[Union[Tuple, TFBaseModelOutput]] = None,
<del> past_key_values=None,
<del> inputs_embeds=None,
<del> decoder_inputs_embeds=None,
<del> use_cache=None,
<del> output_attentions=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> training=False,
<add> past_key_values: Optional[Tuple[Tuple[Union[np.ndarray, tf.Tensor]]]] = None,
<add> inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> use_cache: Optional[bool] = None,
<add> output_attentions: Optional[bool] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> training: bool = False,
<ide> **kwargs
<del> ):
<add> ) -> Union[TFSeq2SeqModelOutput, Tuple[tf.Tensor]]:
<ide>
<ide> outputs = self.model(
<ide> input_ids=input_ids,
<ide> def set_bias(self, value):
<ide> @add_end_docstrings(PEGASUS_GENERATION_EXAMPLE)
<ide> def call(
<ide> self,
<del> input_ids=None,
<del> attention_mask=None,
<del> decoder_input_ids=None,
<del> decoder_attention_mask=None,
<del> decoder_position_ids=None,
<del> head_mask=None,
<del> decoder_head_mask=None,
<del> cross_attn_head_mask=None,
<add> input_ids: Optional[TFModelInputType] = None,
<add> attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_input_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_attention_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_position_ids: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> cross_attn_head_mask: Optional[Union[np.ndarray, tf.Tensor]] = None,
<ide> encoder_outputs: Optional[TFBaseModelOutput] = None,
<del> past_key_values=None,
<del> inputs_embeds=None,
<del> decoder_inputs_embeds=None,
<del> use_cache=None,
<del> output_attentions=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> labels=None,
<del> training=False,
<del> ):
<add> past_key_values: Optional[Tuple[Tuple[Union[np.ndarray, tf.Tensor]]]] = None,
<add> inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> decoder_inputs_embeds: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> use_cache: Optional[bool] = None,
<add> output_attentions: Optional[bool] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> labels: Optional[Union[np.ndarray, tf.Tensor]] = None,
<add> training: bool = False,
<add> ) -> Union[TFSeq2SeqLMOutput, Tuple[tf.Tensor]]:
<ide> """
<ide> labels (`tf.tensor` of shape `(batch_size, sequence_length)`, *optional*):
<ide> Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., | 1 |
Text | Text | update url for hub pr docs | 1c220ced8ecc5f12bc979239aa648747411f9fc4 | <ide><path>model_cards/README.md
<ide> You can either:
<ide>
<ide> **What if you want to create or update a model card for a model you don't have write access to?**
<ide>
<del>In that case, you can open a [Hub pull request](https://github.com/huggingface/hub-docs/blob/4befd62fb1f7502c9143ab228da538abb2de10e4/docs/hub/repositories-pull-requests-discussions.md)! Check out the [announcement](https://huggingface.co/blog/community-update) of this feature for more details 🤗.
<add>In that case, you can open a [Hub pull request](https://huggingface.co/docs/hub/repositories-pull-requests-discussions)! Check out the [announcement](https://huggingface.co/blog/community-update) of this feature for more details 🤗.
<ide>
<ide> ### What happened to the model cards here?
<ide> | 1 |
Javascript | Javascript | improve spawn() argument handling | 9d957747222da95e7c55142d25f59466c07857dc | <ide><path>lib/child_process.js
<ide> function _validateStdio(stdio, sync) {
<ide> }
<ide>
<ide>
<del>function normalizeSpawnArguments(/*file, args, options*/) {
<add>function normalizeSpawnArguments(file /*, args, options*/) {
<ide> var args, options;
<ide>
<del> var file = arguments[0];
<del>
<ide> if (Array.isArray(arguments[1])) {
<ide> args = arguments[1].slice(0);
<ide> options = arguments[2];
<del> } else if (arguments[1] && !Array.isArray(arguments[1])) {
<add> } else if (arguments[1] !== undefined && !util.isObject(arguments[1])) {
<ide> throw new TypeError('Incorrect value of args option');
<ide> } else {
<ide> args = [];
<ide> options = arguments[1];
<ide> }
<ide>
<del> if (!options)
<add> if (options === undefined)
<ide> options = {};
<add> else if (!util.isObject(options))
<add> throw new TypeError('options argument must be an object');
<ide>
<ide> args.unshift(file);
<ide>
<del> var env = (options && options.env ? options.env : null) || process.env;
<add> var env = options.env || process.env;
<ide> var envPairs = [];
<add>
<ide> for (var key in env) {
<ide> envPairs.push(key + '=' + env[key]);
<ide> }
<ide> function normalizeSpawnArguments(/*file, args, options*/) {
<ide>
<ide> var spawn = exports.spawn = function(/*file, args, options*/) {
<ide> var opts = normalizeSpawnArguments.apply(null, arguments);
<del>
<del> var file = opts.file;
<del> var args = opts.args;
<ide> var options = opts.options;
<del> var envPairs = opts.envPairs;
<del>
<ide> var child = new ChildProcess();
<ide>
<ide> child.spawn({
<del> file: file,
<del> args: args,
<del> cwd: options ? options.cwd : null,
<del> windowsVerbatimArguments: !!(options && options.windowsVerbatimArguments),
<del> detached: !!(options && options.detached),
<del> envPairs: envPairs,
<del> stdio: options ? options.stdio : null,
<del> uid: options ? options.uid : null,
<del> gid: options ? options.gid : null
<add> file: opts.file,
<add> args: opts.args,
<add> cwd: options.cwd,
<add> windowsVerbatimArguments: !!options.windowsVerbatimArguments,
<add> detached: !!options.detached,
<add> envPairs: opts.envPairs,
<add> stdio: options.stdio,
<add> uid: options.uid,
<add> gid: options.gid
<ide> });
<ide>
<ide> return child;
<ide> function checkExecSyncError(ret) {
<ide>
<ide> function execFileSync(/*command, options*/) {
<ide> var opts = normalizeSpawnArguments.apply(null, arguments);
<del> var inheritStderr = !!!opts.options.stdio;
<add> var inheritStderr = !opts.options.stdio;
<ide>
<ide> var ret = spawnSync(opts.file, opts.args.slice(1), opts.options);
<ide>
<ide> exports.execFileSync = execFileSync;
<ide>
<ide> function execSync(/*comand, options*/) {
<ide> var opts = normalizeExecArgs.apply(null, arguments);
<del> var inheritStderr = opts.options ? !!!opts.options.stdio : true;
<add> var inheritStderr = opts.options ? !opts.options.stdio : true;
<ide>
<ide> var ret = spawnSync(opts.file, opts.args, opts.options);
<ide> ret.cmd = opts.cmd;
<ide><path>test/simple/test-child-process-spawn-typeerror.js
<ide> var spawn = require('child_process').spawn,
<ide> assert = require('assert'),
<ide> windows = (process.platform === 'win32'),
<del> cmd = (windows) ? 'ls' : 'dir',
<add> cmd = (windows) ? 'dir' : 'ls',
<add> invalidcmd = (windows) ? 'ls' : 'dir',
<add> invalidArgsMsg = /Incorrect value of args option/,
<add> invalidOptionsMsg = /options argument must be an object/,
<ide> errors = 0;
<ide>
<ide> try {
<ide> // Ensure this throws a TypeError
<del> var child = spawn(cmd, 'this is not an array');
<add> var child = spawn(invalidcmd, 'this is not an array');
<ide>
<ide> child.on('error', function (err) {
<ide> errors++;
<ide> try {
<ide> assert.equal(e instanceof TypeError, true);
<ide> }
<ide>
<add>// verify that valid argument combinations do not throw
<add>assert.doesNotThrow(function() {
<add> spawn(cmd);
<add>});
<add>
<add>assert.doesNotThrow(function() {
<add> spawn(cmd, []);
<add>});
<add>
<add>assert.doesNotThrow(function() {
<add> spawn(cmd, {});
<add>});
<add>
<add>assert.doesNotThrow(function() {
<add> spawn(cmd, [], {});
<add>});
<add>
<add>// verify that invalid argument combinations throw
<add>assert.throws(function() {
<add> spawn();
<add>}, /Bad argument/);
<add>
<add>assert.throws(function() {
<add> spawn(cmd, null);
<add>}, invalidArgsMsg);
<add>
<add>assert.throws(function() {
<add> spawn(cmd, true);
<add>}, invalidArgsMsg);
<add>
<add>assert.throws(function() {
<add> spawn(cmd, [], null);
<add>}, invalidOptionsMsg);
<add>
<add>assert.throws(function() {
<add> spawn(cmd, [], 1);
<add>}, invalidOptionsMsg);
<add>
<ide> process.on('exit', function() {
<ide> assert.equal(errors, 0);
<ide> }); | 2 |
Javascript | Javascript | remove use of getdefaultcomputedstyle | 274feb53cc9a99633dfac785d8b3b837d192c43c | <ide><path>src/css/defaultDisplay.js
<ide> var iframe,
<ide> */
<ide> // Called only from within defaultDisplay
<ide> function actualDisplay( name, doc ) {
<del> var style,
<del> elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ),
<add> var elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ),
<ide>
<del> // getDefaultComputedStyle might be reliably used only on attached element
<del> display = window.getDefaultComputedStyle &&
<del> ( style = window.getDefaultComputedStyle( elem[ 0 ] ) ) ?
<del>
<del> // Use of this method is a temporary fix (more like optimization)
<del> // until something better comes along,
<del> // since it was removed from specification and supported only in FF
<del> style.display :
<del>
<del> jQuery.css( elem[ 0 ], "display" );
<add> display = jQuery.css( elem[ 0 ], "display" );
<ide>
<ide> // We don't have any data stored on the element,
<ide> // so use "detach" method as fast way to get rid of the element | 1 |
Ruby | Ruby | reset `version_scheme` only for runtime dependents | ddc23eb268328f45fb88031c7b011981c1266f1e | <ide><path>Library/Homebrew/cmd/update-report.rb
<ide> def migrate_gcc_dependents_if_needed
<ide>
<ide> Formula.installed.each do |formula|
<ide> next unless formula.tap&.core_tap?
<del> next unless formula.recursive_dependencies.map(&:name).include? "gcc"
<add>
<add> recursive_runtime_dependencies = Dependency.expand(
<add> formula,
<add> cache_key: "update-report",
<add> ) do |_, dependency|
<add> Dependency.prune if dependency.build? || dependency.test?
<add> end
<add> next unless recursive_runtime_dependencies.map(&:name).include? "gcc"
<ide>
<ide> keg = formula.installed_kegs.last
<ide> tab = Tab.for_keg(keg) | 1 |
Python | Python | fix py3 compatibility issue in test | c064963ef8553a96f6f487ad4c00ed5832cc1cf7 | <ide><path>tests/keras/layers/test_core.py
<ide> def test_merge():
<ide> input_a = Input(shape=input_shapes[0][1:])
<ide> input_b = Input(shape=input_shapes[1][1:])
<ide> merged = merge([input_a, input_b],
<del> mode=lambda (x, y): K.concatenate([x, y]),
<del> output_shape=lambda (s1, s2): (s1[:-1],) + (s1[-1] + s2[-1],))
<add> mode=lambda tup: K.concatenate([tup[0], tup[1]]),
<add> output_shape=lambda tup: (tup[0][:-1],) + (tup[0][-1] + tup[1][-1],))
<ide> expected_output_shape = model.get_output_shape_for(input_shapes)
<ide> actual_output_shape = model.predict(inputs).shape
<ide> assert expected_output_shape == actual_output_shape
<ide> def test_merge():
<ide> model.compile('rmsprop', 'mse')
<ide>
<ide> # test function with output_shape function
<del> def fn_mode((x, y)):
<add> def fn_mode(tup):
<add> x, y = tup
<ide> return K.concatenate([x, y])
<ide>
<del> def fn_output_shape((s1, s2)):
<add> def fn_output_shape(tup):
<add> s1, s2 = tup
<ide> return (s1[:-1],) + (s1[-1] + s2[-1],)
<ide>
<ide> input_a = Input(shape=input_shapes[0][1:]) | 1 |
Javascript | Javascript | add pan to test | fd70391367ef0c4588a4262634495ffc242fbde5 | <ide><path>packages/sproutcore-touch/lib/gesture_recognizers/pinch.js
<ide> SC.PinchGestureRecognizer = SC.Gesture.extend({
<ide>
<ide> touchEnd: function(evt, view) {
<ide> this.state = SC.Gesture.ENDED;
<del> //this.scale = 0;
<ide> this.redispatchEventToView(view,'touchend');
<ide> },
<ide>
<ide><path>packages/sproutcore-touch/tests/system/gesture_support.js
<ide> test("when 2 fingers move closer together, gesture should be in BEGAN state", fu
<ide> console.groupEnd();
<ide>
<ide> });
<add>
<add>test("pan test", function() {
<add> view = SC.View.create({
<add> elementId: 'gestureTest',
<add>
<add> pinchStart: function(recognizer, scale) {
<add> numStart++;
<add> startScale = scale;
<add> },
<add>
<add> pinchChange: function(recognizer, scale) {
<add> changeScale = scale;
<add> },
<add>
<add> panChange: function(recognizer, translation) {
<add>
<add> }
<add> });
<add>
<add> SC.run(function(){
<add> view.append();
<add> });
<add>
<add>});
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<add>
<ide>
<ide> window.shit = function () {
<ide>
<ide> window.shit = function () {
<ide> elementId: 'gestureTest',
<ide>
<ide> scale: 1,
<add> translate: {
<add> x: 0,
<add> y: 0
<add> },
<ide>
<ide> pinchChange: function(recognizer, scale) {
<ide> this.scale = scale;
<del> var string = 'scale3d('+this.scale+','+this.scale+',1)';
<del> //console.log(this.scale);
<add> this._applyTransforms();
<add> },
<add>
<add> panChange: function(recognizer, translation) {
<add> this.translate = translation;
<add> this._applyTransforms();
<add> },
<add>
<add> _applyTransforms: function() {
<add> var string = 'translate3d('+this.translate.x+'px,'+this.translate.y+'px,0)';
<add> string += ' scale3d('+this.scale+','+this.scale+',1)';
<add>
<ide> this.$().css('-webkit-transform',string);
<ide> },
<ide> | 2 |
Text | Text | update releasing guide | 1bee3467ad0f05015741d76d49af03c63c5ea8d4 | <ide><path>Releases.md
<ide> are green and then run
<ide>
<ide> This script runs end to end with a proxy npm repository on local PC and asks to check that Chrome Debugging works.
<ide>
<del>#### Cut a release branch and push to github
<del>
<del>To cut a release branch and check that everything works, you'll need Mac OS with the
<add>**Note**: In order to run the e2e tests, you'll need Mac OS with the
<ide> [Android dev environment set up](https://github.com/facebook/react-native/blob/master/ReactAndroid/README.md).
<ide>
<add>#### Cut a release branch and push to github
<add>
<ide> Run:
<ide>
<ide> ``` | 1 |
Text | Text | add note about features vs. bugs | 55870cd8ddab26149b8383651b6ef6bf68c404b2 | <ide><path>guides/source/contributing_to_ruby_on_rails.md
<ide> WARNING: Please do not report security vulnerabilities with public GitHub issue
<ide>
<ide> ### What about Feature Requests?
<ide>
<del>Please don't put "feature request" items into GitHub Issues. If there's a new feature that you want to see added to Ruby on Rails, you'll need to write the code yourself - or convince someone else to partner with you to write the code. Later in this guide you'll find detailed instructions for proposing a patch to Ruby on Rails. If you enter a wishlist item in GitHub Issues with no code, you can expect it to be marked "invalid" as soon as it's reviewed.
<del>
<del>If you'd like feedback on an idea for a feature before doing the work for make a patch, please send an email to the [rails-core mailing list](https://groups.google.com/forum/?fromgroups#!forum/rubyonrails-core). You might get no response, which means that everyone is indifferent. You might find someone who's also interested in building that feature. You might get a "This won't be accepted." But it's the proper place to discuss new ideas. GitHub Issues are not a particularly good venue for the sometimes long and involved discussions new features require.
<add>Please don't put "feature request" items into GitHub Issues. If there's a new
<add>feature that you want to see added to Ruby on Rails, you'll need to write the
<add>code yourself - or convince someone else to partner with you to write the code.
<add>Later in this guide you'll find detailed instructions for proposing a patch to
<add>Ruby on Rails. If you enter a wishlist item in GitHub Issues with no code, you
<add>can expect it to be marked "invalid" as soon as it's reviewed.
<add>
<add>Sometimes, the line between 'bug' and 'feature' is a hard one to draw.
<add>Generally, a feature is anything that adds new behavior, while a bug is
<add>anything that fixes already existing behavior that is mis-behaving. Sometimes,
<add>the core team will have to make a judgement call. That said, the distinction
<add>generally just affects which release your patch will get in to; we love feature
<add>submissions! They just won't get backported to maintenance branches.
<add>
<add>If you'd like feedback on an idea for a feature before doing the work for make
<add>a patch, please send an email to the [rails-core mailing
<add>list](https://groups.google.com/forum/?fromgroups#!forum/rubyonrails-core). You
<add>might get no response, which means that everyone is indifferent. You might find
<add>someone who's also interested in building that feature. You might get a "This
<add>won't be accepted." But it's the proper place to discuss new ideas. GitHub
<add>Issues are not a particularly good venue for the sometimes long and involved
<add>discussions new features require.
<ide>
<ide> Setting Up a Development Environment
<ide> ------------------------------------ | 1 |
Text | Text | update usagewithreact.md to react 0.14 | c0f366d7680b8941efe18ff8e1cc58f4c4bb6f94 | <ide><path>docs/basics/UsageWithReact.md
<ide> These are all normal React components, so we won’t stop to examine them in det
<ide> #### `components/AddTodo.js`
<ide>
<ide> ```js
<del>import React, { findDOMNode, Component, PropTypes } from 'react';
<add>import React, { Component, PropTypes } from 'react';
<ide>
<ide> export default class AddTodo extends Component {
<ide> render() {
<ide> export default class AddTodo extends Component {
<ide> }
<ide>
<ide> handleClick(e) {
<del> const node = findDOMNode(this.refs.input);
<add> const node = this.refs.input;
<ide> const text = node.value.trim();
<ide> this.props.onAddClick(text);
<ide> node.value = '';
<ide> First, we need to import `Provider` from [`react-redux`](http://github.com/gaear
<ide>
<ide> ```js
<ide> import React from 'react';
<add>import { render } from 'react-dom';
<ide> import { createStore } from 'redux';
<ide> import { Provider } from 'react-redux';
<ide> import App from './containers/App';
<ide> import todoApp from './reducers';
<ide> let store = createStore(todoApp);
<ide>
<ide> let rootElement = document.getElementById('root');
<del>React.render(
<add>render(
<ide> // The child must be wrapped in a function
<ide> // to work around an issue in React 0.13.
<ide> <Provider store={store}>
<del> {() => <App />}
<add> <App />
<ide> </Provider>,
<ide> rootElement
<ide> ); | 1 |
Go | Go | reduce the number of string copy operations | 73e79a3310f3976b61a295f45e12aead9af41962 | <ide><path>registry/registry.go
<ide> func (r *Registry) setUserAgent(req *http.Request, extra ...VersionChecker) {
<ide> if len(r.baseVersions)+len(extra) == 0 {
<ide> return
<ide> }
<del>
<del> userAgent := appendVersions(r.baseVersionsStr, extra...)
<del> req.Header.Set("User-Agent", userAgent)
<add> if len(extra) == 0 {
<add> req.Header.Set("User-Agent", r.baseVersionsStr)
<add> } else {
<add> req.Header.Set("User-Agent", appendVersions(r.baseVersionsStr, extra...))
<add> }
<ide> return
<ide> }
<ide> | 1 |
Javascript | Javascript | add coverage for client._addhandle() | a01c365193afc0c05b7181aae9ecd753de001865 | <ide><path>test/parallel/test-debugger-client-addhandle.js
<add>'use strict';
<add>
<add>require('../common');
<add>const assert = require('assert');
<add>const Client = require('_debugger').Client;
<add>
<add>{
<add> const client = new Client();
<add> assert.deepStrictEqual(client.handles, {});
<add>}
<add>
<add>{
<add> const client = new Client();
<add> client._addHandle(null);
<add> assert.deepStrictEqual(client.handles, {});
<add>}
<add>
<add>{
<add> const client = new Client();
<add> client._addHandle('not an object');
<add> assert.deepStrictEqual(client.handles, {});
<add>}
<add>
<add>{
<add> const client = new Client();
<add> client._addHandle({ handle: 'not a number' });
<add> assert.deepStrictEqual(client.handles, {});
<add>}
<add>
<add>{
<add> const client = new Client();
<add> const validNoScript = { handle: 6, id: 'foo', type: 'not a script' };
<add> client._addHandle(validNoScript);
<add> assert.deepStrictEqual(client.handles, { 6: validNoScript });
<add> assert.deepStrictEqual(client.scripts, {});
<add>}
<add>
<add>{
<add> const client = new Client();
<add> const validWithScript = { handle: 5, id: 'bar', type: 'script' };
<add> client._addHandle(validWithScript);
<add> assert.deepStrictEqual(client.handles, { 5: validWithScript });
<add> assert.deepStrictEqual(client.scripts, { bar: validWithScript });
<add>} | 1 |
Javascript | Javascript | reorganize mixins and mergemixins | 1948a604bf86200a5ab9187d93be669b370b5d5a | <ide><path>packages/ember-metal/lib/mixin.js
<ide> function isMethod(obj) {
<ide> obj !== Boolean && obj !== Object && obj !== Number && obj !== Array && obj !== Date && obj !== String;
<ide> }
<ide>
<del>function mergeMixins(mixins, m, descs, values, base) {
<del> var len = mixins.length, idx, mixin, guid, props, value, key, ovalue, concats, meta;
<add>function cloneDescriptor(desc) {
<add> var newDesc = new Ember.ComputedProperty();
<add> newDesc._cacheable = desc._cacheable;
<add> newDesc._dependentKeys = desc._dependentKeys;
<add> newDesc.func = desc.func;
<ide>
<del> function removeKeys(keyName) {
<del> delete descs[keyName];
<del> delete values[keyName];
<add> return newDesc;
<add>}
<add>
<add>var CONTINUE = {};
<add>
<add>function mixinProperties(mixinsMeta, mixin) {
<add> var guid;
<add>
<add> if (mixin instanceof Mixin) {
<add> guid = guidFor(mixin);
<add> if (mixinsMeta[guid]) { return CONTINUE; }
<add> mixinsMeta[guid] = mixin;
<add> return mixin.properties;
<add> } else {
<add> return mixin; // apply anonymous mixin properties
<add> }
<add>}
<add>
<add>function concatenatedProperties(props, values, base) {
<add> var concats;
<add>
<add> // reset before adding each new mixin to pickup concats from previous
<add> concats = values.concatenatedProperties || base.concatenatedProperties;
<add> if (props.concatenatedProperties) {
<add> concats = concats ? concats.concat(props.concatenatedProperties) : props.concatenatedProperties;
<ide> }
<ide>
<del> function cloneDescriptor(desc) {
<del> var newDesc = new Ember.ComputedProperty();
<del> newDesc._cacheable = desc._cacheable;
<del> newDesc._dependentKeys = desc._dependentKeys;
<del> newDesc.func = desc.func;
<add> return concats;
<add>}
<ide>
<del> return newDesc;
<add>function giveDescriptorSuper(meta, key, value, values, descs) {
<add> var ovalue = values[key] === undefined && descs[key];
<add> if (!ovalue) { ovalue = meta.descs[key]; }
<add> if (ovalue && ovalue.func) {
<add> // Since multiple mixins may inherit from the
<add> // same parent, we need to clone the computed
<add> // property so that other mixins do not receive
<add> // the wrapped version.
<add> value = cloneDescriptor(value);
<add> value.func = Ember.wrap(value.func, ovalue.func);
<ide> }
<ide>
<del> for(idx=0; idx < len; idx++) {
<del> mixin = mixins[idx];
<del> Ember.assert('Expected hash or Mixin instance, got ' + Object.prototype.toString.call(mixin), typeof mixin === 'object' && mixin !== null && Object.prototype.toString.call(mixin) !== '[object Array]');
<add> return value;
<add>}
<ide>
<del> if (mixin instanceof Mixin) {
<del> guid = guidFor(mixin);
<del> if (m[guid]) { continue; }
<del> m[guid] = mixin;
<del> props = mixin.properties;
<add>function giveMethodSuper(obj, key, value, values, descs) {
<add> var ovalue = descs[key] === undefined && values[key];
<add> if (!ovalue) { ovalue = obj[key]; }
<add> if ('function' !== typeof ovalue) { ovalue = null; }
<add> if (ovalue) {
<add> var o = value.__ember_observes__, ob = value.__ember_observesBefore__;
<add> value = Ember.wrap(value, ovalue);
<add> value.__ember_observes__ = o;
<add> value.__ember_observesBefore__ = ob;
<add> }
<add>
<add> return value;
<add>}
<add>
<add>function applyConcatenatedProperties(obj, key, value, values) {
<add> var baseValue = values[key] || obj[key];
<add>
<add> if (baseValue) {
<add> if ('function' === typeof baseValue.concat) {
<add> return baseValue.concat(value);
<ide> } else {
<del> props = mixin; // apply anonymous mixin properties
<add> return Ember.makeArray(baseValue).concat(value);
<add> }
<add> } else {
<add> return Ember.makeArray(value);
<add> }
<add>}
<add>
<add>function addNormalizedProperty(base, key, value, meta, descs, values, concats) {
<add> if (value instanceof Ember.Descriptor) {
<add> if (value === REQUIRED && descs[key]) { return CONTINUE; }
<add>
<add> // Wrap descriptor function to implement
<add> // _super() if needed
<add> if (value.func) {
<add> value = giveDescriptorSuper(meta, key, value, values, descs);
<add> }
<add>
<add> descs[key] = value;
<add> values[key] = undefined;
<add> } else {
<add> // impl super if needed...
<add> if (isMethod(value)) {
<add> value = giveMethodSuper(base, key, value, values, descs);
<add> } else if ((concats && a_indexOf.call(concats, key) >= 0) || key === 'concatenatedProperties') {
<add> value = applyConcatenatedProperties(base, key, value, values);
<ide> }
<ide>
<add> descs[key] = undefined;
<add> values[key] = value;
<add> }
<add>}
<add>
<add>function mergeMixins(mixins, m, descs, values, base) {
<add> var mixin, props, key, concats, meta;
<add>
<add> function removeKeys(keyName) {
<add> delete descs[keyName];
<add> delete values[keyName];
<add> }
<add>
<add> for(var i=0, l=mixins.length; i<l; i++) {
<add> mixin = mixins[i];
<add> Ember.assert('Expected hash or Mixin instance, got ' + Object.prototype.toString.call(mixin), typeof mixin === 'object' && mixin !== null && Object.prototype.toString.call(mixin) !== '[object Array]');
<add>
<add> props = mixinProperties(m, mixin);
<add> if (props === CONTINUE) { continue; }
<add>
<ide> if (props) {
<ide> meta = Ember.meta(base);
<del>
<del> // reset before adding each new mixin to pickup concats from previous
<del> concats = values.concatenatedProperties || base.concatenatedProperties;
<del> if (props.concatenatedProperties) {
<del> concats = concats ? concats.concat(props.concatenatedProperties) : props.concatenatedProperties;
<del> }
<add> concats = concatenatedProperties(props, values, base);
<ide>
<ide> for (key in props) {
<ide> if (!props.hasOwnProperty(key)) { continue; }
<del> value = props[key];
<del> if (value instanceof Ember.Descriptor) {
<del> if (value === REQUIRED && descs[key]) { continue; }
<del>
<del> // Wrap descriptor function to implement
<del> // _super() if needed
<del> if (value.func) {
<del> ovalue = values[key] === undefined && descs[key];
<del> if (!ovalue) { ovalue = meta.descs[key]; }
<del> if (ovalue && ovalue.func) {
<del> // Since multiple mixins may inherit from the
<del> // same parent, we need to clone the computed
<del> // property so that other mixins do not receive
<del> // the wrapped version.
<del> value = cloneDescriptor(value);
<del> value.func = Ember.wrap(value.func, ovalue.func);
<del> }
<del> }
<del>
<del> descs[key] = value;
<del> values[key] = undefined;
<del> } else {
<del> // impl super if needed...
<del> if (isMethod(value)) {
<del> ovalue = descs[key] === undefined && values[key];
<del> if (!ovalue) { ovalue = base[key]; }
<del> if ('function' !== typeof ovalue) { ovalue = null; }
<del> if (ovalue) {
<del> var o = value.__ember_observes__, ob = value.__ember_observesBefore__;
<del> value = Ember.wrap(value, ovalue);
<del> value.__ember_observes__ = o;
<del> value.__ember_observesBefore__ = ob;
<del> }
<del> } else if ((concats && a_indexOf.call(concats, key) >= 0) || key === 'concatenatedProperties') {
<del> var baseValue = values[key] || base[key];
<del> if (baseValue) {
<del> if ('function' === typeof baseValue.concat) {
<del> value = baseValue.concat(value);
<del> } else {
<del> value = Ember.makeArray(baseValue).concat(value);
<del> }
<del> } else {
<del> value = Ember.makeArray(value);
<del> }
<del> }
<del>
<del> descs[key] = undefined;
<del> values[key] = value;
<del> }
<add> addNormalizedProperty(base, key, props[key], meta, descs, values, concats);
<ide> }
<ide>
<ide> // manually copy toString() because some JS engines do not enumerate it
<del> if (props.hasOwnProperty('toString')) {
<del> base.toString = props.toString;
<del> }
<del>
<add> if (props.hasOwnProperty('toString')) { base.toString = props.toString; }
<ide> } else if (mixin.mixins) {
<ide> mergeMixins(mixin.mixins, m, descs, values, base);
<ide> if (mixin._without) { a_forEach.call(mixin._without, removeKeys); }
<ide> function finishPartial(obj, m) {
<ide> return obj;
<ide> }
<ide>
<add>function followAlias(obj, desc, m, descs, values) {
<add> var altKey = desc.methodName, value;
<add> if (descs[altKey] || values[altKey]) {
<add> value = values[altKey];
<add> desc = descs[altKey];
<add> } else if (m.descs[altKey]) {
<add> desc = m.descs[altKey];
<add> value = undefined;
<add> } else {
<add> desc = undefined;
<add> value = obj[altKey];
<add> }
<add>
<add> return { desc: desc, value: value };
<add>}
<add>
<add>function updateObservers(obj, key, observer, observerKey, method) {
<add> if ('function' !== typeof observer) { return; }
<add>
<add> var paths = observer[observerKey];
<add>
<add> if (paths) {
<add> for (var i=0, l=paths.length; i<l; i++) {
<add> Ember[method](obj, paths[i], null, key);
<add> }
<add> }
<add>}
<add>
<add>function replaceObservers(obj, key, observer) {
<add> var prevObserver = obj[key];
<add>
<add> updateObservers(obj, key, prevObserver, '__ember_observesBefore__', 'removeBeforeObserver');
<add> updateObservers(obj, key, prevObserver, '__ember_observes__', 'removeObserver');
<add>
<add> updateObservers(obj, key, observer, '__ember_observesBefore__', 'addBeforeObserver');
<add> updateObservers(obj, key, observer, '__ember_observes__', 'addObserver');
<add>}
<add>
<ide> function applyMixin(obj, mixins, partial) {
<del> var descs = {}, values = {}, m = Ember.meta(obj), req = m.required,
<del> key, value, desc, prevValue, paths, len, idx;
<add> var descs = {}, values = {}, m = Ember.meta(obj),
<add> key, value, desc;
<ide>
<ide> // Go through all mixins and hashes passed in, and:
<ide> //
<ide> function applyMixin(obj, mixins, partial) {
<ide> mergeMixins(mixins, mixinsMeta(obj), descs, values, obj);
<ide>
<ide> for(key in values) {
<del> if (key === 'contructor') { continue; }
<del> if (!values.hasOwnProperty(key)) { continue; }
<add> if (key === 'contructor' || !values.hasOwnProperty(key)) { continue; }
<ide>
<ide> desc = descs[key];
<ide> value = values[key];
<ide>
<del> if (desc === REQUIRED) {
<del> if (!(key in obj)) {
<del> Ember.assert('Required property not defined: '+key, !!partial);
<del>
<del> // for partial applies add to hash of required keys
<del> req = writableReq(obj);
<del> req.__ember_count__++;
<del> req[key] = true;
<del> }
<del> } else {
<del> while (desc && desc instanceof Alias) {
<del> var altKey = desc.methodName;
<del> if (descs[altKey] || values[altKey]) {
<del> value = values[altKey];
<del> desc = descs[altKey];
<del> } else if (m.descs[altKey]) {
<del> desc = m.descs[altKey];
<del> value = undefined;
<del> } else {
<del> desc = undefined;
<del> value = obj[altKey];
<del> }
<del> }
<add> if (desc === REQUIRED) { continue; }
<ide>
<del> if (desc === undefined && value === undefined) { continue; }
<del>
<del> prevValue = obj[key];
<del>
<del> if ('function' === typeof prevValue) {
<del> if ((paths = prevValue.__ember_observesBefore__)) {
<del> len = paths.length;
<del> for (idx=0; idx < len; idx++) {
<del> Ember.removeBeforeObserver(obj, paths[idx], null, key);
<del> }
<del> } else if ((paths = prevValue.__ember_observes__)) {
<del> len = paths.length;
<del> for (idx=0; idx < len; idx++) {
<del> Ember.removeObserver(obj, paths[idx], null, key);
<del> }
<del> }
<del> }
<add> while (desc && desc instanceof Alias) {
<add> var followed = followAlias(obj, desc, m, descs, values);
<add> desc = followed.desc;
<add> value = followed.value;
<add> }
<ide>
<del> detectBinding(obj, key, value, m);
<del>
<del> defineProperty(obj, key, desc, value, m);
<del>
<del> if ('function' === typeof value) {
<del> if (paths = value.__ember_observesBefore__) {
<del> len = paths.length;
<del> for (idx=0; idx < len; idx++) {
<del> Ember.addBeforeObserver(obj, paths[idx], null, key);
<del> }
<del> } else if (paths = value.__ember_observes__) {
<del> len = paths.length;
<del> for (idx=0; idx < len; idx++) {
<del> Ember.addObserver(obj, paths[idx], null, key);
<del> }
<del> }
<del> }
<add> if (desc === undefined && value === undefined) { continue; }
<ide>
<del> if (req && req[key]) {
<del> req = writableReq(obj);
<del> req.__ember_count__--;
<del> req[key] = false;
<del> }
<del> }
<add> replaceObservers(obj, key, value);
<add> detectBinding(obj, key, value, m);
<add> defineProperty(obj, key, desc, value, m);
<ide> }
<ide>
<ide> if (!partial) { // don't apply to prototype
<ide> finishPartial(obj, m);
<ide> }
<ide>
<del> // Make sure no required attrs remain
<del> if (!partial && req && req.__ember_count__>0) {
<del> var keys = [];
<del> for (key in req) {
<del> if (META_SKIP[key]) { continue; }
<del> keys.push(key);
<del> }
<del> // TODO: Remove surrounding if clause from production build
<del> Ember.assert('Required properties not defined: '+keys.join(','));
<del> }
<ide> return obj;
<ide> }
<ide>
<ide><path>packages/ember-metal/tests/mixin/required_test.js
<ide> module('Module.required', {
<ide> }
<ide> });
<ide>
<del>test('applying a mixin with unmet requirement', function() {
<del> raises(function() {
<del> PartialMixin.apply(obj);
<del> }, Error, 'should raise error for unmet requirement');
<del>});
<del>
<del>test('applying a mixin with unmet requirement using applyPartial', function() {
<del> PartialMixin.applyPartial(obj);
<del> equal(obj.foo, null, 'obj.foo has required');
<del>
<del> // applying regularly to object should throw
<del> raises(function() {
<del> Ember.Mixin.create({ bar: 'BAR' }).apply(obj);
<del> }, Error, 'should raise error for unmet requirement');
<del>
<del>});
<del>
<ide> test('applying a mixin to meet requirement', function() {
<ide> FinalMixin.apply(obj);
<ide> PartialMixin.apply(obj); | 2 |
Text | Text | add todos [ci skip] | 690bd77669a34b77cc9ad5b06b3f6f7d62e6a991 | <ide><path>website/docs/usage/embeddings-transformers.md
<ide> def MyCustomVectors(
<ide>
<ide> ## Pretraining {#pretraining}
<ide>
<del><!-- TODO: write -->
<add>- explain general concept and idea (short!)
<add>- present it as a separate lightweight mechanism for pretraining the tok2vec
<add> layer
<add>- advantages (could also be pros/cons table)
<add>- explain how it generates a separate file (!) and how it depends on the same
<add> vectors
<ide>
<ide> > #### Raw text format
<ide> > | 1 |
Python | Python | read only endpoint for xcom | 5744a4797e10fad04ac4814c02889af309b65130 | <ide><path>airflow/api_connexion/endpoints/xcom_endpoint.py
<ide> # KIND, either express or implied. See the License for the
<ide> # specific language governing permissions and limitations
<ide> # under the License.
<add>from flask import request
<add>from sqlalchemy import and_, func
<add>from sqlalchemy.orm.session import Session
<ide>
<del># TODO(mik-laj): We have to implement it.
<del># Do you want to help? Please look at: sshttps://github.com/apache/airflow/issues/8134
<add>from airflow.api_connexion import parameters
<add>from airflow.api_connexion.exceptions import NotFound
<add>from airflow.api_connexion.schemas.xcom_schema import (
<add> XComCollection, XComCollectionItemSchema, XComCollectionSchema, xcom_collection_item_schema,
<add> xcom_collection_schema,
<add>)
<add>from airflow.models import DagRun as DR, XCom
<add>from airflow.utils.session import provide_session
<ide>
<ide>
<ide> def delete_xcom_entry():
<ide> def delete_xcom_entry():
<ide> raise NotImplementedError("Not implemented yet.")
<ide>
<ide>
<del>def get_xcom_entries():
<add>@provide_session
<add>def get_xcom_entries(
<add> dag_id: str,
<add> dag_run_id: str,
<add> task_id: str,
<add> session: Session
<add>) -> XComCollectionSchema:
<ide> """
<ide> Get all XCom values
<ide> """
<del> raise NotImplementedError("Not implemented yet.")
<add> offset = request.args.get(parameters.page_offset, 0)
<add> limit = min(int(request.args.get(parameters.page_limit, 100)), 100)
<add> query = session.query(XCom)
<add> if dag_id != '~':
<add> query = query.filter(XCom.dag_id == dag_id)
<add> query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.execution_date == DR.execution_date))
<add> else:
<add> query.join(DR, XCom.execution_date == DR.execution_date)
<add> if task_id != '~':
<add> query = query.filter(XCom.task_id == task_id)
<add> if dag_run_id != '~':
<add> query = query.filter(DR.run_id == dag_run_id)
<add> query = query.order_by(
<add> XCom.execution_date, XCom.task_id, XCom.dag_id, XCom.key
<add> )
<add> total_entries = session.query(func.count(XCom.key)).scalar()
<add> query = query.offset(offset).limit(limit)
<add> return xcom_collection_schema.dump(XComCollection(xcom_entries=query.all(), total_entries=total_entries))
<ide>
<ide>
<del>def get_xcom_entry():
<add>@provide_session
<add>def get_xcom_entry(
<add> dag_id: str,
<add> task_id: str,
<add> dag_run_id: str,
<add> xcom_key: str,
<add> session: Session
<add>) -> XComCollectionItemSchema:
<ide> """
<ide> Get an XCom entry
<ide> """
<del> raise NotImplementedError("Not implemented yet.")
<add> query = session.query(XCom)
<add> query = query.filter(and_(XCom.dag_id == dag_id,
<add> XCom.task_id == task_id,
<add> XCom.key == xcom_key))
<add> query = query.join(DR, and_(XCom.dag_id == DR.dag_id, XCom.execution_date == DR.execution_date))
<add> query = query.filter(DR.run_id == dag_run_id)
<add>
<add> query_object = query.one_or_none()
<add> if not query_object:
<add> raise NotFound("XCom entry not found")
<add> return xcom_collection_item_schema.dump(query_object)
<ide>
<ide>
<ide> def patch_xcom_entry():
<ide><path>airflow/api_connexion/schemas/xcom_schema.py
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<add>from typing import List, NamedTuple
<add>
<add>from marshmallow import Schema, fields
<add>from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
<add>
<add>from airflow.models import XCom
<add>
<add>
<add>class XComCollectionItemSchema(SQLAlchemySchema):
<add> """
<add> Schema for a xcom item
<add> """
<add>
<add> class Meta:
<add> """ Meta """
<add> model = XCom
<add>
<add> key = auto_field()
<add> timestamp = auto_field()
<add> execution_date = auto_field()
<add> task_id = auto_field()
<add> dag_id = auto_field()
<add>
<add>
<add>class XComSchema(XComCollectionItemSchema):
<add> """
<add> XCom schema
<add> """
<add>
<add> value = auto_field()
<add>
<add>
<add>class XComCollection(NamedTuple):
<add> """ List of XComs with meta"""
<add> xcom_entries: List[XCom]
<add> total_entries: int
<add>
<add>
<add>class XComCollectionSchema(Schema):
<add> """ XCom Collection Schema"""
<add> xcom_entries = fields.List(fields.Nested(XComCollectionItemSchema))
<add> total_entries = fields.Int()
<add>
<add>
<add>xcom_schema = XComSchema(strict=True)
<add>xcom_collection_item_schema = XComCollectionItemSchema(strict=True)
<add>xcom_collection_schema = XComCollectionSchema(strict=True)
<ide><path>tests/api_connexion/endpoints/test_xcom_endpoint.py
<ide> import unittest
<ide>
<ide> import pytest
<add>from parameterized import parameterized
<ide>
<add>from airflow.models import DagRun as DR, XCom
<add>from airflow.utils.dates import parse_execution_date
<add>from airflow.utils.session import create_session, provide_session
<add>from airflow.utils.types import DagRunType
<ide> from airflow.www import app
<ide>
<ide>
<del>class TesXComEndpoint(unittest.TestCase):
<add>class TestXComEndpoint(unittest.TestCase):
<ide> @classmethod
<ide> def setUpClass(cls) -> None:
<ide> super().setUpClass()
<ide> cls.app = app.create_app(testing=True) # type:ignore
<ide>
<ide> def setUp(self) -> None:
<add> """
<add> Setup For XCom endpoint TC
<add> """
<ide> self.client = self.app.test_client() # type:ignore
<add> # clear existing xcoms
<add> with create_session() as session:
<add> session.query(XCom).delete()
<add> session.query(DR).delete()
<ide>
<add> def tearDown(self) -> None:
<add> """
<add> Clear Hanging XComs
<add> """
<add> with create_session() as session:
<add> session.query(XCom).delete()
<add> session.query(DR).delete()
<ide>
<del>class TestDeleteXComEntry(TesXComEndpoint):
<add>
<add>class TestDeleteXComEntry(TestXComEndpoint):
<ide> @pytest.mark.skip(reason="Not implemented yet")
<ide> def test_should_response_200(self):
<ide> response = self.client.delete(
<del> "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
<add> "/dags/TEST_DAG_ID/taskInstances/TEST_TASK_ID/2005-04-02T00:00:00Z/xcomEntries/XCOM_KEY"
<ide> )
<ide> assert response.status_code == 204
<ide>
<ide>
<del>class TestGetXComEntry(TesXComEndpoint):
<del> @pytest.mark.skip(reason="Not implemented yet")
<del> def test_should_response_200(self):
<add>class TestGetXComEntry(TestXComEndpoint):
<add>
<add> @provide_session
<add> def test_should_response_200(self, session):
<add> dag_id = 'test-dag-id'
<add> task_id = 'test-task-id'
<add> execution_date = '2005-04-02T00:00:00+00:00'
<add> xcom_key = 'test-xcom-key'
<add> execution_date_parsed = parse_execution_date(execution_date)
<add> xcom_model = XCom(key=xcom_key,
<add> execution_date=execution_date_parsed,
<add> task_id=task_id,
<add> dag_id=dag_id,
<add> timestamp=execution_date_parsed)
<add> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> dagrun = DR(dag_id=dag_id,
<add> run_id=dag_run_id,
<add> execution_date=execution_date_parsed,
<add> start_date=execution_date_parsed,
<add> run_type=DagRunType.MANUAL.value)
<add> session.add(xcom_model)
<add> session.add(dagrun)
<add> session.commit()
<ide> response = self.client.get(
<del> "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
<add> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries/{xcom_key}"
<add> )
<add> self.assertEqual(200, response.status_code)
<add> self.assertEqual(
<add> response.json,
<add> {
<add> 'dag_id': dag_id,
<add> 'execution_date': execution_date,
<add> 'key': xcom_key,
<add> 'task_id': task_id,
<add> 'timestamp': execution_date
<add> }
<ide> )
<del> assert response.status_code == 200
<ide>
<ide>
<del>class TestGetXComEntries(TesXComEndpoint):
<del> @pytest.mark.skip(reason="Not implemented yet")
<del> def test_should_response_200(self):
<add>class TestGetXComEntries(TestXComEndpoint):
<add> @provide_session
<add> def test_should_response_200(self, session):
<add> dag_id = 'test-dag-id'
<add> task_id = 'test-task-id'
<add> execution_date = '2005-04-02T00:00:00+00:00'
<add> execution_date_parsed = parse_execution_date(execution_date)
<add> xcom_model_1 = XCom(key='test-xcom-key-1',
<add> execution_date=execution_date_parsed,
<add> task_id=task_id,
<add> dag_id=dag_id,
<add> timestamp=execution_date_parsed)
<add> xcom_model_2 = XCom(key='test-xcom-key-2',
<add> execution_date=execution_date_parsed,
<add> task_id=task_id,
<add> dag_id=dag_id,
<add> timestamp=execution_date_parsed)
<add> dag_run_id = DR.generate_run_id(DagRunType.MANUAL, execution_date_parsed)
<add> dagrun = DR(dag_id=dag_id,
<add> run_id=dag_run_id,
<add> execution_date=execution_date_parsed,
<add> start_date=execution_date_parsed,
<add> run_type=DagRunType.MANUAL.value)
<add> xcom_models = [xcom_model_1, xcom_model_2]
<add> session.add_all(xcom_models)
<add> session.add(dagrun)
<add> session.commit()
<ide> response = self.client.get(
<del> "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/"
<add> f"/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries"
<add> )
<add> self.assertEqual(200, response.status_code)
<add> self.assertEqual(
<add> response.json,
<add> {
<add> 'xcom_entries': [
<add> {
<add> 'dag_id': dag_id,
<add> 'execution_date': execution_date,
<add> 'key': 'test-xcom-key-1',
<add> 'task_id': task_id,
<add> 'timestamp': execution_date
<add> },
<add> {
<add> 'dag_id': dag_id,
<add> 'execution_date': execution_date,
<add> 'key': 'test-xcom-key-2',
<add> 'task_id': task_id,
<add> 'timestamp': execution_date
<add> }
<add> ],
<add> 'total_entries': 2,
<add> }
<ide> )
<add>
<add>
<add>class TestPaginationGetXComEntries(TestXComEndpoint):
<add>
<add> def setUp(self):
<add> super().setUp()
<add> self.dag_id = 'test-dag-id'
<add> self.task_id = 'test-task-id'
<add> self.execution_date = '2005-04-02T00:00:00+00:00'
<add> self.execution_date_parsed = parse_execution_date(self.execution_date)
<add> self.dag_run_id = DR.generate_run_id(DagRunType.MANUAL, self.execution_date_parsed)
<add>
<add> @parameterized.expand(
<add> [
<add> (
<add> "limit=1",
<add> ["TEST_XCOM_KEY1"],
<add> ),
<add> (
<add> "limit=2",
<add> ["TEST_XCOM_KEY1", "TEST_XCOM_KEY10"],
<add> ),
<add> (
<add> "offset=5",
<add> [
<add> "TEST_XCOM_KEY5",
<add> "TEST_XCOM_KEY6",
<add> "TEST_XCOM_KEY7",
<add> "TEST_XCOM_KEY8",
<add> "TEST_XCOM_KEY9",
<add> ]
<add> ),
<add> (
<add> "offset=0",
<add> [
<add> "TEST_XCOM_KEY1",
<add> "TEST_XCOM_KEY10",
<add> "TEST_XCOM_KEY2",
<add> "TEST_XCOM_KEY3",
<add> "TEST_XCOM_KEY4",
<add> "TEST_XCOM_KEY5",
<add> "TEST_XCOM_KEY6",
<add> "TEST_XCOM_KEY7",
<add> "TEST_XCOM_KEY8",
<add> "TEST_XCOM_KEY9"
<add> ]
<add> ),
<add> (
<add> "limit=1&offset=5",
<add> ["TEST_XCOM_KEY5"],
<add> ),
<add> (
<add> "limit=1&offset=1",
<add> ["TEST_XCOM_KEY10"],
<add> ),
<add> (
<add> "limit=2&offset=2",
<add> ["TEST_XCOM_KEY2", "TEST_XCOM_KEY3"],
<add> ),
<add> ]
<add> )
<add> @provide_session
<add> def test_handle_limit_offset(self, query_params, expected_xcom_ids, session):
<add> url = "/api/v1/dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/xcomEntries?{query_params}"
<add> url = url.format(dag_id=self.dag_id,
<add> dag_run_id=self.dag_run_id,
<add> task_id=self.task_id,
<add> query_params=query_params)
<add> dagrun = DR(dag_id=self.dag_id,
<add> run_id=self.dag_run_id,
<add> execution_date=self.execution_date_parsed,
<add> start_date=self.execution_date_parsed,
<add> run_type=DagRunType.MANUAL.value)
<add> xcom_models = self._create_xcoms(10)
<add> session.add_all(xcom_models)
<add> session.add(dagrun)
<add> session.commit()
<add> response = self.client.get(url)
<ide> assert response.status_code == 200
<add> self.assertEqual(response.json["total_entries"], 10)
<add> conn_ids = [conn["key"] for conn in response.json["xcom_entries"] if conn]
<add> self.assertEqual(conn_ids, expected_xcom_ids)
<add>
<add> def _create_xcoms(self, count):
<add> return [XCom(
<add> key=f'TEST_XCOM_KEY{i}',
<add> execution_date=self.execution_date_parsed,
<add> task_id=self.task_id,
<add> dag_id=self.dag_id,
<add> timestamp=self.execution_date_parsed,
<add> ) for i in range(1, count + 1)]
<ide>
<ide>
<del>class TestPatchXComEntry(TesXComEndpoint):
<add>class TestPatchXComEntry(TestXComEndpoint):
<ide> @pytest.mark.skip(reason="Not implemented yet")
<ide> def test_should_response_200(self):
<ide> response = self.client.patch(
<del> "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries"
<add> "/dags/TEST_DAG_ID/taskInstances/TEST_TASK_ID/2005-04-02T00:00:00Z/xcomEntries"
<ide> )
<ide> assert response.status_code == 200
<ide>
<ide>
<del>class TestPostXComEntry(TesXComEndpoint):
<add>class TestPostXComEntry(TestXComEndpoint):
<ide> @pytest.mark.skip(reason="Not implemented yet")
<ide> def test_should_response_200(self):
<ide> response = self.client.post(
<del> "/dags/TEST_DAG_ID}/taskInstances/TEST_TASK_ID/2005-04-02T21:37:42Z/xcomEntries/XCOM_KEY"
<add> "/dags/TEST_DAG_ID/taskInstances/TEST_TASK_ID/2005-04-02T00:00:00Z/xcomEntries/XCOM_KEY"
<ide> )
<ide> assert response.status_code == 200
<ide><path>tests/api_connexion/schemas/test_xcom_schema.py
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<add>import unittest
<add>
<add>from sqlalchemy import or_
<add>
<add>from airflow.api_connexion.schemas.xcom_schema import (
<add> XComCollection, xcom_collection_item_schema, xcom_collection_schema, xcom_schema,
<add>)
<add>from airflow.models import XCom
<add>from airflow.utils.dates import parse_execution_date
<add>from airflow.utils.session import create_session, provide_session
<add>
<add>
<add>class TestXComSchemaBase(unittest.TestCase):
<add>
<add> def setUp(self):
<add> """
<add> Clear Hanging XComs pre test
<add> """
<add> with create_session() as session:
<add> session.query(XCom).delete()
<add>
<add> def tearDown(self) -> None:
<add> """
<add> Clear Hanging XComs post test
<add> """
<add> with create_session() as session:
<add> session.query(XCom).delete()
<add>
<add>
<add>class TestXComCollectionItemSchema(TestXComSchemaBase):
<add>
<add> def setUp(self) -> None:
<add> super().setUp()
<add> self.default_time = '2005-04-02T21:00:00+00:00'
<add> self.default_time_parsed = parse_execution_date(self.default_time)
<add>
<add> @provide_session
<add> def test_serialize(self, session):
<add> xcom_model = XCom(
<add> key='test_key',
<add> timestamp=self.default_time_parsed,
<add> execution_date=self.default_time_parsed,
<add> task_id='test_task_id',
<add> dag_id='test_dag',
<add> )
<add> session.add(xcom_model)
<add> session.commit()
<add> xcom_model = session.query(XCom).first()
<add> deserialized_xcom = xcom_collection_item_schema.dump(xcom_model)
<add> self.assertEqual(
<add> deserialized_xcom[0],
<add> {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time,
<add> 'execution_date': self.default_time,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> }
<add> )
<add>
<add> def test_deserialize(self):
<add> xcom_dump = {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time,
<add> 'execution_date': self.default_time,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> }
<add> result = xcom_collection_item_schema.load(xcom_dump)
<add> self.assertEqual(
<add> result[0],
<add> {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time_parsed,
<add> 'execution_date': self.default_time_parsed,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> }
<add> )
<add>
<add>
<add>class TestXComCollectionSchema(TestXComSchemaBase):
<add>
<add> def setUp(self) -> None:
<add> super().setUp()
<add> self.default_time_1 = '2005-04-02T21:00:00+00:00'
<add> self.default_time_2 = '2005-04-02T21:01:00+00:00'
<add> self.time_1 = parse_execution_date(self.default_time_1)
<add> self.time_2 = parse_execution_date(self.default_time_2)
<add>
<add> @provide_session
<add> def test_serialize(self, session):
<add> xcom_model_1 = XCom(
<add> key='test_key_1',
<add> timestamp=self.time_1,
<add> execution_date=self.time_1,
<add> task_id='test_task_id_1',
<add> dag_id='test_dag_1',
<add> )
<add> xcom_model_2 = XCom(
<add> key='test_key_2',
<add> timestamp=self.time_2,
<add> execution_date=self.time_2,
<add> task_id='test_task_id_2',
<add> dag_id='test_dag_2',
<add> )
<add> xcom_models = [xcom_model_1, xcom_model_2]
<add> session.add_all(xcom_models)
<add> session.commit()
<add> xcom_models_query = session.query(XCom).filter(
<add> or_(XCom.execution_date == self.time_1, XCom.execution_date == self.time_2)
<add> )
<add> xcom_models_queried = xcom_models_query.all()
<add> deserialized_xcoms = xcom_collection_schema.dump(XComCollection(
<add> xcom_entries=xcom_models_queried,
<add> total_entries=xcom_models_query.count(),
<add> ))
<add> self.assertEqual(
<add> deserialized_xcoms[0],
<add> {
<add> 'xcom_entries': [
<add> {
<add> 'key': 'test_key_1',
<add> 'timestamp': self.default_time_1,
<add> 'execution_date': self.default_time_1,
<add> 'task_id': 'test_task_id_1',
<add> 'dag_id': 'test_dag_1',
<add> },
<add> {
<add> 'key': 'test_key_2',
<add> 'timestamp': self.default_time_2,
<add> 'execution_date': self.default_time_2,
<add> 'task_id': 'test_task_id_2',
<add> 'dag_id': 'test_dag_2',
<add> }
<add> ],
<add> 'total_entries': len(xcom_models),
<add> }
<add> )
<add>
<add>
<add>class TestXComSchema(TestXComSchemaBase):
<add>
<add> def setUp(self) -> None:
<add> super().setUp()
<add> self.default_time = '2005-04-02T21:00:00+00:00'
<add> self.default_time_parsed = parse_execution_date(self.default_time)
<add>
<add> @provide_session
<add> def test_serialize(self, session):
<add> xcom_model = XCom(
<add> key='test_key',
<add> timestamp=self.default_time_parsed,
<add> execution_date=self.default_time_parsed,
<add> task_id='test_task_id',
<add> dag_id='test_dag',
<add> value=b'test_binary',
<add> )
<add> session.add(xcom_model)
<add> session.commit()
<add> xcom_model = session.query(XCom).first()
<add> deserialized_xcom = xcom_schema.dump(xcom_model)
<add> self.assertEqual(
<add> deserialized_xcom[0],
<add> {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time,
<add> 'execution_date': self.default_time,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> 'value': 'test_binary',
<add> }
<add> )
<add>
<add> def test_deserialize(self):
<add> xcom_dump = {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time,
<add> 'execution_date': self.default_time,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> 'value': b'test_binary',
<add> }
<add> result = xcom_schema.load(xcom_dump)
<add> self.assertEqual(
<add> result[0],
<add> {
<add> 'key': 'test_key',
<add> 'timestamp': self.default_time_parsed,
<add> 'execution_date': self.default_time_parsed,
<add> 'task_id': 'test_task_id',
<add> 'dag_id': 'test_dag',
<add> 'value': 'test_binary',
<add> }
<add> ) | 4 |
Ruby | Ruby | remove detection for sparkle strategy | 3723cd7decc4d94ad772e3e6948262ceff35ee94 | <ide><path>Library/Homebrew/livecheck/strategy/sparkle.rb
<del># typed: false
<add># typed: true
<ide> # frozen_string_literal: true
<ide>
<ide> require "bundle_version"
<ide> module Strategy
<ide> # its contents as a Sparkle appcast in XML format.
<ide> #
<ide> # @api private
<del> class Sparkle
<add> class Sparkle < PageMatch
<ide> extend T::Sig
<ide>
<ide> NICE_NAME = "Sparkle"
<ide>
<del> PRIORITY = 1
<del>
<del> # Whether the strategy can be applied to the provided URL.
<del> sig { params(url: String).returns(T::Boolean) }
<del> def self.match?(url)
<del> return false unless url.match?(%r{^https?://})
<del>
<del> xml = url.end_with?(".xml")
<del> xml ||= begin
<del> headers = Strategy.page_headers(url)
<del> content_type = headers["content-type"]
<del> content_type.blank? || content_type.include?("xml")
<del> end
<del> return false unless xml
<del>
<del> contents = Strategy.page_contents(url)
<del>
<del> return true if contents.match?(%r{https?://www.andymatuschak.org/xml-namespaces/sparkle})
<del>
<del> contents.include?("rss") &&
<del> contents.include?("channel") &&
<del> contents.include?("item") &&
<del> contents.include?("enclosure")
<del> end
<del>
<ide> # Checks the content at the URL for new versions.
<ide> sig { params(url: String, regex: T.nilable(Regexp)).returns(T::Hash[Symbol, T.untyped]) }
<ide> def self.find_versions(url, regex, &block) | 1 |
PHP | PHP | show example base variables | d502747428494e634fe699dbc44c703845e2a79f | <ide><path>app/Jobs/Job.php
<ide>
<ide> abstract class Job
<ide> {
<del> //
<add> /**
<add> * The name of the queue the job should be sent to.
<add> *
<add> * @var string
<add> */
<add> public $queue;
<add>
<add> /**
<add> * The seconds before the job should be made available.
<add> *
<add> * @var int
<add> */
<add> public $delay;
<ide> } | 1 |
PHP | PHP | consolidate table prefix removal | 169b0a5c3f211e8ff8138514466b062acc7ff35e | <ide><path>cake/libs/model/cake_schema.php
<ide> function read($options = array()) {
<ide> if ($prefix && strpos($table, $prefix) !== 0) {
<ide> continue;
<ide> }
<del> $table = preg_replace('/^' . preg_quote($prefix) . '/', '', $table);
<add> $table = $this->_noPrefixTable($prefix, $table);
<ide>
<ide> if (in_array($fulltable, $currentTables)) {
<ide> $key = array_search($fulltable, $currentTables);
<ide> function read($options = array()) {
<ide> }
<ide> if (in_array($withTable, $currentTables)) {
<ide> $key = array_search($withTable, $currentTables);
<del> $noPrefixWith = str_replace($prefix, '', $withTable);
<add> $noPrefixWith = $this->_noPrefixTable($prefix, $withTable);
<ide>
<ide> $tables[$noPrefixWith] = $this->__columns($Object->$class);
<ide> $tables[$noPrefixWith]['indexes'] = $db->index($Object->$class);
<ide> function read($options = array()) {
<ide> if (strpos($table, $prefix) !== 0) {
<ide> continue;
<ide> }
<del> $table = preg_replace('/^' . preg_quote($prefix) . '/', '', $table);
<add> $table = $this->_noPrefixTable($prefix, $table);
<ide> }
<ide> $Object = new AppModel(array(
<ide> 'name' => Inflector::classify($table), 'table' => $table, 'ds' => $connection
<ide> function _compareIndexes($new, $old) {
<ide> }
<ide> return array_filter(compact('add', 'drop'));
<ide> }
<add>
<add> function _noPrefixTable($prefix, $table) {
<add> return preg_replace('/^' . preg_quote($prefix) . '/', '', $table);
<add> }
<ide> } | 1 |
Javascript | Javascript | use imports instead of globals | d422f83bc054cc39b9ceb009a08ac1ff2aecbe4a | <ide><path>packages/ember-metal/lib/expand_properties.js
<del>import Ember from "ember-metal/core";
<ide> import EmberError from 'ember-metal/error';
<ide> import { forEach } from 'ember-metal/enumerable_utils';
<add>import { typeOf } from 'ember-metal/utils';
<ide>
<ide> /**
<ide> @module ember-metal
<ide> export default function expandProperties(pattern, callback) {
<ide> 'e.g. `user.{firstName, lastName}` should be `user.{firstName,lastName}`');
<ide> }
<ide>
<del> if ('string' === Ember.typeOf(pattern)) {
<add> if ('string' === typeOf(pattern)) {
<ide> var parts = pattern.split(SPLIT_REGEX);
<ide> var properties = [parts];
<ide>
<ide><path>packages/ember-runtime/lib/mixins/controller.js
<ide> import { Mixin } from "ember-metal/mixin";
<del>import { computed } from "ember-metal/computed";
<add>import alias from 'ember-metal/alias';
<ide> import ActionHandler from "ember-runtime/mixins/action_handler";
<ide> import ControllerContentModelAliasDeprecation from "ember-runtime/mixins/controller_content_model_alias_deprecation";
<ide>
<ide> export default Mixin.create(ActionHandler, ControllerContentModelAliasDeprecatio
<ide> /**
<ide> @private
<ide> */
<del> content: computed.alias('model')
<add> content: alias('model')
<ide>
<ide> });
<ide>
<ide><path>packages/ember-runtime/lib/system/core_object.js
<ide> @submodule ember-runtime
<ide> */
<ide>
<del>import Ember from "ember-metal/core";
<add>// using ember-metal/lib/main here to ensure that ember-debug is setup
<add>// if present
<add>import Ember from "ember-metal";
<ide> import merge from "ember-metal/merge";
<ide> // Ember.assert, Ember.config
<ide>
<ide><path>packages/ember-views/lib/views/bound_component_view.js
<ide> import { read, chain, subscribe, unsubscribe } from "ember-metal/streams/utils";
<ide> import { readComponentFactory } from "ember-views/streams/utils";
<ide> import mergeViewBindings from "ember-htmlbars/system/merge-view-bindings";
<ide> import EmberError from "ember-metal/error";
<add>import ContainerView from "ember-views/views/container_view";
<ide>
<del>export default Ember.ContainerView.extend(_Metamorph, {
<add>export default ContainerView.extend(_Metamorph, {
<ide> init: function() {
<ide> this._super();
<ide> var componentNameStream = this._boundComponentOptions.componentNameStream; | 4 |
Java | Java | remove unnecessary cast in contentrequestmatchers | b808b53bcc4633f0622c7038df310b04e34bfe7b | <ide><path>spring-test/src/main/java/org/springframework/test/web/client/match/ContentRequestMatchers.java
<ide> private RequestMatcher multipartData(MultiValueMap<String, ?> expectedMap, boole
<ide> }
<ide> if (expected instanceof byte[]) {
<ide> assertTrue("Multipart is not a file", actual instanceof byte[]);
<del> assertEquals("Multipart content", expected, (byte[]) actual);
<add> assertEquals("Multipart content", expected, actual);
<ide> }
<ide> else if (expected instanceof String) {
<ide> assertTrue("Multipart is not a String", actual instanceof String); | 1 |
PHP | PHP | apply fixes from styleci | 8f31b6407b5b4bf4082984a8f88db4da19451c7e | <ide><path>src/Illuminate/Routing/UrlGenerator.php
<ide> public function hasCorrectSignature(Request $request, $absolute = true, array $i
<ide> {
<ide> $ignoreQuery[] = 'signature';
<ide>
<del> $url = $absolute ? $request->url() : '/' . $request->path();
<add> $url = $absolute ? $request->url() : '/'.$request->path();
<ide>
<ide> $original = rtrim($url.'?'.Arr::query(
<ide> Arr::except($request->query(), $ignoreQuery) | 1 |
Text | Text | move border opts to correct location for samples | 909c719331d36c5ef4a8871fc62de83f854bfb92 | <ide><path>docs/axes/cartesian/index.md
<ide> const config = {
<ide> options: {
<ide> scales: {
<ide> x: {
<del> grid: {
<del> borderColor: 'red'
<add> border: {
<add> color: 'red'
<ide> }
<ide> }
<ide> }
<ide><path>docs/samples/scales/stacked.md
<ide> const config = {
<ide> position: 'left',
<ide> stack: 'demo',
<ide> stackWeight: 2,
<del> grid: {
<del> borderColor: Utils.CHART_COLORS.red
<add> border: {
<add> color: Utils.CHART_COLORS.red
<ide> }
<ide> },
<ide> y2: {
<ide> const config = {
<ide> position: 'left',
<ide> stack: 'demo',
<ide> stackWeight: 1,
<del> grid: {
<del> borderColor: Utils.CHART_COLORS.blue
<add> border: {
<add> color: Utils.CHART_COLORS.blue
<ide> }
<ide> }
<ide> } | 2 |
Javascript | Javascript | add korean translation | 798cb53e7346c75ff49524cbd5657deb34ce59a6 | <ide><path>lang/kr.js
<add>(function () {
<add> var lang = {
<add> months : "1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월".split("_"),
<add> monthsShort : "1월_2월_3월_4월_5월_6월_7월_8월_9월_10월_11월_12월".split("_"),
<add> weekdays : "일요일_월요일_화요일_수요일_목요일_금요일_토요일".split("_"),
<add> weekdaysShort : "일_월_화_수_목_금_토".split("_"),
<add> longDateFormat : {
<add> L : "YYYY.MM.DD",
<add> LL : "YYYY년 MMMM D일",
<add> LLL : "YYYY년 MMMM D일 HH시 mm분",
<add> LLLL : "YYYY년 MMMM D일 dddd HH시 mm분"
<add> },
<add> relativeTime : {
<add> future : "%s 후",
<add> past : "%s 전",
<add> s : "방금",
<add> m : "몇 분",
<add> mm : "%d분",
<add> h : "한 시간",
<add> hh : "%d시간",
<add> d : "하루",
<add> dd : "%d일",
<add> M : "한 달",
<add> MM : "%d달",
<add> y : "일 년",
<add> yy : "%d년"
<add> },
<add> ordinal : function (number) {
<add> var b = number % 10;
<add> return (~~ (number % 100 / 10) === 1) ? '일' :
<add> (b === 1) ? '일' :
<add> (b === 2) ? '일' :
<add> (b === 3) ? '일' : '일';
<add> }
<add> };
<add>
<add> // Node
<add> if (typeof module !== 'undefined') {
<add> module.exports = lang;
<add> }
<add> // Browser
<add> if (typeof window !== 'undefined' && this.moment && this.moment.lang) {
<add> this.moment.lang('kr', lang);
<add> }
<add>}());
<ide><path>lang/test/kr.js
<add>
<add>/**************************************************
<add> Korean
<add> *************************************************/
<add>
<add>module("lang:kr");
<add>
<add>test("format", 18, function() {
<add> moment.lang('kr');
<add> var a = [
<add> ['dddd, MMMM Do YYYY, h:mm:ss a', 'Sunday, February 14th 2010, 3:25:50 pm'],
<add> ['ddd, hA', 'Sun, 3PM'],
<add> ['M Mo MM MMMM MMM', '2 2nd 02 February Feb'],
<add> ['YYYY YY', '2010 10'],
<add> ['D Do DD', '14 14th 14'],
<add> ['d do dddd ddd', '0 0th Sunday Sun'],
<add> ['DDD DDDo DDDD', '45 45th 045'],
<add> ['w wo ww', '8 8th 08'],
<add> ['h hh', '3 03'],
<add> ['H HH', '15 15'],
<add> ['m mm', '25 25'],
<add> ['s ss', '50 50'],
<add> ['a A', 'pm PM'],
<add> ['t\\he DDDo \\d\\ay of t\\he ye\\ar', 'the 45th day of the year'],
<add> ['L', '02/14/2010'],
<add> ['LL', 'February 14 2010'],
<add> ['LLL', 'February 14 2010 3:25 PM'],
<add> ['LLLL', 'Sunday, February 14 2010 3:25 PM']
<add> ],
<add> b = moment(new Date(2010, 1, 14, 15, 25, 50, 125)),
<add> i;
<add> for (i = 0; i < a.length; i++) {
<add> equal(b.format(a[i][0]), a[i][1], a[i][0] + ' ---> ' + a[i][1]);
<add> }
<add>});
<add>
<add>test("format ordinal", 31, function() {
<add> moment.lang('kr');
<add> equal(moment([2011, 0, 1]).format('DDDo'), '1일', '1일');
<add> equal(moment([2011, 0, 2]).format('DDDo'), '2일', '2일');
<add> equal(moment([2011, 0, 3]).format('DDDo'), '3일', '3일');
<add> equal(moment([2011, 0, 4]).format('DDDo'), '4일', '4일');
<add> equal(moment([2011, 0, 5]).format('DDDo'), '5일', '5일');
<add> equal(moment([2011, 0, 6]).format('DDDo'), '6일', '6일');
<add> equal(moment([2011, 0, 7]).format('DDDo'), '7일', '7일');
<add> equal(moment([2011, 0, 8]).format('DDDo'), '8일', '8일');
<add> equal(moment([2011, 0, 9]).format('DDDo'), '9일', '9일');
<add> equal(moment([2011, 0, 10]).format('DDDo'), '10일', '10일');
<add>
<add> equal(moment([2011, 0, 11]).format('DDDo'), '11일', '11일');
<add> equal(moment([2011, 0, 12]).format('DDDo'), '12일', '12일');
<add> equal(moment([2011, 0, 13]).format('DDDo'), '13일', '13일');
<add> equal(moment([2011, 0, 14]).format('DDDo'), '14일', '14일');
<add> equal(moment([2011, 0, 15]).format('DDDo'), '15일', '15일');
<add> equal(moment([2011, 0, 16]).format('DDDo'), '16일', '16일');
<add> equal(moment([2011, 0, 17]).format('DDDo'), '17일', '17일');
<add> equal(moment([2011, 0, 18]).format('DDDo'), '18일', '18일');
<add> equal(moment([2011, 0, 19]).format('DDDo'), '19일', '19일');
<add> equal(moment([2011, 0, 20]).format('DDDo'), '20일', '20일');
<add>
<add> equal(moment([2011, 0, 21]).format('DDDo'), '21일', '21일');
<add> equal(moment([2011, 0, 22]).format('DDDo'), '22일', '22일');
<add> equal(moment([2011, 0, 23]).format('DDDo'), '23일', '23일');
<add> equal(moment([2011, 0, 24]).format('DDDo'), '24일', '24일');
<add> equal(moment([2011, 0, 25]).format('DDDo'), '25일', '25일');
<add> equal(moment([2011, 0, 26]).format('DDDo'), '26일', '26일');
<add> equal(moment([2011, 0, 27]).format('DDDo'), '27일', '27일');
<add> equal(moment([2011, 0, 28]).format('DDDo'), '28일', '28일');
<add> equal(moment([2011, 0, 29]).format('DDDo'), '29일', '29일');
<add> equal(moment([2011, 0, 30]).format('DDDo'), '30일', '30일');
<add>
<add> equal(moment([2011, 0, 31]).format('DDDo'), '31일', '31일');
<add>});
<add>
<add>test("format month", 12, function() {
<add> moment.lang('kr');
<add> var expected = '1월 1월_2월 2월_3월 3월_4월 4월_5월 5월_6월 6월_7월 7월_8월 8월_9월 9월_10월 10월_11월 11월_12월 12월'.split("_");
<add> var i;
<add> for (i = 0; i < expected.length; i++) {
<add> equal(moment([2011, i, 0]).format('MMMM MMM'), expected[i], expected[i]);
<add> }
<add>});
<add>
<add>test("format week", 7, function() {
<add> moment.lang('kr');
<add> var expected = '일요일 일_월요일 월_화요일 화_수요일 수_목요일 목_금요일 금_토요일 토'.split("_");
<add> var i;
<add> for (i = 0; i < expected.length; i++) {
<add> equal(moment([2011, 0, 2 + i]).format('dddd ddd'), expected[i], expected[i]);
<add> }
<add>});
<add>
<add>test("from", 30, function() {
<add> moment.lang('en');
<add> var start = moment([2007, 1, 28]);
<add> equal(start.from(moment([2007, 1, 28]).add({s:44}), true), "a few seconds", "44 seconds = a few seconds");
<add> equal(start.from(moment([2007, 1, 28]).add({s:45}), true), "a minute", "45 seconds = a minute");
<add> equal(start.from(moment([2007, 1, 28]).add({s:89}), true), "a minute", "89 seconds = a minute");
<add> equal(start.from(moment([2007, 1, 28]).add({s:90}), true), "2 minutes", "90 seconds = 2 minutes");
<add> equal(start.from(moment([2007, 1, 28]).add({m:44}), true), "44 minutes", "44 minutes = 44 minutes");
<add> equal(start.from(moment([2007, 1, 28]).add({m:45}), true), "an hour", "45 minutes = an hour");
<add> equal(start.from(moment([2007, 1, 28]).add({m:89}), true), "an hour", "89 minutes = an hour");
<add> equal(start.from(moment([2007, 1, 28]).add({m:90}), true), "2 hours", "90 minutes = 2 hours");
<add> equal(start.from(moment([2007, 1, 28]).add({h:5}), true), "5 hours", "5 hours = 5 hours");
<add> equal(start.from(moment([2007, 1, 28]).add({h:21}), true), "21 hours", "21 hours = 21 hours");
<add> equal(start.from(moment([2007, 1, 28]).add({h:22}), true), "a day", "22 hours = a day");
<add> equal(start.from(moment([2007, 1, 28]).add({h:35}), true), "a day", "35 hours = a day");
<add> equal(start.from(moment([2007, 1, 28]).add({h:36}), true), "2 days", "36 hours = 2 days");
<add> equal(start.from(moment([2007, 1, 28]).add({d:1}), true), "a day", "1 day = a day");
<add> equal(start.from(moment([2007, 1, 28]).add({d:5}), true), "5 days", "5 days = 5 days");
<add> equal(start.from(moment([2007, 1, 28]).add({d:25}), true), "25 days", "25 days = 25 days");
<add> equal(start.from(moment([2007, 1, 28]).add({d:26}), true), "a month", "26 days = a month");
<add> equal(start.from(moment([2007, 1, 28]).add({d:30}), true), "a month", "30 days = a month");
<add> equal(start.from(moment([2007, 1, 28]).add({d:45}), true), "a month", "45 days = a month");
<add> equal(start.from(moment([2007, 1, 28]).add({d:46}), true), "2 months", "46 days = 2 months");
<add> equal(start.from(moment([2007, 1, 28]).add({d:74}), true), "2 months", "75 days = 2 months");
<add> equal(start.from(moment([2007, 1, 28]).add({d:76}), true), "3 months", "76 days = 3 months");
<add> equal(start.from(moment([2007, 1, 28]).add({M:1}), true), "a month", "1 month = a month");
<add> equal(start.from(moment([2007, 1, 28]).add({M:5}), true), "5 months", "5 months = 5 months");
<add> equal(start.from(moment([2007, 1, 28]).add({d:344}), true), "11 months", "344 days = 11 months");
<add> equal(start.from(moment([2007, 1, 28]).add({d:345}), true), "a year", "345 days = a year");
<add> equal(start.from(moment([2007, 1, 28]).add({d:547}), true), "a year", "547 days = a year");
<add> equal(start.from(moment([2007, 1, 28]).add({d:548}), true), "2 years", "548 days = 2 years");
<add> equal(start.from(moment([2007, 1, 28]).add({y:1}), true), "a year", "1 year = a year");
<add> equal(start.from(moment([2007, 1, 28]).add({y:5}), true), "5 years", "5 years = 5 years");
<add>});
<add>
<add>test("suffix", 2, function() {
<add> moment.lang('en');
<add> equal(moment(30000).from(0), "in a few seconds", "prefix");
<add> equal(moment(0).from(30000), "a few seconds ago", "suffix");
<add>});
<add>
<add>
<add>test("now from now", 1, function() {
<add> moment.lang('en');
<add> equal(moment().fromNow(), "a few seconds ago", "now from now should display as in the past");
<add>});
<add>
<add>
<add>test("fromNow", 2, function() {
<add> moment.lang('en');
<add> equal(moment().add({s:30}).fromNow(), "in a few seconds", "in a few seconds");
<add> equal(moment().add({d:5}).fromNow(), "in 5 days", "in 5 days");
<add>});
<add> | 2 |
Javascript | Javascript | support multiple classes in key | 7eaaca8ef2b3db76b7c87e98d264d4b16d90a392 | <ide><path>src/ng/directive/ngClass.js
<ide> function classDirective(name, selector) {
<ide> var classes = [], i = 0;
<ide> forEach(classVal, function(v, k) {
<ide> if (v) {
<del> classes.push(k);
<add> classes = classes.concat(k.split(' '));
<ide> }
<ide> });
<ide> return classes;
<ide><path>test/ng/directive/ngClassSpec.js
<ide> describe('ngClass', function() {
<ide> })
<ide> );
<ide>
<add> it('should allow ngClass with overlapping classes', inject(function($rootScope, $compile, $animate) {
<add> element = $compile('<div ng-class="{\'same yes\': test, \'same no\': !test}"></div>')($rootScope);
<add> $rootScope.$digest();
<add>
<add> expect(element).toHaveClass('same');
<add> expect(element).not.toHaveClass('yes');
<add> expect(element).toHaveClass('no');
<add>
<add> $rootScope.$apply(function() {
<add> $rootScope.test = true;
<add> });
<add>
<add> expect(element).toHaveClass('same');
<add> expect(element).toHaveClass('yes');
<add> expect(element).not.toHaveClass('no');
<add> }));
<add>
<ide> it('should allow both ngClass and ngClassOdd/Even with multiple classes', inject(function($rootScope, $compile) {
<ide> element = $compile('<ul>' +
<ide> '<li ng-repeat="i in [0,1]" ng-class="[\'A\', \'B\']" ' + | 2 |
Text | Text | move urls.py changes down and add necessary import | 44ae037e81d0ca24e28a3506564a92e1ac247eb1 | <ide><path>docs/tutorial/1-serialization.md
<ide> We'll need to add our new `snippets` app and the `rest_framework` app to `INSTAL
<ide> 'snippets',
<ide> )
<ide>
<del>We also need to wire up the root urlconf, in the `tutorial/urls.py` file, to include our snippet app's URLs.
<del>
<del> urlpatterns = [
<del> url(r'^', include('snippets.urls')),
<del> ]
<del>
<ide> Okay, we're ready to roll.
<ide>
<ide> ## Creating a model to work with
<ide> Finally we need to wire these views up. Create the `snippets/urls.py` file:
<ide> url(r'^snippets/$', views.snippet_list),
<ide> url(r'^snippets/(?P<pk>[0-9]+)/$', views.snippet_detail),
<ide> ]
<add>
<add>We also need to wire up the root urlconf, in the `tutorial/urls.py` file, to include our snippet app's URLs.
<add>
<add> from django.conf.urls import url, include
<add>
<add> urlpatterns = [
<add> url(r'^', include('snippets.urls')),
<add> ]
<ide>
<ide> It's worth noting that there are a couple of edge cases we're not dealing with properly at the moment. If we send malformed `json`, or if a request is made with a method that the view doesn't handle, then we'll end up with a 500 "server error" response. Still, this'll do for now.
<ide> | 1 |
Java | Java | add baseurl to defaulturitemplatehandler | 1ba0625cd9d9043b3b80a0de872cfbaf3e324467 | <ide><path>spring-web/src/main/java/org/springframework/web/util/DefaultUriTemplateHandler.java
<ide> package org.springframework.web.util;
<ide>
<ide> import java.net.URI;
<add>import java.net.URISyntaxException;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide>
<add>import org.springframework.util.Assert;
<add>
<ide> /**
<ide> * Default implementation of {@link UriTemplateHandler} that relies on
<ide> * {@link UriComponentsBuilder} internally.
<ide> */
<ide> public class DefaultUriTemplateHandler implements UriTemplateHandler {
<ide>
<add> private String baseUrl;
<add>
<ide> private boolean parsePath;
<ide>
<ide>
<add> /**
<add> * Configure a base URL to prepend URI templates with. The base URL should
<add> * have a scheme and host but may also contain a port and a partial path.
<add> * Individual URI templates then may provide the remaining part of the URL
<add> * including additional path, query and fragment.
<add> *
<add> * <p><strong>Note: </strong>Individual URI templates are expanded and
<add> * encoded before being appended to the base URL. Therefore the base URL is
<add> * expected to be fully expanded and encoded, which can be done with the help
<add> * of {@link UriComponentsBuilder}.
<add> *
<add> * @param baseUrl the base URL.
<add> */
<add> public void setBaseUrl(String baseUrl) {
<add> if (baseUrl != null) {
<add> UriComponents uriComponents = UriComponentsBuilder.fromUriString(baseUrl).build();
<add> Assert.hasText(uriComponents.getScheme(), "'baseUrl' must have a scheme");
<add> Assert.hasText(uriComponents.getHost(), "'baseUrl' must have a host");
<add> Assert.isNull(uriComponents.getQuery(), "'baseUrl' cannot have a query");
<add> Assert.isNull(uriComponents.getFragment(), "'baseUrl' cannot have a fragment");
<add> }
<add> this.baseUrl = baseUrl;
<add> }
<add>
<add> /**
<add> * Return the configured base URL.
<add> */
<add> public String getBaseUrl() {
<add> return this.baseUrl;
<add> }
<add>
<ide> /**
<ide> * Whether to parse the path of a URI template string into path segments.
<ide> * <p>If set to {@code true} the path of parsed URI templates is decomposed
<ide> public boolean shouldParsePath() {
<ide>
<ide> @Override
<ide> public URI expand(String uriTemplate, Map<String, ?> uriVariables) {
<del> UriComponentsBuilder builder = initBuilder(uriTemplate);
<del> return builder.build().expand(uriVariables).encode().toUri();
<add> UriComponentsBuilder uriComponentsBuilder = initUriComponentsBuilder(uriTemplate);
<add> UriComponents uriComponents = uriComponentsBuilder.build().expand(uriVariables).encode();
<add> return insertBaseUrl(uriComponents);
<ide> }
<ide>
<ide> @Override
<ide> public URI expand(String uriTemplate, Object... uriVariableValues) {
<del> UriComponentsBuilder builder = initBuilder(uriTemplate);
<del> return builder.build().expand(uriVariableValues).encode().toUri();
<add> UriComponentsBuilder uriComponentsBuilder = initUriComponentsBuilder(uriTemplate);
<add> UriComponents uriComponents = uriComponentsBuilder.build().expand(uriVariableValues).encode();
<add> return insertBaseUrl(uriComponents);
<ide> }
<ide>
<del> protected UriComponentsBuilder initBuilder(String uriTemplate) {
<add> protected UriComponentsBuilder initUriComponentsBuilder(String uriTemplate) {
<ide> UriComponentsBuilder builder = UriComponentsBuilder.fromUriString(uriTemplate);
<ide> if (shouldParsePath()) {
<ide> List<String> pathSegments = builder.build().getPathSegments();
<ide> protected UriComponentsBuilder initBuilder(String uriTemplate) {
<ide> return builder;
<ide> }
<ide>
<add> protected URI insertBaseUrl(UriComponents uriComponents) {
<add> if (getBaseUrl() == null || uriComponents.getHost() != null) {
<add> return uriComponents.toUri();
<add> }
<add> String url = getBaseUrl() + uriComponents.toUriString();
<add> try {
<add> return new URI(url);
<add> }
<add> catch (URISyntaxException ex) {
<add> throw new IllegalArgumentException("Invalid URL after inserting base URL: " + url, ex);
<add> }
<add> }
<add>
<ide> }
<ide><path>spring-web/src/test/java/org/springframework/web/util/DefaultUriTemplateHandlerTests.java
<ide> public void setUp() throws Exception {
<ide> }
<ide>
<ide>
<add> @Test
<add> public void baseUrl() throws Exception {
<add> this.handler.setBaseUrl("http://localhost:8080");
<add> URI actual = this.handler.expand("/myapiresource");
<add>
<add> URI expected = new URI("http://localhost:8080/myapiresource");
<add> assertEquals(expected, actual);
<add> }
<add>
<add> @Test
<add> public void baseUrlWithPartialPath() throws Exception {
<add> this.handler.setBaseUrl("http://localhost:8080/context");
<add> URI actual = this.handler.expand("/myapiresource");
<add>
<add> URI expected = new URI("http://localhost:8080/context/myapiresource");
<add> assertEquals(expected, actual);
<add> }
<add>
<ide> @Test
<ide> public void expandWithFullPath() throws Exception {
<ide> Map<String, String> vars = new HashMap<String, String>(2);
<ide> public void expandWithFullPath() throws Exception {
<ide> URI actual = this.handler.expand(template, vars);
<ide>
<ide> URI expected = new URI("http://example.com/hotels/1/pic/pics/logo.png");
<del> assertEquals("Invalid expanded template", expected, actual);
<add> assertEquals(expected, actual);
<ide> }
<ide>
<ide> @Test
<del> public void expandWithFullPathParsedIntoPathSegments() throws Exception {
<add> public void expandWithFullPathAndParsePathEnabled() throws Exception {
<ide> Map<String, String> vars = new HashMap<String, String>(2);
<ide> vars.put("hotel", "1");
<ide> vars.put("publicpath", "pics/logo.png");
<ide> public void expandWithFullPathParsedIntoPathSegments() throws Exception {
<ide> URI actual = this.handler.expand(template, vars);
<ide>
<ide> URI expected = new URI("http://example.com/hotels/1/pic/pics%2Flogo.png/size/150x150");
<del> assertEquals("Invalid expanded template", expected, actual);
<add> assertEquals(expected, actual);
<ide> }
<ide>
<ide> } | 2 |
Python | Python | use tiny models for onnx tests - text modality | d2357a0133f0157cb7def2995f62b6088f97ad0f | <ide><path>tests/onnx/test_onnx_v2.py
<ide> def test_values_override(self):
<ide>
<ide>
<ide> PYTORCH_EXPORT_MODELS = {
<del> ("albert", "hf-internal-testing/tiny-albert"),
<del> ("bert", "bert-base-cased"),
<del> ("big-bird", "google/bigbird-roberta-base"),
<del> ("ibert", "kssteven/ibert-roberta-base"),
<add> ("albert", "hf-internal-testing/tiny-random-AlbertModel"),
<add> ("bert", "hf-internal-testing/tiny-random-BertModel"),
<add> ("beit", "microsoft/beit-base-patch16-224"),
<add> ("big-bird", "hf-internal-testing/tiny-random-BigBirdModel"),
<ide> ("camembert", "camembert-base"),
<del> ("clip", "openai/clip-vit-base-patch32"),
<del> ("convbert", "YituTech/conv-bert-base"),
<del> ("codegen", "Salesforce/codegen-350M-multi"),
<del> ("deberta", "microsoft/deberta-base"),
<del> ("deberta-v2", "microsoft/deberta-v2-xlarge"),
<add> ("clip", "hf-internal-testing/tiny-random-CLIPModel"),
<add> ("convbert", "hf-internal-testing/tiny-random-ConvBertModel"),
<add> ("codegen", "hf-internal-testing/tiny-random-CodeGenModel"),
<add> ("data2vec-text", "hf-internal-testing/tiny-random-Data2VecTextModel"),
<add> ("data2vec-vision", "facebook/data2vec-vision-base"),
<add> ("deberta", "hf-internal-testing/tiny-random-DebertaModel"),
<add> ("deberta-v2", "hf-internal-testing/tiny-random-DebertaV2Model"),
<add> ("deit", "facebook/deit-small-patch16-224"),
<ide> ("convnext", "facebook/convnext-tiny-224"),
<ide> ("detr", "facebook/detr-resnet-50"),
<del> ("distilbert", "distilbert-base-cased"),
<del> ("electra", "google/electra-base-generator"),
<add> ("distilbert", "hf-internal-testing/tiny-random-DistilBertModel"),
<add> ("electra", "hf-internal-testing/tiny-random-ElectraModel"),
<add> ("groupvit", "nvidia/groupvit-gcc-yfcc"),
<add> ("ibert", "kssteven/ibert-roberta-base"),
<ide> ("imagegpt", "openai/imagegpt-small"),
<del> ("resnet", "microsoft/resnet-50"),
<del> ("roberta", "roberta-base"),
<del> ("roformer", "junnyu/roformer_chinese_base"),
<del> ("squeezebert", "squeezebert/squeezebert-uncased"),
<del> ("mobilebert", "google/mobilebert-uncased"),
<add> ("levit", "facebook/levit-128S"),
<add> ("layoutlm", "hf-internal-testing/tiny-random-LayoutLMModel"),
<add> ("layoutlmv3", "microsoft/layoutlmv3-base"),
<add> ("longformer", "allenai/longformer-base-4096"),
<add> ("mobilebert", "hf-internal-testing/tiny-random-MobileBertModel"),
<ide> ("mobilenet_v1", "google/mobilenet_v1_0.75_192"),
<ide> ("mobilenet_v2", "google/mobilenet_v2_0.35_96"),
<ide> ("mobilevit", "apple/mobilevit-small"),
<del> ("xlm", "xlm-clm-ende-1024"),
<del> ("xlm-roberta", "xlm-roberta-base"),
<del> ("layoutlm", "microsoft/layoutlm-base-uncased"),
<del> ("layoutlmv3", "microsoft/layoutlmv3-base"),
<del> ("groupvit", "nvidia/groupvit-gcc-yfcc"),
<del> ("levit", "facebook/levit-128S"),
<ide> ("owlvit", "google/owlvit-base-patch32"),
<del> ("vit", "google/vit-base-patch16-224"),
<del> ("deit", "facebook/deit-small-patch16-224"),
<del> ("beit", "microsoft/beit-base-patch16-224"),
<del> ("data2vec-text", "facebook/data2vec-text-base"),
<del> ("data2vec-vision", "facebook/data2vec-vision-base"),
<del> ("perceiver", "deepmind/language-perceiver", ("masked-lm", "sequence-classification")),
<del> ("perceiver", "deepmind/vision-perceiver-conv", ("image-classification",)),
<del> ("longformer", "allenai/longformer-base-4096"),
<del> ("yolos", "hustvl/yolos-tiny"),
<add> ("perceiver", "hf-internal-testing/tiny-random-PerceiverModel", ("masked-lm", "sequence-classification")),
<add> ("perceiver", "hf-internal-testing/tiny-random-PerceiverModel", ("image-classification",)),
<add> ("resnet", "microsoft/resnet-50"),
<add> ("roberta", "hf-internal-testing/tiny-random-RobertaModel"),
<add> ("roformer", "hf-internal-testing/tiny-random-RoFormerModel"),
<ide> ("segformer", "nvidia/segformer-b0-finetuned-ade-512-512"),
<add> ("squeezebert", "hf-internal-testing/tiny-random-SqueezeBertModel"),
<ide> ("swin", "microsoft/swin-tiny-patch4-window7-224"),
<add> ("vit", "google/vit-base-patch16-224"),
<add> ("yolos", "hustvl/yolos-tiny"),
<ide> ("whisper", "openai/whisper-tiny.en"),
<add> ("xlm", "hf-internal-testing/tiny-random-XLMModel"),
<add> ("xlm-roberta", "hf-internal-testing/tiny-random-XLMRobertaXLModel"),
<ide> }
<ide>
<ide> PYTORCH_EXPORT_ENCODER_DECODER_MODELS = {
<ide> ("vision-encoder-decoder", "nlpconnect/vit-gpt2-image-captioning"),
<ide> }
<ide>
<ide> PYTORCH_EXPORT_WITH_PAST_MODELS = {
<del> ("bloom", "bigscience/bloom-560m"),
<del> ("gpt2", "gpt2"),
<del> ("gpt-neo", "EleutherAI/gpt-neo-125M"),
<add> ("bloom", "hf-internal-testing/tiny-random-BloomModel"),
<add> ("gpt2", "hf-internal-testing/tiny-random-GPT2Model"),
<add> ("gpt-neo", "hf-internal-testing/tiny-random-GPTNeoModel"),
<ide> }
<ide>
<ide> PYTORCH_EXPORT_SEQ2SEQ_WITH_PAST_MODELS = {
<del> ("bart", "facebook/bart-base"),
<del> ("mbart", "sshleifer/tiny-mbart"),
<del> ("t5", "t5-small"),
<add> ("bart", "hf-internal-testing/tiny-random-BartModel"),
<add> ("bigbird-pegasus", "hf-internal-testing/tiny-random-BigBirdPegasusModel"),
<add> ("blenderbot-small", "facebook/blenderbot_small-90M"),
<add> ("blenderbot", "hf-internal-testing/tiny-random-BlenderbotModel"),
<add> ("longt5", "hf-internal-testing/tiny-random-LongT5Model"),
<ide> ("marian", "Helsinki-NLP/opus-mt-en-de"),
<add> ("mbart", "sshleifer/tiny-mbart"),
<ide> ("mt5", "google/mt5-base"),
<del> ("m2m-100", "facebook/m2m100_418M"),
<del> ("blenderbot-small", "facebook/blenderbot_small-90M"),
<del> ("blenderbot", "facebook/blenderbot-400M-distill"),
<del> ("bigbird-pegasus", "google/bigbird-pegasus-large-arxiv"),
<del> ("longt5", "google/long-t5-local-base"),
<del> # Disable for now as it causes fatal error `Floating point exception (core dumped)` and the subsequential tests are
<del> # not run.
<del> # ("longt5", "google/long-t5-tglobal-base"),
<add> ("m2m-100", "hf-internal-testing/tiny-random-M2M100Model"),
<add> ("t5", "hf-internal-testing/tiny-random-T5Model"),
<ide> }
<ide>
<ide> # TODO(lewtun): Include the same model types in `PYTORCH_EXPORT_MODELS` once TensorFlow has parity with the PyTorch model implementations.
<ide> TENSORFLOW_EXPORT_DEFAULT_MODELS = {
<ide> ("albert", "hf-internal-testing/tiny-albert"),
<del> ("bert", "bert-base-cased"),
<add> ("bert", "hf-internal-testing/tiny-random-BertModel"),
<ide> ("camembert", "camembert-base"),
<del> ("distilbert", "distilbert-base-cased"),
<del> ("roberta", "roberta-base"),
<add> ("distilbert", "hf-internal-testing/tiny-random-DistilBertModel"),
<add> ("roberta", "hf-internal-testing/tiny-random-RobertaModel"),
<ide> }
<ide>
<ide> # TODO(lewtun): Include the same model types in `PYTORCH_EXPORT_WITH_PAST_MODELS` once TensorFlow has parity with the PyTorch model implementations. | 1 |
Text | Text | fix typos in doc/api/https.md | e028ea0291b845e4bec3c7cff7319a027b8c815e | <ide><path>doc/api/https.md
<ide> const options = {
<ide> return new Error(msg);
<ide> }
<ide>
<del> // Pin the exact certificate, rather then the pub key
<add> // Pin the exact certificate, rather than the pub key
<ide> const cert256 = '25:FE:39:32:D9:63:8C:8A:FC:A1:9A:29:87:' +
<ide> 'D8:3E:4C:1D:98:DB:71:E4:1A:48:03:98:EA:22:6A:BD:8B:93:16';
<ide> if (cert.fingerprint256 !== cert256) { | 1 |
Javascript | Javascript | improve flow typing and linting for messagequeue | 7b575d669d8d6806f17fb679aa34ecb05c75883a | <ide><path>Libraries/BatchedBridge/MessageQueue.js
<ide> * @format
<ide> */
<ide>
<del>/*eslint no-bitwise: 0*/
<del>
<ide> 'use strict';
<ide>
<ide> const ErrorUtils = require('ErrorUtils');
<ide> export type SpyData = {
<ide> type: number,
<ide> module: ?string,
<ide> method: string | number,
<del> args: any,
<add> args: any[],
<ide> };
<ide>
<ide> const TO_JS = 0;
<ide> const METHOD_IDS = 1;
<ide> const PARAMS = 2;
<ide> const MIN_TIME_BETWEEN_FLUSHES_MS = 5;
<ide>
<add>// eslint-disable-next-line no-bitwise
<ide> const TRACE_TAG_REACT_APPS = 1 << 17;
<ide>
<ide> const DEBUG_INFO_LIMIT = 32;
<ide> let JSTimers = null;
<ide>
<ide> class MessageQueue {
<ide> _lazyCallableModules: {[key: string]: (void) => Object};
<del> _queue: [Array<number>, Array<number>, Array<any>, number];
<del> _successCallbacks: Array<?Function>;
<del> _failureCallbacks: Array<?Function>;
<add> _queue: [number[], number[], any[], number];
<add> _successCallbacks: (?Function)[];
<add> _failureCallbacks: (?Function)[];
<ide> _callID: number;
<ide> _inCall: number;
<ide> _lastFlush: number;
<ide> _eventLoopStartTime: number;
<ide>
<del> _debugInfo: Object;
<del> _remoteModuleTable: Object;
<del> _remoteMethodTable: Object;
<add> _debugInfo: {[number]: [number, number]};
<add> _remoteModuleTable: {[number]: string};
<add> _remoteMethodTable: {[number]: string[]};
<ide>
<ide> __spy: ?(data: SpyData) => void;
<ide>
<ide> class MessageQueue {
<ide> }
<ide> }
<ide>
<del> callFunctionReturnFlushedQueue(
<del> module: string,
<del> method: string,
<del> args: Array<any>,
<del> ) {
<add> callFunctionReturnFlushedQueue(module: string, method: string, args: any[]) {
<ide> this.__guard(() => {
<ide> this.__callFunction(module, method, args);
<ide> });
<ide> class MessageQueue {
<ide> callFunctionReturnResultAndFlushedQueue(
<ide> module: string,
<ide> method: string,
<del> args: Array<any>,
<add> args: any[],
<ide> ) {
<ide> let result;
<ide> this.__guard(() => {
<ide> class MessageQueue {
<ide> return [result, this.flushedQueue()];
<ide> }
<ide>
<del> invokeCallbackAndReturnFlushedQueue(cbID: number, args: Array<any>) {
<add> invokeCallbackAndReturnFlushedQueue(cbID: number, args: any[]) {
<ide> this.__guard(() => {
<ide> this.__invokeCallback(cbID, args);
<ide> });
<ide> class MessageQueue {
<ide> enqueueNativeCall(
<ide> moduleID: number,
<ide> methodID: number,
<del> params: Array<any>,
<add> params: any[],
<ide> onFail: ?Function,
<ide> onSucc: ?Function,
<ide> ) {
<ide> class MessageQueue {
<ide> }
<ide> // Encode callIDs into pairs of callback identifiers by shifting left and using the rightmost bit
<ide> // to indicate fail (0) or success (1)
<add> // eslint-disable-next-line no-bitwise
<ide> onFail && params.push(this._callID << 1);
<add> // eslint-disable-next-line no-bitwise
<ide> onSucc && params.push((this._callID << 1) | 1);
<ide> this._successCallbacks[this._callID] = onSucc;
<ide> this._failureCallbacks[this._callID] = onFail;
<ide> class MessageQueue {
<ide> }
<ide> }
<ide>
<del> createDebugLookup(moduleID: number, name: string, methods: Array<string>) {
<add> createDebugLookup(moduleID: number, name: string, methods: string[]) {
<ide> if (__DEV__) {
<ide> this._remoteModuleTable[moduleID] = name;
<ide> this._remoteMethodTable[moduleID] = methods;
<ide> class MessageQueue {
<ide> Systrace.endEvent();
<ide> }
<ide>
<del> __callFunction(module: string, method: string, args: Array<any>) {
<add> __callFunction(module: string, method: string, args: any[]): any {
<ide> this._lastFlush = new Date().getTime();
<ide> this._eventLoopStartTime = this._lastFlush;
<ide> Systrace.beginEvent(`${module}.${method}()`);
<ide> class MessageQueue {
<ide> return result;
<ide> }
<ide>
<del> __invokeCallback(cbID: number, args: Array<any>) {
<add> __invokeCallback(cbID: number, args: any[]) {
<ide> this._lastFlush = new Date().getTime();
<ide> this._eventLoopStartTime = this._lastFlush;
<ide>
<ide> // The rightmost bit of cbID indicates fail (0) or success (1), the other bits are the callID shifted left.
<add> // eslint-disable-next-line no-bitwise
<ide> const callID = cbID >>> 1;
<del> const callback =
<del> cbID & 1
<del> ? this._successCallbacks[callID]
<del> : this._failureCallbacks[callID];
<add> // eslint-disable-next-line no-bitwise
<add> const isSuccess = cbID & 1;
<add> const callback = isSuccess
<add> ? this._successCallbacks[callID]
<add> : this._failureCallbacks[callID];
<ide>
<ide> if (__DEV__) {
<ide> const debug = this._debugInfo[callID];
<ide> class MessageQueue {
<ide> }
<ide>
<ide> this._successCallbacks[callID] = this._failureCallbacks[callID] = null;
<del> callback.apply(null, args);
<add> callback(...args);
<ide>
<ide> if (__DEV__) {
<ide> Systrace.endEvent();
<ide><path>Libraries/BatchedBridge/__tests__/MessageQueue-test.js
<ide> describe('MessageQueue', function() {
<ide> it('should call the stored callback', () => {
<ide> let done = false;
<ide> queue.enqueueNativeCall(0, 1, [], () => {}, () => { done = true; });
<del> queue.__invokeCallback(1);
<add> queue.__invokeCallback(1, []);
<ide> expect(done).toEqual(true);
<ide> });
<ide>
<ide> it('should throw when calling the same callback twice', () => {
<ide> queue.enqueueNativeCall(0, 1, [], () => {}, () => {});
<del> queue.__invokeCallback(1);
<del> expect(() => queue.__invokeCallback(1)).toThrow();
<add> queue.__invokeCallback(1, []);
<add> expect(() => queue.__invokeCallback(1, [])).toThrow();
<ide> });
<ide>
<ide> it('should throw when calling both success and failure callback', () => {
<ide> queue.enqueueNativeCall(0, 1, [], () => {}, () => {});
<del> queue.__invokeCallback(1);
<del> expect(() => queue.__invokeCallback(0)).toThrow();
<add> queue.__invokeCallback(1, []);
<add> expect(() => queue.__invokeCallback(0, [])).toThrow();
<ide> });
<ide>
<ide> it('should throw when calling with unknown module', () => {
<ide><path>Libraries/Interaction/BridgeSpyStallHandler.js
<ide> const BridgeSpyStallHandler = {
<ide> }
<ide> }
<ide> return `${info.type === TO_JS ? 'N->JS' : 'JS->N'} : ` +
<del> `${info.module ? (info.module + '.') : ''}${info.method}(${args})`;
<add> `${info.module ? (info.module + '.') : ''}${info.method}(${JSON.stringify(args)})`;
<ide> }),
<ide> );
<ide> }, | 3 |
Javascript | Javascript | add freecodecamp to blocklist | f80e34a9509f3208c01524ea25dd573fcacbe760 | <ide><path>config/constants.js
<ide> let blocklist = [
<ide> 'field-guide',
<ide> 'forgot',
<ide> 'forum',
<add> 'freecodecamp',
<ide> 'get-help',
<ide> 'get-pai',
<ide> 'guide', | 1 |
Python | Python | update tok2vec layer | 07b47eaac8bc169fdf88677e0660b34ea5f24d7a | <ide><path>spacy/ml/models/tok2vec.py
<ide> def make_hash_embed(feature):
<ide> )
<ide> else:
<ide> model = chain(
<del> chain(
<del> FeatureExtractor(cols),
<del> list2ragged(),
<del> with_array(concatenate(*embeddings)),
<del> ),
<add> FeatureExtractor(cols),
<add> list2ragged(),
<add> with_array(concatenate(*embeddings)),
<ide> with_array(Maxout(width, concat_size, nP=3, dropout=0.0, normalize=True)),
<ide> ragged2list(),
<ide> )
<ide> def make_hash_embed(feature):
<ide>
<ide> @registry.architectures.register("spacy.CharacterEmbed.v1")
<ide> def CharacterEmbed(width: int, rows: int, nM: int, nC: int):
<del> model = concatenate(
<del> _character_embed.CharacterEmbed(nM=nM, nC=nC),
<del> chain(
<del> FeatureExtractor([NORM]),
<del> with_array(HashEmbed(nO=width, nV=rows, column=0, seed=5))
<del> )
<add> model = chain(
<add> concatenate(
<add> chain(_character_embed.CharacterEmbed(nM=nM, nC=nC), list2ragged()),
<add> chain(
<add> FeatureExtractor([NORM]),
<add> list2ragged(),
<add> with_array(HashEmbed(nO=width, nV=rows, column=0, seed=5))
<add> )
<add> ),
<add> with_array(Maxout(width, nM * nC + width, nP=3, normalize=True, dropout=0.0)),
<add> ragged2list()
<ide> )
<del> model.set_dim("nO", nM * nC + width)
<ide> return model
<ide>
<ide>
<ide> def MaxoutWindowEncoder(width: int, window_size: int, maxout_pieces: int, depth:
<ide> def MishWindowEncoder(width, window_size, depth):
<ide> cnn = chain(
<ide> expand_window(window_size=window_size),
<del> Mish(nO=width, nI=width * ((window_size * 2) + 1)),
<del> LayerNorm(width),
<add> Mish(
<add> nO=width,
<add> nI=width * ((window_size * 2) + 1),
<add> dropout=0.0,
<add> normalize=True
<add> ),
<ide> )
<ide> model = clone(residual(cnn), depth)
<ide> model.set_dim("nO", width) | 1 |
PHP | PHP | remove git junk | b5efbbc0a836ad292c2427bdb617bb5d17d73a8f | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function chunkById($count, callable $callback, $column = 'id')
<ide> return false;
<ide> }
<ide>
<del><<<<<<< HEAD
<del> if ($column) {
<del> $lastId = $results->last()->{$column};
<del> }
<del>=======
<del> $lastId = last($results)->{$column};
<del>>>>>>>> 5.2
<add> $lastId = $results->last()->{$column};
<ide>
<ide> $results = $this->forPageAfterId($count, $lastId, $column)->get();
<ide> } | 1 |
Python | Python | fix thinko in assert_deprecated() | 7747c3a88cb0cad5687093d1345efcb2743fc1d5 | <ide><path>numpy/core/tests/test_deprecations.py
<ide> def assert_deprecated(self, function, num=1, ignore_others=False,
<ide> if num is not None and num_found != num:
<ide> msg = "%i warnings found but %i expected." % (len(self.log), num)
<ide> lst = [w.category for w in self.log]
<del> raise AssertionError("\n".join([msg] + [lst]))
<add> raise AssertionError("\n".join([msg] + lst))
<ide>
<ide> with warnings.catch_warnings():
<ide> warnings.filterwarnings("error", message=self.message,
<ide> def test_fortran_contiguous(self):
<ide> self.assert_deprecated(np.ones((2,2)).T.view, args=(np.int8,))
<ide>
<ide>
<add>class TestTestDeprecated(object):
<add> def test_assert_deprecated(self):
<add> test_case_instance = _DeprecationTestCase()
<add> test_case_instance.setUp()
<add> assert_raises(AssertionError,
<add> test_case_instance.assert_deprecated,
<add> lambda: None)
<add>
<add> def foo():
<add> warnings.warn("foo", category=DeprecationWarning)
<add>
<add> test_case_instance.assert_deprecated(foo)
<add>
<ide> if __name__ == "__main__":
<ide> run_module_suite() | 1 |
Javascript | Javascript | add test for oaep hash mismatch | 636dbfe787690f675305e8d5d0f642df8b29834d | <ide><path>test/parallel/test-crypto-rsa-dsa.js
<ide> test_rsa('RSA_PKCS1_OAEP_PADDING', undefined, 'sha1');
<ide> test_rsa('RSA_PKCS1_OAEP_PADDING', 'sha1', undefined);
<ide> test_rsa('RSA_PKCS1_OAEP_PADDING', 'sha256', 'sha256');
<ide> test_rsa('RSA_PKCS1_OAEP_PADDING', 'sha512', 'sha512');
<add>common.expectsError(() => {
<add> test_rsa('RSA_PKCS1_OAEP_PADDING', 'sha256', 'sha512');
<add>}, {
<add> code: 'ERR_OSSL_RSA_OAEP_DECODING_ERROR'
<add>});
<ide>
<ide> // The following RSA-OAEP test cases were created using the WebCrypto API to
<ide> // ensure compatibility when using non-SHA1 hash functions. | 1 |
Python | Python | use airflow.security.utils instead of socket | feaffa9f300be40f6bddb48257c926a1a722f4f7 | <ide><path>airflow/hooks/hive_hooks.py
<ide> import logging
<ide> import re
<ide> import subprocess
<del>import socket
<ide> from tempfile import NamedTemporaryFile
<ide>
<ide>
<ide> from airflow.hooks.base_hook import BaseHook
<ide> from airflow.utils import TemporaryDirectory
<ide> from airflow.configuration import conf
<del>
<add>import airflow.security.utils
<ide>
<ide> class HiveCliHook(BaseHook):
<ide> """
<ide> def run_cli(self, hql, schema=None, verbose=True):
<ide>
<ide> if self.use_beeline:
<ide> hive_bin = 'beeline'
<del> if conf.get('security','enabled'):
<add> if conf.get('security', 'enabled'):
<ide> template = conn.extra_dejson.get('principal',"hive/[email protected]")
<del> template = template.replace("_HOST", socket.getfqdn())
<add> template = airflow.security.utils.replace_hostname_pattern(
<add> airflow.security.utils.get_components(template)
<add> )
<ide>
<ide> proxy_user = ""
<ide> if conn.extra_dejson.get('proxy_user') == "login" and conn.login:
<ide> def run_cli(self, hql, schema=None, verbose=True):
<ide>
<ide> return stdout
<ide>
<del>
<ide> def test_hql(self, hql):
<ide> """
<ide> Test an hql statement using the hive cli and EXPLAIN
<ide> def test_hql(self, hql):
<ide> context = '\n'.join(query.split('\n')[begin:end])
<ide> logging.info("Context :\n {0}".format(context))
<ide> else:
<del> logging.info("SUCCESS")
<del>
<add> logging.info("SUCCESS")
<ide>
<ide> def load_file(
<ide> self, | 1 |
Javascript | Javascript | improve error handling in runtimemodules | 6a7b1b777c089300d26d464edbe88222a2891e87 | <ide><path>lib/RuntimeModule.js
<ide> class RuntimeModule extends Module {
<ide> updateHash(hash, chunkGraph) {
<ide> // Do not use getGeneratedCode here, because i. e. compilation hash is not
<ide> // ready at this point. We will cache it later instead.
<del> hash.update(this.generate());
<add> try {
<add> hash.update(this.generate());
<add> } catch (err) {
<add> hash.update(err.message);
<add> }
<ide> super.updateHash(hash, chunkGraph);
<ide> }
<ide>
<ide><path>test/configCases/errors/exception-in-chunk-renderer/errors.js
<del>module.exports = [
<del> [/Test exception/],
<del> [/Test exception/]
<del>];
<add>module.exports = [[/Test exception/]]; | 2 |
PHP | PHP | throw an exception on invalid cache configuration | 4c51cb99af3be03adc5a49120cc96f044b9a359c | <ide><path>src/Cache/Engine/MemcachedEngine.php
<ide> public function init(array $config = []): bool
<ide> }
<ide> $this->_setOptions();
<ide>
<del> if (count($this->_Memcached->getServerList())) {
<add> $serverList = $this->_Memcached->getServerList();
<add> if (count($serverList)) {
<add> if ($this->_config['persistent'] !== false) {
<add> $actualServers = [];
<add> foreach ($serverList as $server) {
<add> $actualServers[] = $server['host'] . ':' . $server['port'];
<add> }
<add> unset($server);
<add> sort($actualServers);
<add> $configuredServers = $this->_config['servers'];
<add> sort($configuredServers);
<add> if ($actualServers !== $configuredServers) {
<add> $message = "Invalid cache configuration. Multiple persistent cache configurations are detected" .
<add> " with different 'servers' values. 'servers' values for persistent cache configurations" .
<add> " must be the same when using the same persistence id.";
<add> throw new InvalidArgumentException($message);
<add> }
<add> }
<add>
<ide> return true;
<ide> }
<ide>
<ide><path>tests/TestCase/Cache/Engine/MemcachedEngineTest.php
<ide> public function testSaslAuthException()
<ide> $MemcachedEngine->init($config);
<ide> }
<ide>
<add> /**
<add> * testConfigDifferentPorts method
<add> *
<add> * @expectedException InvalidArgumentException
<add> * @expectedExceptionMessage Invalid cache configuration. Multiple persistent cache configurations are detected with different 'servers' values. 'servers' values for persistent cache configurations must be the same when using the same persistence id.
<add> * @return void
<add> */
<add> public function testConfigDifferentPorts()
<add> {
<add> $Memcached1 = new MemcachedEngine();
<add> $config1 = [
<add> 'className' => 'Memcached',
<add> 'servers' => ['127.0.0.1:11211'],
<add> 'persistent' => true,
<add> ];
<add> $Memcached1->init($config1);
<add>
<add> $Memcached2 = new MemcachedEngine();
<add> $config2 = [
<add> 'className' => 'Memcached',
<add> 'servers' => ['127.0.0.1:11212'],
<add> 'persistent' => true,
<add> ];
<add> $Memcached2->init($config2);
<add> }
<add>
<ide> /**
<ide> * testConfig method
<ide> * | 2 |
PHP | PHP | add replacement for lower danish "æ" | a32792233dd7db53a0177ac5a9ae3f3a13d53662 | <ide><path>src/Illuminate/Support/Str.php
<ide> protected static function languageSpecificCharsArray($language)
<ide> ['h', 'H', 'sht', 'SHT', 'a', 'А', 'y', 'Y'],
<ide> ],
<ide> 'da' => [
<del> ['ø', 'å', 'Æ', 'Ø', 'Å'],
<del> ['oe', 'aa', 'Ae', 'Oe', 'Aa'],
<add> ['æ', 'ø', 'å', 'Æ', 'Ø', 'Å'],
<add> ['ae', 'oe', 'aa', 'Ae', 'Oe', 'Aa'],
<ide> ],
<ide> 'de' => [
<ide> ['ä', 'ö', 'ü', 'Ä', 'Ö', 'Ü'], | 1 |
Go | Go | fix reloading of daemon labels from config | 455858fc70e3b752fb2495bbdda368613e6587c2 | <ide><path>daemon/daemon.go
<ide> func (daemon *Daemon) initDiscovery(config *Config) error {
<ide> func (daemon *Daemon) Reload(config *Config) error {
<ide> daemon.configStore.reloadLock.Lock()
<ide> defer daemon.configStore.reloadLock.Unlock()
<del> if config.IsValueSet("label") {
<add> if config.IsValueSet("labels") {
<ide> daemon.configStore.Labels = config.Labels
<ide> }
<ide> if config.IsValueSet("debug") {
<ide><path>daemon/daemon_test.go
<ide> func TestDaemonReloadLabels(t *testing.T) {
<ide> }
<ide>
<ide> valuesSets := make(map[string]interface{})
<del> valuesSets["label"] = "foo:baz"
<add> valuesSets["labels"] = "foo:baz"
<ide> newConfig := &Config{
<ide> CommonConfig: CommonConfig{
<ide> Labels: []string{"foo:baz"},
<ide> func TestDaemonReloadNotAffectOthers(t *testing.T) {
<ide> }
<ide>
<ide> valuesSets := make(map[string]interface{})
<del> valuesSets["label"] = "foo:baz"
<add> valuesSets["labels"] = "foo:baz"
<ide> newConfig := &Config{
<ide> CommonConfig: CommonConfig{
<ide> Labels: []string{"foo:baz"}, | 2 |
Python | Python | add stepcounterhook to hooks_helper.py | 8155eb9dbbd487d063458de0dd3de26541d06196 | <ide><path>official/utils/logs/hooks_helper.py
<ide> def get_logging_metric_hook(tensors_to_log=None,
<ide> every_n_secs=every_n_secs)
<ide>
<ide>
<add>def get_step_counter_hook(**kwargs):
<add> """Function to get StepCounterHook."""
<add> del kwargs
<add> return tf.estimator.StepCounterHook()
<add>
<add>
<ide> # A dictionary to map one hook name and its corresponding function
<ide> HOOKS = {
<ide> 'loggingtensorhook': get_logging_tensor_hook,
<ide> 'profilerhook': get_profiler_hook,
<ide> 'examplespersecondhook': get_examples_per_second_hook,
<ide> 'loggingmetrichook': get_logging_metric_hook,
<add> 'stepcounterhook': get_step_counter_hook
<ide> } | 1 |
Javascript | Javascript | add queues to compilation | 5b4cbb5ee041bd96c80f0d657bda5f6fe5bc1450 | <ide><path>lib/Compilation.js
<ide> const ModuleTemplate = require("./ModuleTemplate");
<ide> const RuntimeTemplate = require("./RuntimeTemplate");
<ide> const Stats = require("./Stats");
<ide> const compareLocations = require("./compareLocations");
<add>const AsyncQueue = require("./util/AsyncQueue");
<ide> const Queue = require("./util/Queue");
<del>const Semaphore = require("./util/Semaphore");
<ide> const SortableSet = require("./util/SortableSet");
<ide> const {
<ide> concatComparators,
<ide> const { arrayToSetDeprecation } = require("./util/deprecation");
<ide>
<ide> // TODO use @callback
<ide> /** @typedef {{[assetName: string]: Source}} CompilationAssets */
<del>/** @typedef {(err?: Error|null, result?: Module) => void } ModuleCallback */
<del>/** @typedef {(err?: Error|null, result?: Module) => void } ModuleChainCallback */
<add>/** @typedef {(err?: WebpackError|null, result?: Module) => void } ModuleCallback */
<ide> /** @typedef {(err?: Error|null) => void} Callback */
<ide> /** @typedef {(d: Dependency) => any} DepBlockVarDependenciesCallback */
<ide> /** @typedef {new (...args: any[]) => Dependency} DepConstructor */
<ide> const { arrayToSetDeprecation } = require("./util/deprecation");
<ide> * @property {(data: ModuleFactoryCreateData, callback: ModuleCallback) => any} create
<ide> */
<ide>
<del>/**
<del> * @typedef {Object} SortedDependency
<del> * @property {ModuleFactory} factory
<del> * @property {Dependency[]} dependencies
<del> */
<del>
<ide> /**
<ide> * @typedef {Object} AvailableModulesChunkGroupMapping
<ide> * @property {ChunkGroup} chunkGroup
<ide> class Compilation {
<ide> buildModule: new SyncHook(["module"]),
<ide> /** @type {SyncHook<Module>} */
<ide> rebuildModule: new SyncHook(["module"]),
<del> /** @type {SyncHook<Module, Error>} */
<add> /** @type {SyncHook<Module, WebpackError>} */
<ide> failedModule: new SyncHook(["module", "error"]),
<ide> /** @type {SyncHook<Module>} */
<ide> succeedModule: new SyncHook(["module"]),
<ide> class Compilation {
<ide> this.moduleGraph = new ModuleGraph();
<ide> this.chunkGraph = undefined;
<ide>
<del> this.semaphore = new Semaphore(options.parallelism || 100);
<add> this.factorizeQueue = new AsyncQueue({
<add> name: "factorize",
<add> parallelism: options.parallelism || 100,
<add> processor: this._factorizeModule.bind(this)
<add> });
<add> this.buildQueue = new AsyncQueue({
<add> name: "build",
<add> parallelism: options.parallelism || 100,
<add> processor: this._buildModule.bind(this)
<add> });
<add> this.rebuildQueue = new AsyncQueue({
<add> name: "rebuild",
<add> parallelism: options.parallelism || 100,
<add> processor: this._rebuildModule.bind(this)
<add> });
<add> this.processDependenciesQueue = new AsyncQueue({
<add> name: "processDependencies",
<add> parallelism: options.parallelism || 100,
<add> processor: this._processModuleDependencies.bind(this)
<add> });
<ide>
<ide> /** @type {Map<string, EntryDependency[]>} */
<ide> this.entryDependencies = new Map();
<ide> class Compilation {
<ide> /** @type {WeakSet<Module>} */
<ide> this.builtModules = new WeakSet();
<ide> /** @private @type {Map<Module, Callback[]>} */
<del> this._buildingModules = new Map();
<del> /** @private @type {Map<Module, Callback[]>} */
<ide> this._rebuildingModules = new Map();
<ide> }
<ide>
<ide> class Compilation {
<ide> * @typedef {Object} AddModuleResult
<ide> * @property {Module} module the added or existing module
<ide> * @property {boolean} issuer was this the first request for this module
<del> * @property {boolean} build should the module be build
<del> * @property {boolean} dependencies should dependencies be walked
<ide> */
<ide>
<ide> /**
<ide> * @param {Module} module module to be added that was created
<ide> * @param {any=} cacheGroup cacheGroup it is apart of
<del> * @returns {AddModuleResult} returns meta about whether or not the module had built
<del> * had an issuer, or any dependnecies
<add> * @returns {Module} returns the module in the compilation,
<add> * it could be the passed one (if new), or an already existing in the compilation
<ide> */
<ide> addModule(module, cacheGroup) {
<ide> const identifier = module.identifier();
<ide> const alreadyAddedModule = this._modules.get(identifier);
<ide> if (alreadyAddedModule) {
<del> return {
<del> module: alreadyAddedModule,
<del> issuer: false,
<del> build: false,
<del> dependencies: false
<del> };
<add> return alreadyAddedModule;
<ide> }
<ide> const cacheName = (cacheGroup || "m") + identifier;
<ide> if (this.cache && this.cache[cacheName]) {
<ide> const cacheModule = this.cache[cacheName];
<ide>
<ide> cacheModule.updateCacheModule(module);
<ide>
<del> let rebuild = true;
<del> if (this.fileTimestamps && this.contextTimestamps) {
<del> rebuild = cacheModule.needRebuild(
<del> this.fileTimestamps,
<del> this.contextTimestamps
<del> );
<del> }
<del>
<del> if (!rebuild) {
<del> this._modules.set(identifier, cacheModule);
<del> this.modules.add(cacheModule);
<del> for (const err of cacheModule.errors) {
<del> this.errors.push(err);
<del> }
<del> for (const err of cacheModule.warnings) {
<del> this.warnings.push(err);
<del> }
<del> ModuleGraph.setModuleGraphForModule(cacheModule, this.moduleGraph);
<del> return {
<del> module: cacheModule,
<del> issuer: true,
<del> build: false,
<del> dependencies: true
<del> };
<del> }
<del> cacheModule.unbuild();
<ide> module = cacheModule;
<ide> }
<ide> this._modules.set(identifier, module);
<ide> class Compilation {
<ide> }
<ide> this.modules.add(module);
<ide> ModuleGraph.setModuleGraphForModule(module, this.moduleGraph);
<del> return {
<del> module: module,
<del> issuer: true,
<del> build: true,
<del> dependencies: true
<del> };
<add> return module;
<ide> }
<ide>
<ide> /**
<ide> class Compilation {
<ide> }
<ide>
<ide> /**
<del> * @param {Module} module module with its callback list
<del> * @param {Callback} callback the callback function
<add> * Schedules a build of the module object
<add> *
<add> * @param {Module} module module to be built
<add> * @param {ModuleCallback} callback the callback
<ide> * @returns {void}
<ide> */
<del> waitForBuildingFinished(module, callback) {
<del> let callbackList = this._buildingModules.get(module);
<del> if (callbackList) {
<del> callbackList.push(() => callback());
<del> } else {
<del> process.nextTick(callback);
<del> }
<add> buildModule(module, callback) {
<add> this.buildQueue.add(module, callback);
<ide> }
<ide>
<ide> /**
<ide> * Builds the module object
<ide> *
<ide> * @param {Module} module module to be built
<del> * @param {TODO} thisCallback the callback
<add> * @param {ModuleCallback} callback the callback
<ide> * @returns {TODO} returns the callback function with results
<ide> */
<del> buildModule(module, thisCallback) {
<del> let callbackList = this._buildingModules.get(module);
<del> if (callbackList) {
<del> callbackList.push(thisCallback);
<del> return;
<add> _buildModule(module, callback) {
<add> const currentProfile = this.profile
<add> ? this.moduleGraph.getProfile(module)
<add> : undefined;
<add> if (currentProfile !== undefined) {
<add> currentProfile.markBuildingStart();
<ide> }
<del> this._buildingModules.set(module, (callbackList = [thisCallback]));
<ide>
<del> const callback = err => {
<del> this._buildingModules.delete(module);
<del> for (const cb of callbackList) {
<del> cb(err);
<add> let rebuild = true;
<add> if (this.fileTimestamps && this.contextTimestamps) {
<add> rebuild = module.needBuild(this.fileTimestamps, this.contextTimestamps);
<add> }
<add>
<add> if (!rebuild) {
<add> for (const err of module.errors) {
<add> this.errors.push(err);
<ide> }
<del> };
<add> for (const err of module.warnings) {
<add> this.warnings.push(err);
<add> }
<add> if (currentProfile !== undefined) {
<add> currentProfile.markBuildingEnd();
<add> }
<add> return callback();
<add> }
<ide>
<ide> this.hooks.buildModule.call(module);
<ide> this.builtModules.add(module);
<ide> class Compilation {
<ide> this,
<ide> this.resolverFactory.get("normal", module.resolveOptions),
<ide> this.inputFileSystem,
<del> error => {
<add> err => {
<ide> module.dependencies.sort((a, b) => compareLocations(a.loc, b.loc));
<del> if (error) {
<del> this.hooks.failedModule.call(module, error);
<del> return callback(error);
<add> if (currentProfile !== undefined) {
<add> currentProfile.markBuildingEnd();
<add> }
<add> if (err) {
<add> this.hooks.failedModule.call(module, err);
<add> return callback(err);
<ide> }
<ide> this.hooks.succeedModule.call(module);
<ide> return callback();
<ide> class Compilation {
<ide> * @returns {void}
<ide> */
<ide> processModuleDependencies(module, callback) {
<add> this.processDependenciesQueue.add(module, callback);
<add> }
<add>
<add> /**
<add> * @param {Module} module to be processed for deps
<add> * @param {ModuleCallback} callback callback to be triggered
<add> * @returns {void}
<add> */
<add> _processModuleDependencies(module, callback) {
<ide> const dependencies = new Map();
<ide>
<ide> let currentBlock = module;
<ide> class Compilation {
<ide> }
<ide> }
<ide>
<del> this.addModuleDependencies(module, sortedDependencies, this.bail, callback);
<add> // This is nested so we need to allow one additional task
<add> this.processDependenciesQueue.increaseParallelism();
<add>
<add> asyncLib.forEach(
<add> sortedDependencies,
<add> (item, callback) => {
<add> this.handleModuleCreation(
<add> {
<add> factory: item.factory,
<add> dependencies: item.dependencies,
<add> originModule: module
<add> },
<add> err => {
<add> // In V8, the Error objects keep a reference to the functions on the stack. These warnings &
<add> // errors are created inside closures that keep a reference to the Compilation, so errors are
<add> // leaking the Compilation object.
<add> if (err && this.bail) {
<add> // eslint-disable-next-line no-self-assign
<add> err.stack = err.stack;
<add> return callback(err);
<add> }
<add> callback();
<add> }
<add> );
<add> },
<add> err => {
<add> this.processDependenciesQueue.decreaseParallelism();
<add>
<add> return callback(err);
<add> }
<add> );
<ide> }
<ide>
<ide> /**
<del> * @typedef {Object} HandleNewModuleOptions
<del> * @property {Module} newModule
<del> * @property {Module | null} originModule
<add> * @typedef {Object} HandleModuleCreationOptions
<add> * @property {ModuleFactory} factory
<ide> * @property {Dependency[]} dependencies
<del> * @property {ModuleProfile} currentProfile
<del> * @property {boolean=} bail
<add> * @property {Module | null} originModule
<add> * @property {string=} context
<ide> */
<ide>
<ide> /**
<del> * @param {HandleNewModuleOptions} options options object
<add> * @param {HandleModuleCreationOptions} options options object
<ide> * @param {ModuleCallback} callback callback
<ide> * @returns {void}
<ide> */
<del> handleNewModule(
<del> { newModule, originModule, dependencies, currentProfile, bail },
<add> handleModuleCreation(
<add> { factory, dependencies, originModule, context },
<ide> callback
<ide> ) {
<del> this.semaphore.acquire(() => {
<del> const moduleGraph = this.moduleGraph;
<del>
<del> const addModuleResult = this.addModule(newModule);
<del>
<del> const module = addModuleResult.module;
<add> const moduleGraph = this.moduleGraph;
<ide>
<del> for (let i = 0; i < dependencies.length; i++) {
<del> const dependency = dependencies[i];
<del> moduleGraph.setResolvedModule(originModule, dependency, module);
<del> }
<add> const currentProfile = this.profile ? new ModuleProfile() : undefined;
<ide>
<del> if (addModuleResult.issuer) {
<del> if (currentProfile !== undefined) {
<del> moduleGraph.setProfile(module, currentProfile);
<add> this.factorizeModule(
<add> { currentProfile, factory, dependencies, originModule, context },
<add> (err, newModule) => {
<add> if (err) {
<add> if (dependencies.every(d => d.optional)) {
<add> this.warnings.push(err);
<add> } else {
<add> this.errors.push(err);
<add> }
<add> return callback(err);
<ide> }
<ide>
<del> if (originModule !== undefined) {
<del> moduleGraph.setIssuer(module, originModule);
<add> if (!newModule) {
<add> return callback();
<ide> }
<del> } else {
<del> if (currentProfile !== undefined) {
<del> currentProfile.mergeInto(moduleGraph.getProfile(module));
<add>
<add> const module = this.addModule(newModule);
<add>
<add> for (let i = 0; i < dependencies.length; i++) {
<add> const dependency = dependencies[i];
<add> moduleGraph.setResolvedModule(originModule, dependency, module);
<ide> }
<del> }
<ide>
<del> const afterBuild = () => {
<del> if (addModuleResult.dependencies) {
<del> this.processModuleDependencies(module, err => {
<del> if (err) return callback(err);
<del> callback(null, module);
<del> });
<add> if (module === newModule) {
<add> if (currentProfile !== undefined) {
<add> moduleGraph.setProfile(module, currentProfile);
<add> }
<add>
<add> if (originModule !== undefined) {
<add> moduleGraph.setIssuer(module, originModule);
<add> }
<ide> } else {
<del> return callback(null, module);
<add> if (currentProfile !== undefined) {
<add> currentProfile.mergeInto(moduleGraph.getProfile(module));
<add> }
<ide> }
<del> };
<ide>
<del> if (addModuleResult.build) {
<del> if (currentProfile !== undefined) {
<del> currentProfile.markBuildingStart();
<del> }
<del> this.buildModule(module, err => {
<del> if (err) {
<add> this.buildModule(module, error => {
<add> if (error) {
<add> const err = /** @type {WebpackError} */ (error);
<ide> if (!err.module) {
<ide> err.module = module;
<ide> }
<ide> this.errors.push(err);
<del> this.semaphore.release();
<del> if (bail) return callback(err);
<del> return callback();
<add>
<add> return callback(err);
<ide> }
<ide>
<del> if (currentProfile !== undefined) {
<del> currentProfile.markBuildingEnd();
<add> // This avoids deadlocks for circular dependencies
<add> if (this.processDependenciesQueue.isProcessing(module)) {
<add> return callback();
<ide> }
<ide>
<del> this.semaphore.release();
<del> afterBuild();
<add> this.processModuleDependencies(module, err => {
<add> if (err) {
<add> return callback(err);
<add> }
<add> callback(null, module);
<add> });
<ide> });
<del> } else {
<del> this.semaphore.release();
<del> this.waitForBuildingFinished(module, afterBuild);
<ide> }
<del> });
<add> );
<ide> }
<ide>
<ide> /**
<del> * @typedef {Object} HandleModuleCreationOptions
<add> * @typedef {Object} FactorizeModuleOptions
<add> * @property {ModuleProfile} currentProfile
<ide> * @property {ModuleFactory} factory
<ide> * @property {Dependency[]} dependencies
<ide> * @property {Module | null} originModule
<ide> * @property {string=} context
<del> * @property {boolean=} bail
<ide> */
<ide>
<ide> /**
<del> * @param {HandleModuleCreationOptions} options options object
<add> * @param {FactorizeModuleOptions} options options object
<ide> * @param {ModuleCallback} callback callback
<ide> * @returns {void}
<ide> */
<del> handleModuleCreation(
<del> { factory, dependencies, originModule, context, bail },
<del> callback
<del> ) {
<del> const semaphore = this.semaphore;
<del> semaphore.acquire(() => {
<del> const currentProfile = this.profile ? new ModuleProfile() : undefined;
<del> factory.create(
<del> {
<del> contextInfo: {
<del> issuer: originModule ? originModule.nameForCondition() : "",
<del> compiler: this.compiler.name
<del> },
<del> resolveOptions: originModule
<del> ? originModule.resolveOptions
<del> : undefined,
<del> context: context
<del> ? context
<del> : originModule
<del> ? originModule.context
<del> : this.compiler.context,
<del> dependencies: dependencies
<del> },
<del> (err, newModule) => {
<del> semaphore.release();
<del> if (err) {
<del> const notFoundError = new ModuleNotFoundError(
<del> originModule,
<del> err,
<del> dependencies.map(d => d.loc).filter(Boolean)[0]
<del> );
<del> if (dependencies.every(d => d.optional)) {
<del> this.warnings.push(notFoundError);
<del> } else {
<del> this.errors.push(notFoundError);
<del> }
<del> if (bail) return callback(notFoundError);
<del> return callback();
<del> }
<del> if (!newModule) {
<del> return process.nextTick(callback);
<del> }
<del> if (currentProfile !== undefined) {
<del> currentProfile.markFactoryEnd();
<del> }
<del>
<del> this.handleNewModule(
<del> {
<del> newModule,
<del> originModule,
<del> dependencies,
<del> currentProfile,
<del> bail
<del> },
<del> callback
<del> );
<del> }
<del> );
<del> });
<add> factorizeModule(options, callback) {
<add> this.factorizeQueue.add(options, callback);
<ide> }
<ide>
<ide> /**
<del> * @param {Module} module module to add deps to
<del> * @param {SortedDependency[]} dependencies set of sorted dependencies to iterate through
<del> * @param {(boolean|null)=} bail whether to bail or not
<del> * @param {function} callback callback for when dependencies are finished being added
<add> * @param {FactorizeModuleOptions} options options object
<add> * @param {ModuleCallback} callback callback
<ide> * @returns {void}
<ide> */
<del> addModuleDependencies(module, dependencies, bail, callback) {
<del> asyncLib.forEach(
<del> dependencies,
<del> (item, callback) => {
<del> this.handleModuleCreation(
<del> {
<del> factory: item.factory,
<del> dependencies: item.dependencies,
<del> originModule: module,
<del> bail
<del> },
<del> callback
<del> );
<add> _factorizeModule(
<add> { currentProfile, factory, dependencies, originModule, context },
<add> callback
<add> ) {
<add> if (currentProfile !== undefined) {
<add> currentProfile.markFactoryStart();
<add> }
<add> factory.create(
<add> {
<add> contextInfo: {
<add> issuer: originModule ? originModule.nameForCondition() : "",
<add> compiler: this.compiler.name
<add> },
<add> resolveOptions: originModule ? originModule.resolveOptions : undefined,
<add> context: context
<add> ? context
<add> : originModule
<add> ? originModule.context
<add> : this.compiler.context,
<add> dependencies: dependencies
<ide> },
<del> err => {
<del> // In V8, the Error objects keep a reference to the functions on the stack. These warnings &
<del> // errors are created inside closures that keep a reference to the Compilation, so errors are
<del> // leaking the Compilation object.
<del>
<add> (err, newModule) => {
<ide> if (err) {
<del> // eslint-disable-next-line no-self-assign
<del> err.stack = err.stack;
<del> return callback(err);
<add> const notFoundError = new ModuleNotFoundError(
<add> originModule,
<add> err,
<add> dependencies.map(d => d.loc).filter(Boolean)[0]
<add> );
<add> return callback(notFoundError);
<add> }
<add> if (!newModule) {
<add> return callback();
<add> }
<add> if (currentProfile !== undefined) {
<add> currentProfile.markFactoryEnd();
<ide> }
<ide>
<del> return process.nextTick(callback);
<add> callback(null, newModule);
<ide> }
<ide> );
<ide> }
<ide> class Compilation {
<ide> *
<ide> * @param {string} context context string path
<ide> * @param {Dependency} dependency dependency used to create Module chain
<del> * @param {ModuleChainCallback} callback callback for when module chain is complete
<add> * @param {ModuleCallback} callback callback for when module chain is complete
<ide> * @returns {void} will throw if dependency instance is not a valid Dependency
<ide> */
<ide> addModuleChain(context, dependency, callback) {
<ide> class Compilation {
<ide> factory: moduleFactory,
<ide> dependencies: [dependency],
<ide> originModule: null,
<del> context,
<del> bail: this.bail
<add> context
<ide> },
<del> callback
<add> err => {
<add> if (this.bail) {
<add> this.buildQueue.stop();
<add> this.rebuildQueue.stop();
<add> this.processDependenciesQueue.stop();
<add> this.factorizeQueue.stop();
<add> return callback(err);
<add> }
<add> return callback();
<add> }
<ide> );
<ide> }
<ide>
<ide> class Compilation {
<ide>
<ide> /**
<ide> * @param {Module} module module to be rebuilt
<del> * @param {Callback} thisCallback callback when module finishes rebuilding
<add> * @param {ModuleCallback} callback callback when module finishes rebuilding
<ide> * @returns {void}
<ide> */
<del> rebuildModule(module, thisCallback) {
<del> let callbackList = this._rebuildingModules.get(module);
<del> if (callbackList) {
<del> callbackList.push(thisCallback);
<del> return;
<del> }
<del> this._rebuildingModules.set(module, (callbackList = [thisCallback]));
<del>
<del> const callback = err => {
<del> this._rebuildingModules.delete(module);
<del> for (const cb of callbackList) {
<del> cb(err);
<del> }
<del> };
<add> rebuildModule(module, callback) {
<add> this.rebuildQueue.add(module, callback);
<add> }
<ide>
<add> /**
<add> * @param {Module} module module to be rebuilt
<add> * @param {ModuleCallback} callback callback when module finishes rebuilding
<add> * @returns {void}
<add> */
<add> _rebuildModule(module, callback) {
<ide> this.hooks.rebuildModule.call(module);
<ide> const oldDependencies = module.dependencies.slice();
<ide> const oldBlocks = module.blocks.slice();
<del> module.unbuild();
<add> module.invalidateBuild();
<add> this.buildQueue.invalidate(module);
<ide> this.buildModule(module, err => {
<ide> if (err) {
<ide> this.hooks.finishRebuildingModule.call(module);
<ide><path>lib/ContextModule.js
<ide> const { OriginalSource, RawSource } = require("webpack-sources");
<ide> const AsyncDependenciesBlock = require("./AsyncDependenciesBlock");
<ide> const Module = require("./Module");
<ide> const Template = require("./Template");
<add>const WebpackError = require("./WebpackError");
<ide> const { compareModulesById } = require("./util/comparators");
<ide> const contextify = require("./util/identifier").contextify;
<ide>
<ide> class ContextModule extends Module {
<ide> }
<ide>
<ide> this._identifier = this._createIdentifier();
<add> this._forceBuild = true;
<ide> }
<ide>
<ide> /**
<ide> class ContextModule extends Module {
<ide> return identifier;
<ide> }
<ide>
<add> /**
<add> * @returns {void}
<add> */
<add> invalidateBuild() {
<add> this._forceBuild = true;
<add> }
<add>
<ide> /**
<ide> * @param {TODO} fileTimestamps timestamps of files
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> if (this._forceBuild) return true;
<ide> const ts = contextTimestamps.get(this.context);
<ide> if (!ts) {
<ide> return true;
<ide> class ContextModule extends Module {
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<add> this._forceBuild = false;
<ide> this.buildMeta = {};
<ide> this.buildInfo = {
<ide> builtTime: Date.now(),
<ide> contextDependencies: this._contextDependencies
<ide> };
<add> this.dependencies.length = 0;
<add> this.blocks.length = 0;
<ide> this.resolveDependencies(fs, this.options, (err, dependencies) => {
<ide> if (err) return callback(err);
<ide>
<ide> class ContextModule extends Module {
<ide> }
<ide> } else {
<ide> callback(
<del> new Error(`Unsupported mode "${this.options.mode}" in context`)
<add> new WebpackError(`Unsupported mode "${this.options.mode}" in context`)
<ide> );
<ide> return;
<ide> }
<ide><path>lib/DelegatedModule.js
<ide> const DelegatedSourceDependency = require("./dependencies/DelegatedSourceDepende
<ide> /** @typedef {import("./Module").SourceContext} SourceContext */
<ide> /** @typedef {import("./RequestShortener")} RequestShortener */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./WebpackError")} WebpackError */
<ide> /** @typedef {import("./dependencies/ModuleDependency")} ModuleDependency */
<ide> /** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> class DelegatedModule extends Module {
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<del> return false;
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> return !this.buildMeta;
<ide> }
<ide>
<ide> /**
<ide> * @param {TODO} options TODO
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide> this.buildMeta = Object.assign({}, this.delegateData.buildMeta);
<ide> this.buildInfo = {};
<add> this.dependencies.length = 0;
<ide> this.delegatedSourceDependency = new DelegatedSourceDependency(
<ide> this.sourceRequest
<ide> );
<ide><path>lib/DllModule.js
<ide> const Module = require("./Module");
<ide> /** @typedef {import("./Module").SourceContext} SourceContext */
<ide> /** @typedef {import("./RequestShortener")} RequestShortener */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./WebpackError")} WebpackError */
<ide> /** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> class DllModule extends Module {
<ide> class DllModule extends Module {
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide> class DllModule extends Module {
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<del> return false;
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> return !this.buildMeta;
<ide> }
<ide>
<ide> /**
<ide><path>lib/ExternalModule.js
<ide> const Template = require("./Template");
<ide> /** @typedef {import("./Module").SourceContext} SourceContext */
<ide> /** @typedef {import("./RequestShortener")} RequestShortener */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./WebpackError")} WebpackError */
<ide> /** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> /**
<ide> class ExternalModule extends Module {
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<del> return false;
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> return !this.buildMeta;
<ide> }
<ide>
<ide> /**
<ide> * @param {TODO} options TODO
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide><path>lib/Module.js
<ide> class Module extends DependenciesBlock {
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> return (
<add> !this.buildMeta || this.needRebuild(fileTimestamps, contextTimestamps)
<add> );
<add> }
<add>
<add> /**
<add> * @deprecated Use needBuild instead
<add> * @param {TODO} fileTimestamps timestamps of files
<add> * @param {TODO} contextTimestamps timestamps of directories
<add> * @returns {boolean} true, if the module needs a rebuild
<add> */
<ide> needRebuild(fileTimestamps, contextTimestamps) {
<ide> return true;
<ide> }
<ide> class Module extends DependenciesBlock {
<ide> /**
<ide> * @returns {void}
<ide> */
<del> unbuild() {
<del> this.dependencies.length = 0;
<del> this.blocks.length = 0;
<del> this.buildMeta = undefined;
<del> this.buildInfo = undefined;
<add> invalidateBuild() {
<add> // should be overriden to support this feature
<ide> }
<ide>
<ide> /**
<ide> class Module extends DependenciesBlock {
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide><path>lib/ModuleProfile.js
<ide> class ModuleProfile {
<ide> this.additionalIntegration = 0;
<ide> }
<ide>
<add> markFactoryStart() {
<add> this.factoryStartTime = Date.now();
<add> }
<add>
<ide> markFactoryEnd() {
<del> this.factoryTime = Date.now();
<del> this.factory = this.factoryTime - this.startTime;
<add> this.factoryEndTime = Date.now();
<add> this.factory = this.factoryEndTime - this.factoryStartTime;
<ide> }
<ide>
<ide> markIntegrationStart() {
<ide><path>lib/NormalModule.js
<ide> const contextify = require("./util/identifier").contextify;
<ide> /** @typedef {import("./Module").SourceContext} SourceContext */
<ide> /** @typedef {import("./RequestShortener")} RequestShortener */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./WebpackError")} WebpackError */
<ide> /** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> const asString = buf => {
<ide> class NormalModule extends Module {
<ide>
<ide> // Cache
<ide> this._lastSuccessfulBuildMeta = {};
<add> this._forceBuild = true;
<ide> }
<ide>
<ide> /**
<ide> class NormalModule extends Module {
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide> this.buildTimestamp = Date.now();
<add> this._forceBuild = false;
<ide> this._source = null;
<ide> this._ast = null;
<ide> this._buildHash = "";
<ide> this.error = null;
<ide> this.errors.length = 0;
<ide> this.warnings.length = 0;
<add> this.dependencies.length = 0;
<add> this.blocks.length = 0;
<ide> this.buildMeta = {};
<ide> this.buildInfo = {
<ide> cacheable: false,
<ide> class NormalModule extends Module {
<ide> return this._source;
<ide> }
<ide>
<add> /**
<add> * @returns {void}
<add> */
<add> invalidateBuild() {
<add> this._forceBuild = true;
<add> }
<add>
<ide> /**
<ide> * @param {TODO} fileTimestamps timestamps of files
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> // build if enforced
<add> if (this._forceBuild) return true;
<add>
<ide> // always try to rebuild in case of an error
<ide> if (this.error) return true;
<ide>
<ide><path>lib/RawModule.js
<ide> const Module = require("./Module");
<ide> /** @typedef {import("./Module").SourceContext} SourceContext */
<ide> /** @typedef {import("./RequestShortener")} RequestShortener */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./WebpackError")} WebpackError */
<ide> /** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> module.exports = class RawModule extends Module {
<ide> module.exports = class RawModule extends Module {
<ide> * @param {TODO} contextTimestamps timestamps of directories
<ide> * @returns {boolean} true, if the module needs a rebuild
<ide> */
<del> needRebuild(fileTimestamps, contextTimestamps) {
<del> return false;
<add> needBuild(fileTimestamps, contextTimestamps) {
<add> return !this.buildMeta;
<ide> }
<ide>
<ide> /**
<ide> * @param {TODO} options TODO
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide><path>lib/dependencies/LoaderPlugin.js
<ide>
<ide> "use strict";
<ide>
<del>const NormalModule = require("../NormalModule");
<ide> const LoaderDependency = require("./LoaderDependency");
<ide>
<ide> /** @typedef {import("../Module")} Module */
<ide> class LoaderPlugin {
<ide> )
<ide> );
<ide> }
<del> compilation.semaphore.release();
<del> compilation.addModuleDependencies(
<del> module,
<del> [
<del> {
<del> factory,
<del> dependencies: [dep]
<del> }
<del> ],
<del> true,
<add> compilation.buildQueue.increaseParallelism();
<add> compilation.handleModuleCreation(
<add> {
<add> factory,
<add> dependencies: [dep],
<add> originModule: loaderContext._module,
<add> context: loaderContext.context
<add> },
<ide> err => {
<del> compilation.semaphore.acquire(() => {
<del> if (err) {
<del> return callback(err);
<del> }
<del> const referencedModule = moduleGraph.getModule(dep);
<del> if (!referencedModule) {
<del> return callback(new Error("Cannot load the module"));
<del> }
<del> // TODO consider removing this in webpack 5
<del> if (
<del> referencedModule instanceof NormalModule &&
<del> referencedModule.error
<del> ) {
<del> return callback(referencedModule.error);
<del> }
<del> const moduleSource = referencedModule.originalSource();
<del> if (!moduleSource) {
<del> throw new Error(
<del> "The module created for a LoaderDependency must have an original source"
<del> );
<del> }
<del> let source, map;
<del> if (moduleSource.sourceAndMap) {
<del> const sourceAndMap = moduleSource.sourceAndMap();
<del> map = sourceAndMap.map;
<del> source = sourceAndMap.source;
<del> } else {
<del> map = moduleSource.map();
<del> source = moduleSource.source();
<del> }
<del> if (referencedModule.buildInfo.fileDependencies) {
<del> for (const d of referencedModule.buildInfo
<del> .fileDependencies) {
<del> loaderContext.addDependency(d);
<del> }
<add> compilation.buildQueue.decreaseParallelism();
<add> if (err) {
<add> return callback(err);
<add> }
<add> const referencedModule = moduleGraph.getModule(dep);
<add> if (!referencedModule) {
<add> return callback(new Error("Cannot load the module"));
<add> }
<add> const moduleSource = referencedModule.originalSource();
<add> if (!moduleSource) {
<add> throw new Error(
<add> "The module created for a LoaderDependency must have an original source"
<add> );
<add> }
<add> let source, map;
<add> if (moduleSource.sourceAndMap) {
<add> const sourceAndMap = moduleSource.sourceAndMap();
<add> map = sourceAndMap.map;
<add> source = sourceAndMap.source;
<add> } else {
<add> map = moduleSource.map();
<add> source = moduleSource.source();
<add> }
<add> if (referencedModule.buildInfo.fileDependencies) {
<add> for (const d of referencedModule.buildInfo.fileDependencies) {
<add> loaderContext.addDependency(d);
<ide> }
<del> if (referencedModule.buildInfo.contextDependencies) {
<del> for (const d of referencedModule.buildInfo
<del> .contextDependencies) {
<del> loaderContext.addContextDependency(d);
<del> }
<add> }
<add> if (referencedModule.buildInfo.contextDependencies) {
<add> for (const d of referencedModule.buildInfo
<add> .contextDependencies) {
<add> loaderContext.addContextDependency(d);
<ide> }
<del> return callback(null, source, map, referencedModule);
<del> });
<add> }
<add> return callback(null, source, map, referencedModule);
<ide> }
<ide> );
<ide> };
<ide><path>lib/optimize/ConcatenatedModule.js
<ide> const createHash = require("../util/createHash");
<ide> /** @typedef {import("../ModuleGraph")} ModuleGraph */
<ide> /** @typedef {import("../RequestShortener")} RequestShortener */
<ide> /** @typedef {import("../RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("../WebpackError")} WebpackError */
<ide> /** @typedef {import("../util/createHash").Hash} Hash */
<ide>
<ide> /**
<ide> class ConcatenatedModule extends Module {
<ide> * @param {Compilation} compilation the compilation
<ide> * @param {TODO} resolver TODO
<ide> * @param {TODO} fs the file system
<del> * @param {function(Error=): void} callback callback function
<add> * @param {function(WebpackError=): void} callback callback function
<ide> * @returns {void}
<ide> */
<ide> build(options, compilation, resolver, fs, callback) {
<ide><path>lib/util/AsyncQueue.js
<add>/*
<add> MIT License http://www.opensource.org/licenses/mit-license.php
<add> Author Tobias Koppers @sokra
<add>*/
<add>
<add>"use strict";
<add>
<add>const { SyncHook, AsyncSeriesHook } = require("tapable");
<add>
<add>/** @template R @typedef {(err?: Error|null, result?: R) => void} Callback<T> */
<add>
<add>/**
<add> * @template T
<add> * @template R
<add> */
<add>class AsyncQueue {
<add> /**
<add> * @param {Object} options options object
<add> * @param {string=} options.name name of the queue
<add> * @param {number} options.parallelism how many items should be processed at once
<add> * @param {function(T, Callback<R>): void} options.processor async function to process items
<add> */
<add> constructor({ name, parallelism, processor }) {
<add> this._name = name;
<add> this._parallelism = parallelism;
<add> this._processor = processor;
<add> /** @type {Map<T, Callback<R>[]>} */
<add> this._callbacks = new Map();
<add> /** @type {Set<T>} */
<add> this._queued = new Set();
<add> /** @type {Set<T>} */
<add> this._processing = new Set();
<add> /** @type {Map<T, [Error, R]>} */
<add> this._results = new Map();
<add> this._activeTasks = 0;
<add> this._willEnsureProcessing = false;
<add> this._stopped = false;
<add>
<add> this.hooks = {
<add> beforeAdd: new AsyncSeriesHook(["item"]),
<add> added: new SyncHook(["item"]),
<add> beforeStart: new AsyncSeriesHook(["item"]),
<add> started: new SyncHook(["item"]),
<add> result: new SyncHook(["item", "error", "result"])
<add> };
<add>
<add> this._ensureProcessing = this._ensureProcessing.bind(this);
<add> }
<add>
<add> /**
<add> * @param {T} item a item
<add> * @param {Callback<R>} callback callback function
<add> * @returns {void}
<add> */
<add> add(item, callback) {
<add> if (this._stopped) return callback(new Error("Queue was stopped"));
<add> this.hooks.beforeAdd.callAsync(item, err => {
<add> if (err) {
<add> callback(err);
<add> return;
<add> }
<add> const result = this._results.get(item);
<add> if (result !== undefined) {
<add> process.nextTick(() => callback(result[0], result[1]));
<add> return;
<add> }
<add> let callbacks = this._callbacks.get(item);
<add> if (callbacks !== undefined) {
<add> callbacks.push(callback);
<add> return;
<add> }
<add> callbacks = [callback];
<add> this._callbacks.set(item, callbacks);
<add> if (this._stopped) {
<add> this.hooks.added.call(item);
<add> this._activeTasks++;
<add> this._handleResult(item, new Error("Queue was stopped"));
<add> } else {
<add> this._queued.add(item);
<add> if (this._willEnsureProcessing === false) {
<add> this._willEnsureProcessing = true;
<add> process.nextTick(this._ensureProcessing);
<add> }
<add> this.hooks.added.call(item);
<add> }
<add> });
<add> }
<add>
<add> /**
<add> * @param {T} item a item
<add> * @returns {void}
<add> */
<add> invalidate(item) {
<add> this._results.delete(item);
<add> }
<add>
<add> /**
<add> * @returns {void}
<add> */
<add> stop() {
<add> this._stopped = true;
<add> for (const item of this._queued) {
<add> this._activeTasks++;
<add> this._queued.delete(item);
<add> this._handleResult(item, new Error("Queue was stopped"));
<add> }
<add> }
<add>
<add> /**
<add> * @returns {void}
<add> */
<add> increaseParallelism() {
<add> this._parallelism++;
<add> if (this._willEnsureProcessing === false && this._queued.size > 0) {
<add> this._willEnsureProcessing = true;
<add> process.nextTick(this._ensureProcessing);
<add> }
<add> }
<add>
<add> /**
<add> * @returns {void}
<add> */
<add> decreaseParallelism() {
<add> this._parallelism--;
<add> }
<add>
<add> /**
<add> * @param {T} item an item
<add> * @returns {boolean} true, if the item is currently being processed
<add> */
<add> isProcessing(item) {
<add> return this._processing.has(item);
<add> }
<add>
<add> /**
<add> * @param {T} item an item
<add> * @returns {boolean} true, if the item is currently queued
<add> */
<add> isQueued(item) {
<add> return this._queued.has(item);
<add> }
<add>
<add> /**
<add> * @param {T} item an item
<add> * @returns {boolean} true, if the item is currently queued
<add> */
<add> isDone(item) {
<add> return this._results.has(item);
<add> }
<add>
<add> /**
<add> * @returns {void}
<add> */
<add> _ensureProcessing() {
<add> if (this._activeTasks >= this._parallelism) {
<add> this._willEnsureProcessing = false;
<add> return;
<add> }
<add> for (const item of this._queued) {
<add> this._activeTasks++;
<add> this._queued.delete(item);
<add> this._processing.add(item);
<add> this._startProcessing(item);
<add> if (this._activeTasks >= this._parallelism) {
<add> this._willEnsureProcessing = false;
<add> return;
<add> }
<add> }
<add> this._willEnsureProcessing = false;
<add> }
<add>
<add> /**
<add> * @param {T} item an item
<add> * @returns {void}
<add> */
<add> _startProcessing(item) {
<add> this.hooks.beforeStart.callAsync(item, err => {
<add> if (err) {
<add> this._handleResult(item, err);
<add> return;
<add> }
<add> try {
<add> this._processor(item, (e, r) => {
<add> process.nextTick(() => {
<add> this._handleResult(item, e, r);
<add> });
<add> });
<add> } catch (err) {
<add> console.error(err);
<add> this._handleResult(item, err, null);
<add> }
<add> this.hooks.started.call(item);
<add> });
<add> }
<add>
<add> /**
<add> * @param {T} item an item
<add> * @param {Error=} err error, if any
<add> * @param {R=} result result, if any
<add> * @returns {void}
<add> */
<add> _handleResult(item, err, result) {
<add> this.hooks.result.callAsync(item, err, result, hookError => {
<add> const error = hookError || err;
<add>
<add> const callbacks = this._callbacks.get(item);
<add> this._processing.delete(item);
<add> this._results.set(item, [error, result]);
<add> this._callbacks.delete(item);
<add> this._activeTasks--;
<add>
<add> if (this._willEnsureProcessing === false && this._queued.size > 0) {
<add> this._willEnsureProcessing = true;
<add> process.nextTick(this._ensureProcessing);
<add> }
<add>
<add> for (const callback of callbacks) {
<add> callback(error, result);
<add> }
<add> });
<add> }
<add>}
<add>
<add>module.exports = AsyncQueue;
<ide><path>test/NormalModule.unittest.js
<ide> describe("NormalModule", () => {
<ide> expect(normalModule.hasDependencies()).toBe(false);
<ide> });
<ide> });
<del> describe("#needRebuild", () => {
<add> describe("#needBuild", () => {
<ide> let fileTimestamps;
<ide> let contextTimestamps;
<ide> let fileDependencies;
<ide> describe("NormalModule", () => {
<ide> fileTimestamps = new Map([[fileA, 1], [fileB, 1]]);
<ide> contextTimestamps = new Map([[fileA, 1], [fileB, 1]]);
<ide> normalModule.buildTimestamp = 2;
<add> normalModule._forceBuild = false;
<ide> setDeps(fileDependencies, contextDependencies);
<ide> });
<ide> describe("given all timestamps are older than the buildTimestamp", () => {
<ide> it("returns false", () => {
<del> expect(
<del> normalModule.needRebuild(fileTimestamps, contextTimestamps)
<del> ).toBe(false);
<add> expect(normalModule.needBuild(fileTimestamps, contextTimestamps)).toBe(
<add> false
<add> );
<ide> });
<ide> });
<ide> describe("given a file timestamp is newer than the buildTimestamp", () => {
<ide> beforeEach(() => {
<ide> fileTimestamps.set(fileA, 3);
<ide> });
<ide> it("returns true", () => {
<del> expect(
<del> normalModule.needRebuild(fileTimestamps, contextTimestamps)
<del> ).toBe(true);
<add> expect(normalModule.needBuild(fileTimestamps, contextTimestamps)).toBe(
<add> true
<add> );
<ide> });
<ide> });
<ide> describe("given a no file timestamp exists", () => {
<ide> beforeEach(() => {
<ide> fileTimestamps = new Map();
<ide> });
<ide> it("returns true", () => {
<del> expect(
<del> normalModule.needRebuild(fileTimestamps, contextTimestamps)
<del> ).toBe(true);
<add> expect(normalModule.needBuild(fileTimestamps, contextTimestamps)).toBe(
<add> true
<add> );
<ide> });
<ide> });
<ide> describe("given a context timestamp is newer than the buildTimestamp", () => {
<ide> beforeEach(() => {
<ide> contextTimestamps.set(fileA, 3);
<ide> });
<ide> it("returns true", () => {
<del> expect(
<del> normalModule.needRebuild(fileTimestamps, contextTimestamps)
<del> ).toBe(true);
<add> expect(normalModule.needBuild(fileTimestamps, contextTimestamps)).toBe(
<add> true
<add> );
<ide> });
<ide> });
<ide> describe("given a no context timestamp exists", () => {
<ide> beforeEach(() => {
<ide> contextTimestamps = new Map();
<ide> });
<ide> it("returns true", () => {
<del> expect(
<del> normalModule.needRebuild(fileTimestamps, contextTimestamps)
<del> ).toBe(true);
<add> expect(normalModule.needBuild(fileTimestamps, contextTimestamps)).toBe(
<add> true
<add> );
<ide> });
<ide> });
<ide> });
<ide><path>test/RawModule.unittest.js
<ide> describe("RawModule", () => {
<ide> );
<ide> });
<ide>
<del> describe("needRebuild", () => {
<del> it("returns false", () => {
<del> expect(myRawModule.needRebuild()).toBe(false);
<del> });
<del> });
<del>
<ide> describe("source", () => {
<ide> it(
<ide> "returns a new OriginalSource instance with sourceStr attribute and " + | 14 |
Javascript | Javascript | add dot after short months | 8937591a1df6cf9d26795c6e68d1d6ba85a00318 | <ide><path>src/locale/hu.js
<ide> //! moment.js locale configuration
<ide> //! locale : Hungarian [hu]
<ide> //! author : Adam Brunner : https://github.com/adambrunner
<add>//! author : Peter Viszt : https://github.com/passatgt
<ide>
<ide> import moment from '../moment';
<ide>
<ide> export default moment.defineLocale('hu', {
<ide> months: 'január_február_március_április_május_június_július_augusztus_szeptember_október_november_december'.split(
<ide> '_'
<ide> ),
<del> monthsShort: 'jan_feb_márc_ápr_máj_jún_júl_aug_szept_okt_nov_dec'.split(
<add> monthsShort: 'jan._feb._márc._ápr._máj._jún._júl._aug._szept._okt._nov._dec.'.split(
<ide> '_'
<ide> ),
<add> monthsParseExact: true,
<ide> weekdays: 'vasárnap_hétfő_kedd_szerda_csütörtök_péntek_szombat'.split('_'),
<ide> weekdaysShort: 'vas_hét_kedd_sze_csüt_pén_szo'.split('_'),
<ide> weekdaysMin: 'v_h_k_sze_cs_p_szo'.split('_'),
<ide><path>src/test/locale/hu.js
<ide> import moment from '../../moment';
<ide> localeModule('hu');
<ide>
<ide> test('parse', function (assert) {
<del> var tests = 'január jan_február feb_március márc_április ápr_május máj_június jún_július júl_augusztus aug_szeptember szept_október okt_november nov_december dec'.split(
<add> var tests = 'január jan._február feb._március márc._április ápr._május máj._június jún._július júl._augusztus aug._szeptember szept._október okt._november nov._december dec.'.split(
<ide> '_'
<ide> ),
<ide> i;
<ide> test('format', function (assert) {
<ide> 'vasárnap, február 14. 2010, 15:25:50',
<ide> ],
<ide> ['ddd, HH', 'vas, 15'],
<del> ['M Mo MM MMMM MMM', '2 2. 02 február feb'],
<add> ['M Mo MM MMMM MMM', '2 2. 02 február feb.'],
<ide> ['YYYY YY', '2010 10'],
<ide> ['D Do DD', '14 14. 14'],
<ide> ['d do dddd ddd dd', '0 0. vasárnap vas v'],
<ide> test('format', function (assert) {
<ide> ['LLL', '2010. február 14. 15:25'],
<ide> ['LLLL', '2010. február 14., vasárnap 15:25'],
<ide> ['l', '2010.2.14.'],
<del> ['ll', '2010. feb 14.'],
<del> ['lll', '2010. feb 14. 15:25'],
<del> ['llll', '2010. feb 14., vas 15:25'],
<add> ['ll', '2010. feb. 14.'],
<add> ['lll', '2010. feb. 14. 15:25'],
<add> ['llll', '2010. feb. 14., vas 15:25'],
<ide> ],
<ide> b = moment(new Date(2010, 1, 14, 15, 25, 50, 125)),
<ide> i;
<ide> test('format ordinal', function (assert) {
<ide> });
<ide>
<ide> test('format month', function (assert) {
<del> var expected = 'január jan_február feb_március márc_április ápr_május máj_június jún_július júl_augusztus aug_szeptember szept_október okt_november nov_december dec'.split(
<add> var expected = 'január jan._február feb._március márc._április ápr._május máj._június jún._július júl._augusztus aug._szeptember szept._október okt._november nov._december dec.'.split(
<ide> '_'
<ide> ),
<ide> i; | 2 |
Go | Go | move container exit state to after cleanup | e192ce4009865ef96383f3162b40f58d52596790 | <ide><path>daemon/monitor.go
<ide> func (daemon *Daemon) handleContainerExit(c *container.Container, e *libcontaine
<ide> }
<ide>
<ide> restart, wait, err := c.RestartManager().ShouldRestart(ec, daemon.IsShuttingDown() || c.HasBeenManuallyStopped, time.Since(c.StartedAt))
<add>
<add> // cancel healthcheck here, they will be automatically
<add> // restarted if/when the container is started again
<add> daemon.stopHealthchecks(c)
<add> attributes := map[string]string{
<add> "exitCode": strconv.Itoa(int(ec)),
<add> }
<add> daemon.Cleanup(c)
<add>
<ide> if err == nil && restart {
<ide> c.RestartCount++
<ide> c.SetRestarting(&exitStatus)
<ide> func (daemon *Daemon) handleContainerExit(c *container.Container, e *libcontaine
<ide> }
<ide> defer c.Unlock() // needs to be called before autoRemove
<ide>
<del> // cancel healthcheck here, they will be automatically
<del> // restarted if/when the container is started again
<del> daemon.stopHealthchecks(c)
<del> attributes := map[string]string{
<del> "exitCode": strconv.Itoa(int(ec)),
<del> }
<del> daemon.LogContainerEventWithAttributes(c, "die", attributes)
<del> daemon.Cleanup(c)
<ide> daemon.setStateCounter(c)
<ide> cpErr := c.CheckpointTo(daemon.containersReplica)
<ide>
<add> daemon.LogContainerEventWithAttributes(c, "die", attributes)
<add>
<ide> if err == nil && restart {
<ide> go func() {
<ide> err := <-wait | 1 |
Java | Java | simplify operationzip itemobserver | f3dbf3c03919e39c2ce6a2e3d94216534a7bb5c8 | <ide><path>rxjava-core/src/main/java/rx/operators/OperationZip.java
<ide> public void onNext(List<T> value) {
<ide> /** Reader-writer lock. */
<ide> protected final ReadWriteLock rwLock;
<ide> /** The queue. */
<del> public final Queue<Object> queue = new LinkedList<Object>();
<add> public final Queue<T> queue = new LinkedList<T>();
<ide> /** The list of the other observers. */
<ide> public final List<ItemObserver<T>> all;
<del> /** The null sentinel value. */
<del> protected static final Object NULL_SENTINEL = new Object();
<ide> /** The global cancel. */
<ide> protected final Subscription cancel;
<ide> /** The subscription to the source. */
<ide> public void onNext(T value) {
<ide> if (done) {
<ide> return;
<ide> }
<del> queue.add(value != null ? value : NULL_SENTINEL);
<add> queue.add(value);
<ide> } finally {
<ide> rwLock.readLock().unlock();
<ide> }
<ide> public void unsubscribe() {
<ide> toSource.unsubscribe();
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> private void runCollector() {
<ide> if (rwLock.writeLock().tryLock()) {
<ide> boolean cu = false;
<ide> private void runCollector() {
<ide> cu = true;
<ide> return;
<ide> }
<del> continue;
<add> } else {
<add> T value = io.queue.peek();
<add> values.add(value);
<ide> }
<del> Object v = io.queue.peek();
<del> if (v == NULL_SENTINEL) {
<del> v = null;
<del> }
<del> values.add((T) v);
<ide> }
<ide> if (values.size() == all.size()) {
<ide> for (ItemObserver<T> io : all) { | 1 |
Python | Python | add tf.float32 to unittest args | 5c0c749b5e4fdf0588ae929008cd0036f0f83b9b | <ide><path>official/resnet/cifar10_test.py
<ide> def test_dataset_input_fn(self):
<ide> fake_dataset = tf.data.FixedLengthRecordDataset(
<ide> filename, cifar10_main._RECORD_BYTES) # pylint: disable=protected-access
<ide> fake_dataset = fake_dataset.map(
<del> lambda val: cifar10_main.parse_record(val, False))
<add> lambda val: cifar10_main.parse_record(val, False, tf.float32))
<ide> image, label = fake_dataset.make_one_shot_iterator().get_next()
<ide>
<ide> self.assertAllEqual(label.shape, ()) | 1 |
Ruby | Ruby | add spdx module | 753b8621dfe3630e9e78ed286b59980c8efc9641 | <ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> require "formula_versions"
<ide> require "utils/curl"
<ide> require "utils/notability"
<add>require "utils/spdx"
<ide> require "extend/ENV"
<ide> require "formula_cellar_checks"
<ide> require "cmd/search"
<ide> def audit
<ide> # Check style in a single batch run up front for performance
<ide> style_results = Style.check_style_json(style_files, options) if style_files
<ide> # load licenses
<del> spdx = HOMEBREW_LIBRARY_PATH/"data/spdx.json"
<del> spdx_data = JSON.parse(spdx.read)
<add> spdx_data = SPDX.spdx_data
<ide> new_formula_problem_lines = []
<ide> audit_formulae.sort.each do |f|
<ide> only = only_cops ? ["style"] : args.only
<ide><path>Library/Homebrew/dev-cmd/update-license-data.rb
<ide> # frozen_string_literal: true
<ide>
<ide> require "cli/parser"
<del>require "utils/github"
<add>require "utils/spdx"
<ide>
<ide> module Homebrew
<ide> module_function
<ide>
<del> SPDX_PATH = (HOMEBREW_LIBRARY_PATH/"data/spdx.json").freeze
<del> SPDX_API_URL = "https://api.github.com/repos/spdx/license-list-data/releases/latest"
<del>
<ide> def update_license_data_args
<ide> Homebrew::CLI::Parser.new do
<ide> usage_banner <<~EOS
<ide> def update_license_data
<ide> args = update_license_data_args.parse
<ide> ohai "Updating SPDX license data..."
<ide>
<del> latest_tag = GitHub.open_api(SPDX_API_URL)["tag_name"]
<del> data_url = "https://raw.githubusercontent.com/spdx/license-list-data/#{latest_tag}/json/licenses.json"
<del> curl_download(data_url, to: SPDX_PATH, partial: false)
<add> SPDX.download_latest_license_data!
<ide>
<del> Homebrew.failed = system("git", "diff", "--stat", "--exit-code", SPDX_PATH) if args.fail_if_not_changed?
<add> Homebrew.failed = system("git", "diff", "--stat", "--exit-code", SPDX::JSON_PATH) if args.fail_if_not_changed?
<ide>
<ide> return unless args.commit?
<ide>
<ide> ohai "git add"
<del> safe_system "git", "add", SPDX_PATH
<add> safe_system "git", "add", SPDX::JSON_PATH
<ide> ohai "git commit"
<ide> system "git", "commit", "--message", "data/spdx.json: update to #{latest_tag}"
<ide> end
<ide><path>Library/Homebrew/utils/spdx.rb
<add># frozen_string_literal: true
<add>
<add>require "utils/github"
<add>
<add>module SPDX
<add> module_function
<add>
<add> JSON_PATH = (HOMEBREW_LIBRARY_PATH/"data/spdx.json").freeze
<add> API_URL = "https://api.github.com/repos/spdx/license-list-data/releases/latest"
<add>
<add> def spdx_data
<add> @spdx_data ||= JSON.parse(JSON_PATH.read)
<add> end
<add>
<add> def download_latest_license_data!
<add> latest_tag = GitHub.open_api(API_URL)["tag_name"]
<add> data_url = "https://raw.githubusercontent.com/spdx/license-list-data/#{latest_tag}/json/licenses.json"
<add> curl_download(data_url, to: JSON_PATH, partial: false)
<add> end
<add>end | 3 |
PHP | PHP | add missing scheduler tests | ec98e98f8609b3e56400829e5ebe108c45517037 | <ide><path>tests/Console/Scheduling/FrequencyTest.php
<ide> public function testEveryXMinutes()
<ide> $this->assertSame('*/3 * * * *', $this->event->everyThreeMinutes()->getExpression());
<ide> $this->assertSame('*/4 * * * *', $this->event->everyFourMinutes()->getExpression());
<ide> $this->assertSame('*/5 * * * *', $this->event->everyFiveMinutes()->getExpression());
<add> $this->assertSame('*/10 * * * *', $this->event->everyTenMinutes()->getExpression());
<add> $this->assertSame('*/15 * * * *', $this->event->everyFifteenMinutes()->getExpression());
<add> $this->assertSame('0,30 * * * *', $this->event->everyThirtyMinutes()->getExpression());
<ide> }
<ide>
<ide> public function testDaily()
<ide> {
<ide> $this->assertSame('0 0 * * *', $this->event->daily()->getExpression());
<ide> }
<ide>
<add> public function testDailyAt()
<add> {
<add> $this->assertSame('8 13 * * *', $this->event->dailyAt('13:08')->getExpression());
<add> }
<add>
<ide> public function testTwiceDaily()
<ide> {
<ide> $this->assertSame('0 3,15 * * *', $this->event->twiceDaily(3, 15)->getExpression());
<ide> }
<ide>
<add> public function testWeekly()
<add> {
<add> $this->assertSame('0 0 * * 0', $this->event->weekly()->getExpression());
<add> }
<add>
<add> public function testWeeklyOn()
<add> {
<add> $this->assertSame('0 8 * * 1', $this->event->weeklyOn(1, '8:00')->getExpression());
<add> }
<add>
<ide> public function testOverrideWithHourly()
<ide> {
<ide> $this->assertSame('0 * * * *', $this->event->everyFiveMinutes()->hourly()->getExpression());
<ide> public function testHourly()
<ide> $this->assertSame('0 */6 * * *', $this->event->everySixHours()->getExpression());
<ide> }
<ide>
<add> public function testMonthly()
<add> {
<add> $this->assertSame('0 0 1 * *', $this->event->monthly()->getExpression());
<add> }
<add>
<ide> public function testMonthlyOn()
<ide> {
<ide> $this->assertSame('0 15 4 * *', $this->event->monthlyOn(4, '15:00')->getExpression());
<ide> }
<ide>
<add> public function testLastDayOfMonth()
<add> {
<add> $this->assertSame('0 0 31 * *', $this->event->lastDayOfMonth()->getExpression());
<add> }
<add>
<ide> public function testTwiceMonthly()
<ide> {
<ide> $this->assertSame('0 0 1,16 * *', $this->event->twiceMonthly(1, 16)->getExpression());
<ide> public function testQuarterly()
<ide> $this->assertSame('0 0 1 1-12/3 *', $this->event->quarterly()->getExpression());
<ide> }
<ide>
<add> public function testYearly()
<add> {
<add> $this->assertSame('0 0 1 1 *', $this->event->yearly()->getExpression());
<add> }
<add>
<ide> public function testFrequencyMacro()
<ide> {
<ide> Event::macro('everyXMinutes', function ($x) { | 1 |
Javascript | Javascript | add newlines at end of km lang file and tests | 40df767355160bf6c26be34153fe01e91b40019e | <ide><path>lang/km.js
<ide> doy: 4 // The week that contains Jan 4th is the first week of the year.
<ide> }
<ide> });
<del>}));
<ide>\ No newline at end of file
<add>}));
<ide><path>test/lang/km.js
<ide> exports["lang:km"] = {
<ide>
<ide> test.done();
<ide> }
<del>};
<ide>\ No newline at end of file
<add>}; | 2 |
Javascript | Javascript | fix obj loader with empty uvs or normals | 171de9dfbbe6c0e133408c1c0fd5e57c544690d7 | <ide><path>examples/js/loaders/OBJLoader.js
<ide> THREE.OBJLoader = ( function () {
<ide>
<ide> this.addVertex( ia, ib, ic );
<ide>
<del> if ( ua !== undefined ) {
<add> if ( ua !== undefined && ua !== '' ) {
<ide>
<ide> var uvLen = this.uvs.length;
<del>
<ide> ia = this.parseUVIndex( ua, uvLen );
<ide> ib = this.parseUVIndex( ub, uvLen );
<ide> ic = this.parseUVIndex( uc, uvLen );
<del>
<ide> this.addUV( ia, ib, ic );
<ide>
<ide> }
<ide>
<del> if ( na !== undefined ) {
<add> if ( na !== undefined && na !== '' ) {
<ide>
<ide> // Normals are many times the same. If so, skip function call and parseInt.
<ide> var nLen = this.normals.length; | 1 |
Text | Text | fix comma of the list in worker_threads.md | 296712602b4cba785ccda72623c0cbe3b4584abb | <ide><path>doc/api/worker_threads.md
<ide> Notable differences inside a Worker environment are:
<ide> - The [`process.stdin`][], [`process.stdout`][] and [`process.stderr`][]
<ide> may be redirected by the parent thread.
<ide> - The [`require('worker_threads').isMainThread`][] property is set to `false`.
<del>- The [`require('worker_threads').parentPort`][] message port is available,
<add>- The [`require('worker_threads').parentPort`][] message port is available.
<ide> - [`process.exit()`][] does not stop the whole program, just the single thread,
<ide> and [`process.abort()`][] is not available.
<ide> - [`process.chdir()`][] and `process` methods that set group or user ids | 1 |
Text | Text | add yorkie to collaborators | 841c7025a99719aad0c621dc4f57bfeefd789b5d | <ide><path>README.md
<ide> information about the governance of the Node.js project, see
<ide> * [tunniclm](https://github.com/tunniclm) - **Mike Tunnicliffe** <[email protected]>
<ide> * [vkurchatkin](https://github.com/vkurchatkin) - **Vladimir Kurchatkin** <[email protected]>
<ide> * [whitlockjc](https://github.com/whitlockjc) - **Jeremy Whitlock** <[email protected]>
<add>* [yorkie](https://github.com/yorkie) - **Yorkie Liu** <[email protected]>
<ide> * [yosuke-furukawa](https://github.com/yosuke-furukawa) - **Yosuke Furukawa** <[email protected]>
<ide> * [zkat](https://github.com/zkat) - **Kat Marchán** <[email protected]>
<ide> | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.