Dataset columns (type and observed range):

content_type: stringclasses, 8 values
main_lang: stringclasses, 7 values
message: stringlengths, 1 to 50
sha: stringlengths, 40 to 40
patch: stringlengths, 52 to 962k
file_count: int64, 1 to 300
Javascript
Javascript
add example of default/module export types
e266d3e4d7533f7fea8da7f22144b96f6e066bea
<ide><path>type-definitions/tests/immutable-flow.js <ide> const Immutable2Range = Immutable2.Range <ide> const Immutable2Repeat = Immutable2.Repeat <ide> const Immutable2IndexedSeq = Immutable2.IndexedSeq <ide> <add>var defaultExport: List<*> = Immutable.List(); <add>var moduleExport: List<*> = Immutable2.List(); <add> <ide> var numberList: List<number> = List() <ide> var numberOrStringList: List<string | number> = List() <ide> var nullableNumberList: List<?number> = List()
1
Ruby
Ruby
remove stuff that was deprecated in 2-1-stable
a5cdb7a813515fa0cbee23101d2f911b4017ed90
<ide><path>actionpack/lib/action_controller/caching/fragments.rb <ide> module Caching <ide> # <ide> # expire_fragment(:controller => "topics", :action => "list", :action_suffix => "all_topics") <ide> module Fragments <del> def self.included(base) #:nodoc: <del> base.class_eval do <del> class << self <del> def fragment_cache_store=(store_option) #:nodoc: <del> ActiveSupport::Deprecation.warn('The fragment_cache_store= method is now use cache_store=') <del> self.cache_store = store_option <del> end <del> <del> def fragment_cache_store #:nodoc: <del> ActiveSupport::Deprecation.warn('The fragment_cache_store method is now use cache_store') <del> cache_store <del> end <del> end <del> <del> def fragment_cache_store=(store_option) #:nodoc: <del> ActiveSupport::Deprecation.warn('The fragment_cache_store= method is now use cache_store=') <del> self.cache_store = store_option <del> end <del> <del> def fragment_cache_store #:nodoc: <del> ActiveSupport::Deprecation.warn('The fragment_cache_store method is now use cache_store') <del> cache_store <del> end <del> end <del> end <del> <ide> # Given a key (as described in <tt>expire_fragment</tt>), returns a key suitable for use in reading, <ide> # writing, or expiring a cached fragment. If the key is a hash, the generated key is the return <ide> # value of url_for on that hash (without the protocol). All keys are prefixed with "views/" and uses <ide><path>actionpack/lib/action_view/base.rb <ide> class << self <ide> delegate :logger, :to => 'ActionController::Base' <ide> end <ide> <del> def self.cache_template_loading=(*args) <del> ActiveSupport::Deprecation.warn( <del> "config.action_view.cache_template_loading option has been deprecated" + <del> "and has no effect. Please remove it from your config files.", caller) <del> end <del> <del> def self.cache_template_extensions=(*args) <del> ActiveSupport::Deprecation.warn( <del> "config.action_view.cache_template_extensions option has been" + <del> "deprecated and has no effect. 
Please remove it from your config files.", caller) <del> end <del> <ide> # Templates that are exempt from layouts <ide> @@exempt_from_layout = Set.new([/\.rjs$/]) <ide> <ide> def render(options = {}, local_assigns = {}, &block) #:nodoc: <ide> if options[:layout] <ide> _render_with_layout(options, local_assigns, &block) <ide> elsif options[:file] <del> if options[:use_full_path] <del> ActiveSupport::Deprecation.warn("use_full_path option has been deprecated and has no affect.", caller) <del> end <del> <ide> _pick_template(options[:file]).render_template(self, options[:locals]) <ide> elsif options[:partial] <ide> render_partial(options) <ide><path>activesupport/lib/active_support.rb <ide> require 'active_support/rescuable' <ide> <ide> I18n.load_path << File.dirname(__FILE__) + '/active_support/locale/en-US.yml' <del> <del>Inflector = ActiveSupport::Deprecation::DeprecatedConstantProxy.new('Inflector', 'ActiveSupport::Inflector') <del>Dependencies = ActiveSupport::Deprecation::DeprecatedConstantProxy.new('Dependencies', 'ActiveSupport::Dependencies') <del>TimeZone = ActiveSupport::Deprecation::DeprecatedConstantProxy.new('TimeZone', 'ActiveSupport::TimeZone') <ide><path>activesupport/test/dependencies_test.rb <ide> def test_qualified_const_defined <ide> assert ActiveSupport::Dependencies.qualified_const_defined?("Object") <ide> assert ActiveSupport::Dependencies.qualified_const_defined?("::Object") <ide> assert ActiveSupport::Dependencies.qualified_const_defined?("::Object::Kernel") <del> assert ActiveSupport::Dependencies.qualified_const_defined?("::Object::Dependencies") <ide> assert ActiveSupport::Dependencies.qualified_const_defined?("::Test::Unit::TestCase") <ide> end <ide>
4
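The Rails shims deleted above follow the usual deprecation pattern: keep the old method name, emit a warning, and delegate to the new name (fragment_cache_store= forwarding to cache_store=) until the old entry point can be dropped. As an illustration of that generic pattern only, not of the Rails code itself, here is a minimal Go sketch; the names Store, SetCacheStore and SetFragmentCacheStore are hypothetical.

```go
package main

import "log"

// Store holds the configured cache backend name (hypothetical type).
type Store struct {
	backend string
}

// SetCacheStore is the current, supported setter.
func (s *Store) SetCacheStore(backend string) {
	s.backend = backend
}

// SetFragmentCacheStore is the old entry point kept only as a shim.
//
// Deprecated: use SetCacheStore. It warns and delegates, mirroring the
// shims the commit above removes once the deprecation window has passed.
func (s *Store) SetFragmentCacheStore(backend string) {
	log.Println("SetFragmentCacheStore is deprecated; use SetCacheStore")
	s.SetCacheStore(backend)
}

func main() {
	s := &Store{}
	s.SetFragmentCacheStore("memory") // still works, but logs a warning
	log.Println("backend:", s.backend)
}
```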
Ruby
Ruby
avoid repeated interpolation here too
010a1461f7fd02f14b9cfe9bd3c9018fddcb4431
<ide><path>Library/Homebrew/cmd/doctor.rb <ide> def check_for_broken_symlinks <ide> Keg::PRUNEABLE_DIRECTORIES.each do |d| <ide> next unless d.directory? <ide> d.find do |pn| <del> broken_symlinks << pn if pn.symlink? and pn.readlink.expand_path.to_s =~ /^#{HOMEBREW_PREFIX}/ and not pn.exist? <add> broken_symlinks << pn if pn.symlink? and pn.readlink.expand_path.to_s =~ /^#{HOMEBREW_PREFIX}/o and not pn.exist? <ide> end <ide> end <ide> unless broken_symlinks.empty? then <<-EOS.undent
1
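The single-character change above appends Ruby's /o modifier so the regexp built from HOMEBREW_PREFIX is interpolated only once rather than on every iteration of the find loop. A minimal Go sketch of the same idea, compiling the pattern once before the loop, is below; the prefix value and paths are hypothetical stand-ins.

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	prefix := "/usr/local" // hypothetical stand-in for HOMEBREW_PREFIX
	// Build the interpolated pattern once, before the loop, rather than
	// recompiling it for every path that is checked.
	prefixRe := regexp.MustCompile("^" + regexp.QuoteMeta(prefix))

	paths := []string{"/usr/local/bin/tool", "/opt/elsewhere/file"}
	for _, p := range paths {
		if prefixRe.MatchString(p) {
			fmt.Println("under prefix:", p)
		}
	}
}
```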
Go
Go
update libcontainer references
cee6f4506c79c6fc21769d427ac4dd51c28450c3
<ide><path>daemon/execdriver/lxc/lxc_init_linux.go <ide> func finalizeNamespace(args *execdriver.InitArgs) error { <ide> <ide> if !args.Privileged { <ide> // drop capabilities in bounding set before changing user <del> if err := capabilities.DropBoundingSet(container); err != nil { <add> if err := capabilities.DropBoundingSet(container.Capabilities); err != nil { <ide> return fmt.Errorf("drop bounding set %s", err) <ide> } <ide> <ide> func finalizeNamespace(args *execdriver.InitArgs) error { <ide> } <ide> <ide> // drop all other capabilities <del> if err := capabilities.DropCapabilities(container); err != nil { <add> if err := capabilities.DropCapabilities(container.Capabilities); err != nil { <ide> return fmt.Errorf("drop capabilities %s", err) <ide> } <ide> } <ide><path>daemon/execdriver/native/configuration/parse.go <ide> func dropNamespace(container *libcontainer.Container, context interface{}, value <ide> func readonlyFs(container *libcontainer.Container, context interface{}, value string) error { <ide> switch value { <ide> case "1", "true": <del> container.ReadonlyFs = true <add> container.MountConfig.ReadonlyFs = true <ide> default: <del> container.ReadonlyFs = false <add> container.MountConfig.ReadonlyFs = false <ide> } <ide> return nil <ide> } <ide> func joinNetNamespace(container *libcontainer.Container, context interface{}, va <ide> if cmd == nil || cmd.Process == nil { <ide> return fmt.Errorf("%s is not a valid running container to join", value) <ide> } <add> <ide> nspath := filepath.Join("/proc", fmt.Sprint(cmd.Process.Pid), "ns", "net") <ide> container.Networks = append(container.Networks, &libcontainer.Network{ <del> Type: "netns", <del> Context: libcontainer.Context{ <del> "nspath": nspath, <del> }, <add> Type: "netns", <add> NsPath: nspath, <ide> }) <del> return nil <del>} <ide> <del>func vethMacAddress(container *libcontainer.Container, context interface{}, value string) error { <del> var veth *libcontainer.Network <del> for _, network := range container.Networks { <del> if network.Type == "veth" { <del> veth = network <del> break <del> } <del> } <del> if veth == nil { <del> return fmt.Errorf("not veth configured for container") <del> } <del> veth.Context["mac"] = value <ide> return nil <ide> } <ide> <ide><path>daemon/execdriver/native/configuration/parse_test.go <ide> package configuration <ide> import ( <ide> "testing" <ide> <del> "github.com/docker/libcontainer" <add> "github.com/docker/libcontainer/security/capabilities" <ide> "github.com/dotcloud/docker/daemon/execdriver/native/template" <ide> ) <ide> <ide> func TestSetReadonlyRootFs(t *testing.T) { <ide> } <ide> ) <ide> <del> if container.ReadonlyFs { <add> if container.MountConfig.ReadonlyFs { <ide> t.Fatal("container should not have a readonly rootfs by default") <ide> } <ide> if err := ParseConfiguration(container, nil, opts); err != nil { <ide> t.Fatal(err) <ide> } <ide> <del> if !container.ReadonlyFs { <add> if !container.MountConfig.ReadonlyFs { <ide> t.Fatal("container should have a readonly rootfs") <ide> } <ide> } <ide> func TestDropCap(t *testing.T) { <ide> } <ide> ) <ide> // enabled all caps like in privileged mode <del> container.Capabilities = libcontainer.GetAllCapabilities() <add> container.Capabilities = capabilities.GetAllCapabilities() <ide> if err := ParseConfiguration(container, nil, opts); err != nil { <ide> t.Fatal(err) <ide> } <ide><path>daemon/execdriver/native/create.go <ide> import ( <ide> "github.com/docker/libcontainer" <ide> "github.com/docker/libcontainer/apparmor" <ide> 
"github.com/docker/libcontainer/devices" <add> "github.com/docker/libcontainer/mount" <add> "github.com/docker/libcontainer/security/capabilities" <ide> "github.com/dotcloud/docker/daemon/execdriver" <ide> "github.com/dotcloud/docker/daemon/execdriver/native/configuration" <ide> "github.com/dotcloud/docker/daemon/execdriver/native/template" <ide> func (d *driver) createContainer(c *execdriver.Command) (*libcontainer.Container <ide> container.Env = c.Env <ide> container.Cgroups.Name = c.ID <ide> container.Cgroups.AllowedDevices = c.AllowedDevices <del> container.DeviceNodes = c.AutoCreatedDevices <add> container.MountConfig.DeviceNodes = c.AutoCreatedDevices <add> <ide> // check to see if we are running in ramdisk to disable pivot root <del> container.NoPivotRoot = os.Getenv("DOCKER_RAMDISK") != "" <add> container.MountConfig.NoPivotRoot = os.Getenv("DOCKER_RAMDISK") != "" <ide> container.Context["restrictions"] = "true" <ide> <ide> if err := d.createNetwork(container, c); err != nil { <ide> return nil, err <ide> } <add> <ide> if c.Privileged { <ide> if err := d.setPrivileged(container); err != nil { <ide> return nil, err <ide> } <ide> } <add> <ide> if err := d.setupCgroups(container, c); err != nil { <ide> return nil, err <ide> } <add> <ide> if err := d.setupMounts(container, c); err != nil { <ide> return nil, err <ide> } <add> <ide> if err := d.setupLabels(container, c); err != nil { <ide> return nil, err <ide> } <add> <ide> cmds := make(map[string]*exec.Cmd) <ide> d.Lock() <ide> for k, v := range d.activeContainers { <ide> cmds[k] = v.cmd <ide> } <ide> d.Unlock() <add> <ide> if err := configuration.ParseConfiguration(container, cmds, c.Config["native"]); err != nil { <ide> return nil, err <ide> } <add> <ide> return container, nil <ide> } <ide> <ide> func (d *driver) createNetwork(container *libcontainer.Container, c *execdriver. <ide> container.Namespaces["NEWNET"] = false <ide> return nil <ide> } <add> <ide> container.Networks = []*libcontainer.Network{ <ide> { <ide> Mtu: c.Network.Mtu, <ide> Address: fmt.Sprintf("%s/%d", "127.0.0.1", 0), <ide> Gateway: "localhost", <ide> Type: "loopback", <del> Context: libcontainer.Context{}, <ide> }, <ide> } <ide> <ide> if c.Network.Interface != nil { <ide> vethNetwork := libcontainer.Network{ <del> Mtu: c.Network.Mtu, <del> Address: fmt.Sprintf("%s/%d", c.Network.Interface.IPAddress, c.Network.Interface.IPPrefixLen), <del> Gateway: c.Network.Interface.Gateway, <del> Type: "veth", <del> Context: libcontainer.Context{ <del> "prefix": "veth", <del> "bridge": c.Network.Interface.Bridge, <del> }, <add> Mtu: c.Network.Mtu, <add> Address: fmt.Sprintf("%s/%d", c.Network.Interface.IPAddress, c.Network.Interface.IPPrefixLen), <add> Gateway: c.Network.Interface.Gateway, <add> Type: "veth", <add> Bridge: c.Network.Interface.Bridge, <add> VethPrefix: "veth", <ide> } <ide> container.Networks = append(container.Networks, &vethNetwork) <ide> } <ide> func (d *driver) createNetwork(container *libcontainer.Container, c *execdriver. 
<ide> d.Lock() <ide> active := d.activeContainers[c.Network.ContainerID] <ide> d.Unlock() <add> <ide> if active == nil || active.cmd.Process == nil { <ide> return fmt.Errorf("%s is not a valid running container to join", c.Network.ContainerID) <ide> } <ide> cmd := active.cmd <ide> <ide> nspath := filepath.Join("/proc", fmt.Sprint(cmd.Process.Pid), "ns", "net") <ide> container.Networks = append(container.Networks, &libcontainer.Network{ <del> Type: "netns", <del> Context: libcontainer.Context{ <del> "nspath": nspath, <del> }, <add> Type: "netns", <add> NsPath: nspath, <ide> }) <ide> } <add> <ide> return nil <ide> } <ide> <ide> func (d *driver) setPrivileged(container *libcontainer.Container) (err error) { <del> container.Capabilities = libcontainer.GetAllCapabilities() <add> container.Capabilities = capabilities.GetAllCapabilities() <ide> container.Cgroups.AllowAllDevices = true <ide> <ide> hostDeviceNodes, err := devices.GetHostDeviceNodes() <ide> if err != nil { <ide> return err <ide> } <del> container.DeviceNodes = hostDeviceNodes <add> container.MountConfig.DeviceNodes = hostDeviceNodes <ide> <ide> delete(container.Context, "restrictions") <ide> <ide> if apparmor.IsEnabled() { <ide> container.Context["apparmor_profile"] = "unconfined" <ide> } <add> <ide> return nil <ide> } <ide> <ide> func (d *driver) setupCgroups(container *libcontainer.Container, c *execdriver.C <ide> container.Cgroups.MemorySwap = c.Resources.MemorySwap <ide> container.Cgroups.CpusetCpus = c.Resources.Cpuset <ide> } <add> <ide> return nil <ide> } <ide> <ide> func (d *driver) setupMounts(container *libcontainer.Container, c *execdriver.Command) error { <ide> for _, m := range c.Mounts { <del> container.Mounts = append(container.Mounts, libcontainer.Mount{ <add> container.MountConfig.Mounts = append(container.MountConfig.Mounts, mount.Mount{ <ide> Type: "bind", <ide> Source: m.Source, <ide> Destination: m.Destination, <ide> Writable: m.Writable, <ide> Private: m.Private, <ide> }) <ide> } <add> <ide> return nil <ide> } <ide> <ide> func (d *driver) setupLabels(container *libcontainer.Container, c *execdriver.Command) error { <ide> container.Context["process_label"] = c.Config["process_label"][0] <ide> container.Context["mount_label"] = c.Config["mount_label"][0] <add> <ide> return nil <ide> } <ide><path>daemon/execdriver/native/template/default_template.go <ide> func New() *libcontainer.Container { <ide> Parent: "docker", <ide> AllowAllDevices: false, <ide> }, <del> Context: libcontainer.Context{}, <add> MountConfig: &libcontainer.MountConfig{}, <add> Context: make(map[string]string), <ide> } <add> <ide> if apparmor.IsEnabled() { <ide> container.Context["apparmor_profile"] = "docker-default" <ide> } <add> <ide> return container <ide> }
5
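Most of the mechanical churn in the patch above tracks an upstream libcontainer API change: mount-related fields (ReadonlyFs, DeviceNodes, Mounts, NoPivotRoot) moved under a nested MountConfig struct, and the capability helpers now take the Capabilities slice instead of the whole container. A rough Go sketch of that kind of regrouping, using hypothetical Config and MountConfig types rather than the real libcontainer ones:

```go
package main

import "fmt"

// MountConfig groups the mount-related settings that previously lived
// directly on Config (hypothetical types mirroring the refactor).
type MountConfig struct {
	ReadonlyFs  bool
	DeviceNodes []string
}

type Config struct {
	Capabilities []string
	MountConfig  *MountConfig
}

// dropBoundingSet takes only the capability list rather than the whole
// config, matching the narrower-argument style of the patch.
func dropBoundingSet(caps []string) {
	fmt.Println("dropping capabilities outside set:", caps)
}

func main() {
	c := &Config{
		Capabilities: []string{"CAP_CHOWN"},
		MountConfig:  &MountConfig{ReadonlyFs: true},
	}
	dropBoundingSet(c.Capabilities)
	fmt.Println("readonly rootfs:", c.MountConfig.ReadonlyFs)
}
```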
Java
Java
relax test assertion when jacoco is enabled
5a51351fa4e582571a442b331fb3f2bec0e63f30
<ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/ResponseBodyResultHandlerTests.java <ide> public void supports() throws NoSuchMethodException { <ide> method = on(TestController.class).annotPresent(ResponseBody.class).resolveMethod(); <ide> testSupports(controller, method); <ide> <del> method = on(TestController.class).annotNotPresent(ResponseBody.class).resolveMethod(); <add> method = on(TestController.class).annotNotPresent(ResponseBody.class).resolveMethod("doWork"); <ide> HandlerResult handlerResult = getHandlerResult(controller, method); <ide> assertFalse(this.resultHandler.supports(handlerResult)); <ide> }
1
Java
Java
remove jibx support
3c8724ba3d0375e4a50354c15383972fee788e9c
<ide><path>spring-oxm/src/main/java/org/springframework/oxm/config/JibxMarshallerBeanDefinitionParser.java <del>/* <del> * Copyright 2002-2019 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.oxm.config; <del> <del>import org.w3c.dom.Element; <del> <del>import org.springframework.beans.factory.xml.AbstractSimpleBeanDefinitionParser; <del> <del>/** <del> * Parser for the {@code <oxm:jibx-marshaller/>} element. <del> * <del> * @author Arjen Poutsma <del> * @since 3.0 <del> * @deprecated as of Spring Framework 5.1.5, due to the lack of activity on the JiBX project <del> */ <del>@Deprecated <del>class JibxMarshallerBeanDefinitionParser extends AbstractSimpleBeanDefinitionParser { <del> <del> @Override <del> protected String getBeanClassName(Element element) { <del> return "org.springframework.oxm.jibx.JibxMarshaller"; <del> } <del> <del>} <ide><path>spring-oxm/src/main/java/org/springframework/oxm/config/OxmNamespaceHandler.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2021 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public class OxmNamespaceHandler extends NamespaceHandlerSupport { <ide> <ide> @Override <del> @SuppressWarnings("deprecation") <ide> public void init() { <ide> registerBeanDefinitionParser("jaxb2-marshaller", new Jaxb2MarshallerBeanDefinitionParser()); <del> registerBeanDefinitionParser("jibx-marshaller", new JibxMarshallerBeanDefinitionParser()); <ide> } <ide> <ide> } <ide><path>spring-oxm/src/main/java/org/springframework/oxm/jibx/JibxMarshaller.java <del>/* <del> * Copyright 2002-2019 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. 
<del> */ <del> <del>package org.springframework.oxm.jibx; <del> <del>import java.io.ByteArrayInputStream; <del>import java.io.ByteArrayOutputStream; <del>import java.io.IOException; <del>import java.io.InputStream; <del>import java.io.OutputStream; <del>import java.io.Reader; <del>import java.io.Writer; <del> <del>import javax.xml.stream.XMLEventReader; <del>import javax.xml.stream.XMLEventWriter; <del>import javax.xml.stream.XMLStreamException; <del>import javax.xml.stream.XMLStreamReader; <del>import javax.xml.stream.XMLStreamWriter; <del>import javax.xml.transform.OutputKeys; <del>import javax.xml.transform.Result; <del>import javax.xml.transform.Source; <del>import javax.xml.transform.Transformer; <del>import javax.xml.transform.TransformerException; <del>import javax.xml.transform.TransformerFactory; <del>import javax.xml.transform.dom.DOMResult; <del>import javax.xml.transform.dom.DOMSource; <del>import javax.xml.transform.sax.SAXResult; <del>import javax.xml.transform.sax.SAXSource; <del>import javax.xml.transform.stream.StreamResult; <del>import javax.xml.transform.stream.StreamSource; <del> <del>import org.jibx.runtime.BindingDirectory; <del>import org.jibx.runtime.IBindingFactory; <del>import org.jibx.runtime.IMarshallingContext; <del>import org.jibx.runtime.IUnmarshallingContext; <del>import org.jibx.runtime.IXMLReader; <del>import org.jibx.runtime.IXMLWriter; <del>import org.jibx.runtime.JiBXException; <del>import org.jibx.runtime.ValidationException; <del>import org.jibx.runtime.impl.MarshallingContext; <del>import org.jibx.runtime.impl.StAXReaderWrapper; <del>import org.jibx.runtime.impl.StAXWriter; <del>import org.jibx.runtime.impl.UnmarshallingContext; <del>import org.w3c.dom.Node; <del>import org.xml.sax.ContentHandler; <del>import org.xml.sax.InputSource; <del>import org.xml.sax.XMLReader; <del>import org.xml.sax.ext.LexicalHandler; <del> <del>import org.springframework.beans.factory.InitializingBean; <del>import org.springframework.lang.Nullable; <del>import org.springframework.oxm.MarshallingFailureException; <del>import org.springframework.oxm.UnmarshallingFailureException; <del>import org.springframework.oxm.ValidationFailureException; <del>import org.springframework.oxm.XmlMappingException; <del>import org.springframework.oxm.support.AbstractMarshaller; <del>import org.springframework.util.Assert; <del>import org.springframework.util.ClassUtils; <del>import org.springframework.util.StringUtils; <del>import org.springframework.util.xml.StaxUtils; <del> <del>/** <del> * Implementation of the {@code Marshaller} and {@code Unmarshaller} interfaces for JiBX. <del> * <del> * <p>The typical usage will be to set the {@code targetClass} and optionally the <del> * {@code bindingName} property on this bean. 
<del> * <del> * @author Arjen Poutsma <del> * @since 3.0 <del> * @see org.jibx.runtime.IMarshallingContext <del> * @see org.jibx.runtime.IUnmarshallingContext <del> * @deprecated as of Spring Framework 5.1.5, due to the lack of activity on the JiBX project <del> */ <del>@Deprecated <del>public class JibxMarshaller extends AbstractMarshaller implements InitializingBean { <del> <del> private static final String DEFAULT_BINDING_NAME = "binding"; <del> <del> <del> @Nullable <del> private Class<?> targetClass; <del> <del> @Nullable <del> private String targetPackage; <del> <del> @Nullable <del> private String bindingName; <del> <del> private int indent = -1; <del> <del> private String encoding = "UTF-8"; <del> <del> @Nullable <del> private Boolean standalone; <del> <del> @Nullable <del> private String docTypeRootElementName; <del> <del> @Nullable <del> private String docTypeSystemId; <del> <del> @Nullable <del> private String docTypePublicId; <del> <del> @Nullable <del> private String docTypeInternalSubset; <del> <del> @Nullable <del> private IBindingFactory bindingFactory; <del> <del> private final TransformerFactory transformerFactory = TransformerFactory.newInstance(); <del> <del> <del> /** <del> * Set the target class for this instance. Setting either this property or the <del> * {@link #setTargetPackage(String) targetPackage} property is required. <del> * <p>If this property is set, {@link #setTargetPackage(String) targetPackage} is ignored. <del> */ <del> public void setTargetClass(Class<?> targetClass) { <del> this.targetClass = targetClass; <del> } <del> <del> /** <del> * Set the target package for this instance. Setting either this property or the <del> * {@link #setTargetClass(Class) targetClass} property is required. <del> * <p>If {@link #setTargetClass(Class) targetClass} is set, this property is ignored. <del> */ <del> public void setTargetPackage(String targetPackage) { <del> this.targetPackage = targetPackage; <del> } <del> <del> /** <del> * Set the optional binding name for this instance. <del> */ <del> public void setBindingName(String bindingName) { <del> this.bindingName = bindingName; <del> } <del> <del> /** <del> * Set the number of nesting indent spaces. Default is {@code -1}, i.e. no indentation. <del> */ <del> public void setIndent(int indent) { <del> this.indent = indent; <del> } <del> <del> /** <del> * Set the document encoding using for marshalling. Default is UTF-8. <del> */ <del> public void setEncoding(String encoding) { <del> this.encoding = encoding; <del> } <del> <del> @Override <del> protected String getDefaultEncoding() { <del> return this.encoding; <del> } <del> <del> /** <del> * Set the document standalone flag for marshalling. By default, this flag is not present. <del> */ <del> public void setStandalone(Boolean standalone) { <del> this.standalone = standalone; <del> } <del> <del> /** <del> * Set the root element name for the DTD declaration written when marshalling. <del> * By default, this is {@code null} (i.e. no DTD declaration is written). <del> * <p>If set to a value, the system ID or public ID also need to be set. <del> * @see #setDocTypeSystemId(String) <del> * @see #setDocTypePublicId(String) <del> */ <del> public void setDocTypeRootElementName(String docTypeRootElementName) { <del> this.docTypeRootElementName = docTypeRootElementName; <del> } <del> <del> /** <del> * Set the system id for the DTD declaration written when marshalling. <del> * By default, this is {@code null}. Only used when the root element also has been set. 
<del> * <p>Set either this property or {@code docTypePublicId}, not both. <del> * @see #setDocTypeRootElementName(String) <del> */ <del> public void setDocTypeSystemId(String docTypeSystemId) { <del> this.docTypeSystemId = docTypeSystemId; <del> } <del> <del> /** <del> * Set the public id for the DTD declaration written when marshalling. <del> * By default, this is {@code null}. Only used when the root element also has been set. <del> * <p>Set either this property or {@code docTypeSystemId}, not both. <del> * @see #setDocTypeRootElementName(String) <del> */ <del> public void setDocTypePublicId(String docTypePublicId) { <del> this.docTypePublicId = docTypePublicId; <del> } <del> <del> /** <del> * Set the internal subset Id for the DTD declaration written when marshalling. <del> * By default, this is {@code null}. Only used when the root element also has been set. <del> * @see #setDocTypeRootElementName(String) <del> */ <del> public void setDocTypeInternalSubset(String docTypeInternalSubset) { <del> this.docTypeInternalSubset = docTypeInternalSubset; <del> } <del> <del> <del> @Override <del> public void afterPropertiesSet() throws JiBXException { <del> if (this.targetClass != null) { <del> if (StringUtils.hasLength(this.bindingName)) { <del> if (logger.isDebugEnabled()) { <del> logger.debug("Configured for target class [" + this.targetClass + <del> "] using binding [" + this.bindingName + "]"); <del> } <del> this.bindingFactory = BindingDirectory.getFactory(this.bindingName, this.targetClass); <del> } <del> else { <del> if (logger.isDebugEnabled()) { <del> logger.debug("Configured for target class [" + this.targetClass + "]"); <del> } <del> this.bindingFactory = BindingDirectory.getFactory(this.targetClass); <del> } <del> } <del> else if (this.targetPackage != null) { <del> if (!StringUtils.hasLength(this.bindingName)) { <del> this.bindingName = DEFAULT_BINDING_NAME; <del> } <del> if (logger.isDebugEnabled()) { <del> logger.debug("Configured for target package [" + this.targetPackage + <del> "] using binding [" + this.bindingName + "]"); <del> } <del> this.bindingFactory = BindingDirectory.getFactory(this.bindingName, this.targetPackage); <del> } <del> else { <del> throw new IllegalArgumentException("Either 'targetClass' or 'targetPackage' is required"); <del> } <del> } <del> <del> <del> @Override <del> public boolean supports(Class<?> clazz) { <del> Assert.notNull(clazz, "Class must not be null"); <del> if (this.targetClass != null) { <del> return (this.targetClass == clazz); <del> } <del> Assert.state(this.bindingFactory != null, "JibxMarshaller not initialized"); <del> String[] mappedClasses = this.bindingFactory.getMappedClasses(); <del> String className = clazz.getName(); <del> for (String mappedClass : mappedClasses) { <del> if (className.equals(mappedClass)) { <del> return true; <del> } <del> } <del> return false; <del> } <del> <del> <del> // Supported marshalling <del> <del> @Override <del> protected void marshalOutputStream(Object graph, OutputStream outputStream) <del> throws XmlMappingException, IOException { <del> try { <del> IMarshallingContext marshallingContext = createMarshallingContext(); <del> marshallingContext.startDocument(this.encoding, this.standalone, outputStream); <del> marshalDocument(marshallingContext, graph); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, true); <del> } <del> } <del> <del> @Override <del> protected void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException { <del> try { <del> 
IMarshallingContext marshallingContext = createMarshallingContext(); <del> marshallingContext.startDocument(this.encoding, this.standalone, writer); <del> marshalDocument(marshallingContext, graph); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, true); <del> } <del> } <del> <del> private void marshalDocument(IMarshallingContext marshallingContext, Object graph) throws IOException, JiBXException { <del> if (StringUtils.hasLength(this.docTypeRootElementName)) { <del> IXMLWriter xmlWriter = marshallingContext.getXmlWriter(); <del> xmlWriter.writeDocType(this.docTypeRootElementName, this.docTypeSystemId, <del> this.docTypePublicId, this.docTypeInternalSubset); <del> } <del> marshallingContext.marshalDocument(graph); <del> } <del> <del> <del> // Unsupported marshalling <del> <del> @Override <del> protected void marshalDomNode(Object graph, Node node) throws XmlMappingException { <del> try { <del> // JiBX does not support DOM natively, so we write to a buffer first, and transform that to the Node <del> Result result = new DOMResult(node); <del> transformAndMarshal(graph, result); <del> } <del> catch (IOException ex) { <del> throw new MarshallingFailureException("JiBX marshalling exception", ex); <del> } <del> } <del> <del> @Override <del> protected void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) { <del> XMLStreamWriter streamWriter = StaxUtils.createEventStreamWriter(eventWriter); <del> marshalXmlStreamWriter(graph, streamWriter); <del> } <del> <del> @Override <del> protected void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException { <del> try { <del> MarshallingContext marshallingContext = (MarshallingContext) createMarshallingContext(); <del> IXMLWriter xmlWriter = new StAXWriter(marshallingContext.getNamespaces(), streamWriter); <del> marshallingContext.setXmlWriter(xmlWriter); <del> marshallingContext.marshalDocument(graph); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, false); <del> } <del> } <del> <del> @Override <del> protected void marshalSaxHandlers(Object graph, ContentHandler contentHandler, @Nullable LexicalHandler lexicalHandler) <del> throws XmlMappingException { <del> try { <del> // JiBX does not support SAX natively, so we write to a buffer first, and transform that to the handlers <del> SAXResult saxResult = new SAXResult(contentHandler); <del> saxResult.setLexicalHandler(lexicalHandler); <del> transformAndMarshal(graph, saxResult); <del> } <del> catch (IOException ex) { <del> throw new MarshallingFailureException("JiBX marshalling exception", ex); <del> } <del> } <del> <del> private void transformAndMarshal(Object graph, Result result) throws IOException { <del> try { <del> ByteArrayOutputStream os = new ByteArrayOutputStream(1024); <del> marshalOutputStream(graph, os); <del> ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray()); <del> Transformer transformer = this.transformerFactory.newTransformer(); <del> transformer.transform(new StreamSource(is), result); <del> } <del> catch (TransformerException ex) { <del> throw new MarshallingFailureException( <del> "Could not transform to [" + ClassUtils.getShortName(result.getClass()) + "]", ex); <del> } <del> <del> } <del> <del> <del> // Unmarshalling <del> <del> @Override <del> protected Object unmarshalXmlEventReader(XMLEventReader eventReader) { <del> try { <del> XMLStreamReader streamReader = StaxUtils.createEventStreamReader(eventReader); <del> return unmarshalXmlStreamReader(streamReader); 
<del> } <del> catch (XMLStreamException ex) { <del> return new UnmarshallingFailureException("JiBX unmarshalling exception", ex); <del> } <del> } <del> <del> @Override <del> protected Object unmarshalXmlStreamReader(XMLStreamReader streamReader) { <del> try { <del> UnmarshallingContext unmarshallingContext = (UnmarshallingContext) createUnmarshallingContext(); <del> IXMLReader xmlReader = new StAXReaderWrapper(streamReader, null, true); <del> unmarshallingContext.setDocument(xmlReader); <del> return unmarshallingContext.unmarshalElement(); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, false); <del> } <del> } <del> <del> @Override <del> protected Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException { <del> try { <del> IUnmarshallingContext unmarshallingContext = createUnmarshallingContext(); <del> return unmarshallingContext.unmarshalDocument(inputStream, this.encoding); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, false); <del> } <del> } <del> <del> @Override <del> protected Object unmarshalReader(Reader reader) throws XmlMappingException, IOException { <del> try { <del> IUnmarshallingContext unmarshallingContext = createUnmarshallingContext(); <del> return unmarshallingContext.unmarshalDocument(reader); <del> } <del> catch (JiBXException ex) { <del> throw convertJibxException(ex, false); <del> } <del> } <del> <del> <del> // Unsupported Unmarshalling <del> <del> @Override <del> protected Object unmarshalDomNode(Node node) throws XmlMappingException { <del> try { <del> return transformAndUnmarshal(new DOMSource(node), null); <del> } <del> catch (IOException ex) { <del> throw new UnmarshallingFailureException("JiBX unmarshalling exception", ex); <del> } <del> } <del> <del> @Override <del> protected Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource) <del> throws XmlMappingException, IOException { <del> <del> return transformAndUnmarshal(new SAXSource(xmlReader, inputSource), inputSource.getEncoding()); <del> } <del> <del> private Object transformAndUnmarshal(Source source, @Nullable String encoding) throws IOException { <del> try { <del> Transformer transformer = this.transformerFactory.newTransformer(); <del> if (encoding != null) { <del> transformer.setOutputProperty(OutputKeys.ENCODING, encoding); <del> } <del> ByteArrayOutputStream os = new ByteArrayOutputStream(1024); <del> transformer.transform(source, new StreamResult(os)); <del> ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray()); <del> return unmarshalInputStream(is); <del> } <del> catch (TransformerException ex) { <del> throw new MarshallingFailureException( <del> "Could not transform from [" + ClassUtils.getShortName(source.getClass()) + "]", ex); <del> } <del> } <del> <del> <del> /** <del> * Create a new {@code IMarshallingContext}, configured with the correct indentation. <del> * @return the created marshalling context <del> * @throws JiBXException in case of errors <del> */ <del> protected IMarshallingContext createMarshallingContext() throws JiBXException { <del> Assert.state(this.bindingFactory != null, "JibxMarshaller not initialized"); <del> IMarshallingContext marshallingContext = this.bindingFactory.createMarshallingContext(); <del> marshallingContext.setIndent(this.indent); <del> return marshallingContext; <del> } <del> <del> /** <del> * Create a new {@code IUnmarshallingContext}. 
<del> * @return the created unmarshalling context <del> * @throws JiBXException in case of errors <del> */ <del> protected IUnmarshallingContext createUnmarshallingContext() throws JiBXException { <del> Assert.state(this.bindingFactory != null, "JibxMarshaller not initialized"); <del> return this.bindingFactory.createUnmarshallingContext(); <del> } <del> <del> /** <del> * Convert the given {@code JiBXException} to an appropriate exception from the <del> * {@code org.springframework.oxm} hierarchy. <del> * <p>A boolean flag is used to indicate whether this exception occurs during marshalling or <del> * unmarshalling, since JiBX itself does not make this distinction in its exception hierarchy. <del> * @param ex {@code JiBXException} that occurred <del> * @param marshalling indicates whether the exception occurs during marshalling ({@code true}), <del> * or unmarshalling ({@code false}) <del> * @return the corresponding {@code XmlMappingException} <del> */ <del> public XmlMappingException convertJibxException(JiBXException ex, boolean marshalling) { <del> if (ex instanceof ValidationException) { <del> return new ValidationFailureException("JiBX validation exception", ex); <del> } <del> else { <del> if (marshalling) { <del> return new MarshallingFailureException("JiBX marshalling exception", ex); <del> } <del> else { <del> return new UnmarshallingFailureException("JiBX unmarshalling exception", ex); <del> } <del> } <del> } <del> <del>} <ide><path>spring-oxm/src/main/java/org/springframework/oxm/jibx/package-info.java <del>/** <del> * Package providing integration of <a href="http://jibx.sourceforge.net/">JiBX</a> <del> * with Spring's O/X Mapping support. <del> */ <del>@NonNullApi <del>@NonNullFields <del>package org.springframework.oxm.jibx; <del> <del>import org.springframework.lang.NonNullApi; <del>import org.springframework.lang.NonNullFields; <ide><path>spring-oxm/src/test/java/org/springframework/oxm/jibx/FlightType.java <del>/* <del> * Copyright 2006-2012 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.oxm.jibx; <del> <del>public class FlightType { <del> <del> protected String airline; <del> <del> protected long number; <del> <del> public String getAirline() { <del> return this.airline; <del> } <del> <del> public void setAirline(String airline) { <del> this.airline = airline; <del> } <del> <del> public long getNumber() { <del> return this.number; <del> } <del> <del> public void setNumber(long number) { <del> this.number = number; <del> } <del>} <ide><path>spring-oxm/src/test/java/org/springframework/oxm/jibx/Flights.java <del>/* <del> * Copyright 2002-2016 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. 
<del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.oxm.jibx; <del> <del>import java.util.ArrayList; <del> <del>public class Flights { <del> <del> protected ArrayList<FlightType> flightList = new ArrayList<>(); <del> <del> public void addFlight(FlightType flight) { <del> flightList.add(flight); <del> } <del> <del> public FlightType getFlight(int index) { <del> return flightList.get(index); <del> } <del> <del> public int sizeFlightList() { <del> return flightList.size(); <del> } <del>} <ide><path>spring-oxm/src/test/java/org/springframework/oxm/jibx/JibxMarshallerTests.java <del>/* <del> * Copyright 2002-2019 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.oxm.jibx; <del> <del>import java.io.StringWriter; <del> <del>import javax.xml.transform.stream.StreamResult; <del> <del>import org.junit.jupiter.api.Test; <del>import org.junit.jupiter.api.condition.EnabledOnJre; <del> <del>import org.springframework.core.testfixture.xml.XmlContent; <del>import org.springframework.oxm.AbstractMarshallerTests; <del> <del>import static org.assertj.core.api.Assertions.assertThat; <del>import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; <del>import static org.junit.jupiter.api.condition.JRE.JAVA_8; <del> <del>/** <del> * NOTE: These tests fail under Eclipse/IDEA because JiBX binding does not occur by <del> * default. The Gradle build should succeed, however. 
<del> * <del> * @author Arjen Poutsma <del> * @author Sam Brannen <del> */ <del>@Deprecated <del>@EnabledOnJre(JAVA_8) // JiBX compiler is currently not compatible with JDK 9 <del>public class JibxMarshallerTests extends AbstractMarshallerTests<JibxMarshaller> { <del> <del> @Override <del> protected JibxMarshaller createMarshaller() throws Exception { <del> JibxMarshaller marshaller = new JibxMarshaller(); <del> marshaller.setTargetPackage("org.springframework.oxm.jibx"); <del> marshaller.afterPropertiesSet(); <del> return marshaller; <del> } <del> <del> @Override <del> protected Object createFlights() { <del> Flights flights = new Flights(); <del> FlightType flight = new FlightType(); <del> flight.setNumber(42L); <del> flights.addFlight(flight); <del> return flights; <del> } <del> <del> <del> @Test <del> public void afterPropertiesSetNoContextPath() throws Exception { <del> JibxMarshaller marshaller = new JibxMarshaller(); <del> assertThatIllegalArgumentException().isThrownBy( <del> marshaller::afterPropertiesSet); <del> } <del> <del> @Test <del> public void indentation() throws Exception { <del> marshaller.setIndent(4); <del> StringWriter writer = new StringWriter(); <del> marshaller.marshal(flights, new StreamResult(writer)); <del> String expected = <del> "<?xml version=\"1.0\"?>\n" + "<flights xmlns=\"http://samples.springframework.org/flight\">\n" + <del> " <flight>\n" + " <number>42</number>\n" + " </flight>\n" + "</flights>"; <del> assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(expected); <del> } <del> <del> @Test <del> public void encodingAndStandalone() throws Exception { <del> marshaller.setEncoding("ISO-8859-1"); <del> marshaller.setStandalone(Boolean.TRUE); <del> StringWriter writer = new StringWriter(); <del> marshaller.marshal(flights, new StreamResult(writer)); <del> assertThat(writer.toString().startsWith("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" standalone=\"yes\"?>")).as("Encoding and standalone not set").isTrue(); <del> } <del> <del> @Test <del> public void dtd() throws Exception { <del> marshaller.setDocTypeRootElementName("flights"); <del> marshaller.setDocTypeSystemId("flights.dtd"); <del> StringWriter writer = new StringWriter(); <del> marshaller.marshal(flights, new StreamResult(writer)); <del> assertThat(writer.toString().contains("<!DOCTYPE flights SYSTEM \"flights.dtd\">")).as("doc type not written").isTrue(); <del> } <del> <del> @Test <del> public void supports() throws Exception { <del> assertThat(marshaller.supports(Flights.class)).as("JibxMarshaller does not support Flights").isTrue(); <del> assertThat(marshaller.supports(FlightType.class)).as("JibxMarshaller does not support FlightType").isTrue(); <del> assertThat(marshaller.supports(getClass())).as("JibxMarshaller supports illegal type").isFalse(); <del> } <del> <del>} <ide><path>spring-oxm/src/test/java/org/springframework/oxm/jibx/JibxUnmarshallerTests.java <del>/* <del> * Copyright 2002-2019 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * https://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
<del> * See the License for the specific language governing permissions and <del> * limitations under the License. <del> */ <del> <del>package org.springframework.oxm.jibx; <del> <del>import java.io.ByteArrayInputStream; <del> <del>import javax.xml.transform.stream.StreamSource; <del> <del>import org.junit.jupiter.api.Test; <del>import org.junit.jupiter.api.condition.EnabledOnJre; <del> <del>import org.springframework.oxm.AbstractUnmarshallerTests; <del> <del>import static org.assertj.core.api.Assertions.assertThat; <del>import static org.junit.jupiter.api.condition.JRE.JAVA_8; <del> <del>/** <del> * NOTE: These tests fail under Eclipse/IDEA because JiBX binding does <del> * not occur by default. The Gradle build should succeed, however. <del> * <del> * @author Arjen Poutsma <del> * @author Sam Brannen <del> */ <del>@Deprecated <del>@EnabledOnJre(JAVA_8) // JiBX compiler is currently not compatible with JDK 9 <del>public class JibxUnmarshallerTests extends AbstractUnmarshallerTests<JibxMarshaller> { <del> <del> protected static final String INPUT_STRING_WITH_SPECIAL_CHARACTERS = <del> "<tns:flights xmlns:tns=\"http://samples.springframework.org/flight\">" + <del> "<tns:flight><tns:airline>Air Libert\u00e9</tns:airline><tns:number>42</tns:number></tns:flight></tns:flights>"; <del> <del> <del> @Override <del> protected JibxMarshaller createUnmarshaller() throws Exception { <del> JibxMarshaller unmarshaller = new JibxMarshaller(); <del> unmarshaller.setTargetClass(Flights.class); <del> unmarshaller.afterPropertiesSet(); <del> return unmarshaller; <del> } <del> <del> @Override <del> protected void testFlights(Object o) { <del> Flights flights = (Flights) o; <del> assertThat(flights).as("Flights is null").isNotNull(); <del> assertThat(flights.sizeFlightList()).as("Invalid amount of flight elements").isEqualTo(1); <del> testFlight(flights.getFlight(0)); <del> } <del> <del> @Override <del> protected void testFlight(Object o) { <del> FlightType flight = (FlightType) o; <del> assertThat(flight).as("Flight is null").isNotNull(); <del> assertThat(flight.getNumber()).as("Number is invalid").isEqualTo(42L); <del> } <del> <del> <del> @Test <del> @Override <del> public void unmarshalPartialStaxSourceXmlStreamReader() throws Exception { <del> // JiBX does not support reading XML fragments, hence the override here <del> } <del> <del> @Test <del> public void unmarshalStreamSourceInputStreamUsingNonDefaultEncoding() throws Exception { <del> String encoding = "ISO-8859-1"; <del> unmarshaller.setEncoding(encoding); <del> <del> StreamSource source = new StreamSource(new ByteArrayInputStream(INPUT_STRING_WITH_SPECIAL_CHARACTERS.getBytes(encoding))); <del> Object flights = unmarshaller.unmarshal(source); <del> testFlights(flights); <del> <del> FlightType flight = ((Flights)flights).getFlight(0); <del> assertThat(flight.getAirline()).as("Airline is invalid").isEqualTo("Air Libert\u00e9"); <del> } <del> <del>}
8
Go
Go
update image pull tests
f324f4851f28edfd8dab82cd624a4ec1f01cd207
<ide><path>integration-cli/docker_cli_pull_local_test.go <add>package main <add> <add>import ( <add> "fmt" <add> <add> "github.com/go-check/check" <add>) <add> <add>// TestPullImageWithAliases pulls a specific image tag and verifies that any aliases (i.e., other <add>// tags for the same image) are not also pulled down. <add>// <add>// Ref: docker/docker#8141 <add>func (s *DockerRegistrySuite) TestPullImageWithAliases(c *check.C) { <add> repoName := fmt.Sprintf("%v/dockercli/busybox", privateRegistryURL) <add> <add> repos := []string{} <add> for _, tag := range []string{"recent", "fresh"} { <add> repos = append(repos, fmt.Sprintf("%v:%v", repoName, tag)) <add> } <add> <add> // Tag and push the same image multiple times. <add> for _, repo := range repos { <add> dockerCmd(c, "tag", "busybox", repo) <add> dockerCmd(c, "push", repo) <add> } <add> <add> // Clear local images store. <add> args := append([]string{"rmi"}, repos...) <add> dockerCmd(c, args...) <add> <add> // Pull a single tag and verify it doesn't bring down all aliases. <add> dockerCmd(c, "pull", repos[0]) <add> dockerCmd(c, "inspect", repos[0]) <add> for _, repo := range repos[1:] { <add> if _, _, err := dockerCmdWithError("inspect", repo); err == nil { <add> c.Fatalf("Image %v shouldn't have been pulled down", repo) <add> } <add> } <add>} <ide><path>integration-cli/docker_cli_pull_test.go <ide> package main <ide> <ide> import ( <ide> "fmt" <del> "os/exec" <add> "regexp" <ide> "strings" <ide> "time" <ide> <del> "io/ioutil" <del> <add> "github.com/docker/distribution/digest" <add> "github.com/docker/docker/integration-cli/checker" <ide> "github.com/go-check/check" <ide> ) <ide> <del>// See issue docker/docker#8141 <del>func (s *DockerRegistrySuite) TestPullImageWithAliases(c *check.C) { <del> repoName := fmt.Sprintf("%v/dockercli/busybox", privateRegistryURL) <del> <del> repos := []string{} <del> for _, tag := range []string{"recent", "fresh"} { <del> repos = append(repos, fmt.Sprintf("%v:%v", repoName, tag)) <del> } <del> <del> // Tag and push the same image multiple times. <del> for _, repo := range repos { <del> dockerCmd(c, "tag", "busybox", repo) <del> dockerCmd(c, "push", repo) <del> } <del> <del> // Clear local images store. <del> args := append([]string{"rmi"}, repos...) <del> dockerCmd(c, args...) <del> <del> // Pull a single tag and verify it doesn't bring down all aliases. <del> dockerCmd(c, "pull", repos[0]) <del> dockerCmd(c, "inspect", repos[0]) <del> for _, repo := range repos[1:] { <del> if _, _, err := dockerCmdWithError("inspect", repo); err == nil { <del> c.Fatalf("Image %v shouldn't have been pulled down", repo) <del> } <add>// TestPullFromCentralRegistry pulls an image from the central registry and verifies that the client <add>// prints all expected output. 
<add>func (s *DockerHubPullSuite) TestPullFromCentralRegistry(c *check.C) { <add> out := s.Cmd(c, "pull", "hello-world") <add> defer deleteImages("hello-world") <add> <add> c.Assert(out, checker.Contains, "Using default tag: latest", check.Commentf("expected the 'latest' tag to be automatically assumed")) <add> c.Assert(out, checker.Contains, "Pulling from library/hello-world", check.Commentf("expected the 'library/' prefix to be automatically assumed")) <add> c.Assert(out, checker.Contains, "Downloaded newer image for hello-world:latest") <add> <add> matches := regexp.MustCompile(`Digest: (.+)\n`).FindAllStringSubmatch(out, -1) <add> c.Assert(len(matches), checker.Equals, 1, check.Commentf("expected exactly one image digest in the output")) <add> c.Assert(len(matches[0]), checker.Equals, 2, check.Commentf("unexpected number of submatches for the digest")) <add> _, err := digest.ParseDigest(matches[0][1]) <add> c.Check(err, checker.IsNil, check.Commentf("invalid digest %q in output", matches[0][1])) <add> <add> // We should have a single entry in images. <add> img := strings.TrimSpace(s.Cmd(c, "images")) <add> if splitImg := strings.Split(img, "\n"); len(splitImg) != 2 { <add> c.Fatalf("expected only two lines in the output of `docker images`, got %d", len(splitImg)) <add> } else if re := regexp.MustCompile(`^hello-world\s+latest`); !re.Match([]byte(splitImg[1])) { <add> c.Fatal("invalid output for `docker images` (expected image and tag name") <ide> } <ide> } <ide> <del>// pulling library/hello-world should show verified message <del>func (s *DockerSuite) TestPullVerified(c *check.C) { <del> c.Skip("Skipping hub dependent test") <del> <del> // Image must be pulled from central repository to get verified message <del> // unless keychain is manually updated to contain the daemon's sign key. <del> <del> verifiedName := "hello-world" <del> <del> // pull it <del> expected := "The image you are pulling has been verified" <del> if out, exitCode, err := dockerCmdWithError("pull", verifiedName); err != nil || !strings.Contains(out, expected) { <del> if err != nil || exitCode != 0 { <del> c.Skip(fmt.Sprintf("pulling the '%s' image from the registry has failed: %v", verifiedName, err)) <del> } <del> c.Fatalf("pulling a verified image failed. expected: %s\ngot: %s, %v", expected, out, err) <del> } <del> <del> // pull it again <del> if out, exitCode, err := dockerCmdWithError("pull", verifiedName); err != nil || strings.Contains(out, expected) { <del> if err != nil || exitCode != 0 { <del> c.Skip(fmt.Sprintf("pulling the '%s' image from the registry has failed: %v", verifiedName, err)) <del> } <del> c.Fatalf("pulling a verified image failed. 
unexpected verify message\ngot: %s, %v", out, err) <del> } <del> <del>} <del> <del>// pulling an image from the central registry should work <del>func (s *DockerSuite) TestPullImageFromCentralRegistry(c *check.C) { <del> testRequires(c, Network) <del> <del> dockerCmd(c, "pull", "hello-world") <del>} <del> <del>// pulling a non-existing image from the central registry should return a non-zero exit code <del>func (s *DockerSuite) TestPullNonExistingImage(c *check.C) { <del> testRequires(c, Network) <del> <del> name := "sadfsadfasdf" <del> out, _, err := dockerCmdWithError("pull", name) <del> <del> if err == nil || !strings.Contains(out, fmt.Sprintf("Error: image library/%s:latest not found", name)) { <del> c.Fatalf("expected non-zero exit status when pulling non-existing image: %s", out) <add>// TestPullNonExistingImage pulls non-existing images from the central registry, with different <add>// combinations of implicit tag and library prefix. <add>func (s *DockerHubPullSuite) TestPullNonExistingImage(c *check.C) { <add> for _, e := range []struct { <add> Image string <add> Alias string <add> }{ <add> {"library/asdfasdf:foobar", "asdfasdf:foobar"}, <add> {"library/asdfasdf:foobar", "library/asdfasdf:foobar"}, <add> {"library/asdfasdf:latest", "asdfasdf"}, <add> {"library/asdfasdf:latest", "asdfasdf:latest"}, <add> {"library/asdfasdf:latest", "library/asdfasdf"}, <add> {"library/asdfasdf:latest", "library/asdfasdf:latest"}, <add> } { <add> out, err := s.CmdWithError("pull", e.Alias) <add> c.Assert(err, checker.NotNil, check.Commentf("expected non-zero exit status when pulling non-existing image: %s", out)) <add> c.Assert(out, checker.Contains, fmt.Sprintf("Error: image %s not found", e.Image), check.Commentf("expected image not found error messages")) <ide> } <ide> } <ide> <del>// pulling an image from the central registry using official names should work <del>// ensure all pulls result in the same image <del>func (s *DockerSuite) TestPullImageOfficialNames(c *check.C) { <del> testRequires(c, Network) <del> <del> names := []string{ <add>// TestPullFromCentralRegistryImplicitRefParts pulls an image from the central registry and verifies <add>// that pulling the same image with different combinations of implicit elements of the the image <add>// reference (tag, repository, central registry url, ...) doesn't trigger a new pull nor leads to <add>// multiple images. <add>func (s *DockerHubPullSuite) TestPullFromCentralRegistryImplicitRefParts(c *check.C) { <add> s.Cmd(c, "pull", "hello-world") <add> defer deleteImages("hello-world") <add> <add> for _, i := range []string{ <add> "hello-world", <add> "hello-world:latest", <ide> "library/hello-world", <add> "library/hello-world:latest", <ide> "docker.io/library/hello-world", <ide> "index.docker.io/library/hello-world", <del> } <del> for _, name := range names { <del> out, exitCode, err := dockerCmdWithError("pull", name) <del> if err != nil || exitCode != 0 { <del> c.Errorf("pulling the '%s' image from the registry has failed: %s", name, err) <del> continue <del> } <del> <del> // ensure we don't have multiple image names. 
<del> out, _ = dockerCmd(c, "images") <del> if strings.Contains(out, name) { <del> c.Errorf("images should not have listed '%s'", name) <del> } <del> } <del>} <del> <del>func (s *DockerSuite) TestPullScratchNotAllowed(c *check.C) { <del> testRequires(c, Network) <del> <del> out, exitCode, err := dockerCmdWithError("pull", "scratch") <del> if err == nil { <del> c.Fatal("expected pull of scratch to fail, but it didn't") <del> } <del> if exitCode != 1 { <del> c.Fatalf("pulling scratch expected exit code 1, got %d", exitCode) <del> } <del> if strings.Contains(out, "Pulling repository scratch") { <del> c.Fatalf("pulling scratch should not have begun: %s", out) <del> } <del> if !strings.Contains(out, "'scratch' is a reserved name") { <del> c.Fatalf("unexpected output pulling scratch: %s", out) <del> } <del>} <del> <del>// pulling an image with --all-tags=true <del>func (s *DockerSuite) TestPullImageWithAllTagFromCentralRegistry(c *check.C) { <del> testRequires(c, Network) <del> <del> dockerCmd(c, "pull", "busybox") <del> <del> outImageCmd, _ := dockerCmd(c, "images", "busybox") <del> <del> dockerCmd(c, "pull", "--all-tags=true", "busybox") <del> <del> outImageAllTagCmd, _ := dockerCmd(c, "images", "busybox") <del> <del> if strings.Count(outImageCmd, "busybox") >= strings.Count(outImageAllTagCmd, "busybox") { <del> c.Fatalf("Pulling with all tags should get more images") <del> } <del> <del> // FIXME has probably no effect (tags already pushed) <del> dockerCmd(c, "pull", "-a", "busybox") <del> <del> outImageAllTagCmd, _ = dockerCmd(c, "images", "busybox") <del> <del> if strings.Count(outImageCmd, "busybox") >= strings.Count(outImageAllTagCmd, "busybox") { <del> c.Fatalf("Pulling with all tags should get more images") <del> } <del>} <del> <del>func (s *DockerTrustSuite) TestTrustedPull(c *check.C) { <del> repoName := s.setupTrustedImage(c, "trusted-pull") <del> <del> // Try pull <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err := runCommandWithOutput(pullCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "Tagging") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <del> } <del> <del> dockerCmd(c, "rmi", repoName) <del> <del> // Try untrusted pull to ensure we pushed the tag to the registry <del> pullCmd = exec.Command(dockerBinary, "pull", "--disable-content-trust=true", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err = runCommandWithOutput(pullCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "Status: Downloaded") { <del> c.Fatalf("Missing expected output on trusted pull with --disable-content-trust:\n%s", out) <del> } <del>} <del> <del>func (s *DockerTrustSuite) TestTrustedIsolatedPull(c *check.C) { <del> repoName := s.setupTrustedImage(c, "trusted-isolatd-pull") <del> <del> // Try pull (run from isolated directory without trust information) <del> pullCmd := exec.Command(dockerBinary, "--config", "/tmp/docker-isolated", "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err := runCommandWithOutput(pullCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <add> } { <add> out := s.Cmd(c, "pull", i) <add> c.Assert(out, checker.Contains, "Image is up to date for hello-world:latest") <ide> } <ide> <del> if !strings.Contains(string(out), "Tagging") { <del> c.Fatalf("Missing expected output 
on trusted push:\n%s", out) <add> // We should have a single entry in images. <add> img := strings.TrimSpace(s.Cmd(c, "images")) <add> if splitImg := strings.Split(img, "\n"); len(splitImg) != 2 { <add> c.Fatalf("expected only two lines in the output of `docker images`, got %d", len(splitImg)) <add> } else if re := regexp.MustCompile(`^hello-world\s+latest`); !re.Match([]byte(splitImg[1])) { <add> c.Fatal("invalid output for `docker images` (expected image and tag name") <ide> } <del> <del> dockerCmd(c, "rmi", repoName) <ide> } <ide> <del>func (s *DockerTrustSuite) TestUntrustedPull(c *check.C) { <del> repoName := fmt.Sprintf("%v/dockercli/trusted:latest", privateRegistryURL) <del> // tag the image and upload it to the private registry <del> dockerCmd(c, "tag", "busybox", repoName) <del> dockerCmd(c, "push", repoName) <del> dockerCmd(c, "rmi", repoName) <del> <del> // Try trusted pull on untrusted tag <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err := runCommandWithOutput(pullCmd) <del> if err == nil { <del> c.Fatalf("Error expected when running trusted pull with:\n%s", out) <del> } <del> <del> if !strings.Contains(string(out), "no trust data available") { <del> c.Fatalf("Missing expected output on trusted pull:\n%s", out) <del> } <add>// TestPullScratchNotAllowed verifies that pulling 'scratch' is rejected. <add>func (s *DockerHubPullSuite) TestPullScratchNotAllowed(c *check.C) { <add> out, err := s.CmdWithError("pull", "scratch") <add> c.Assert(err, checker.NotNil, check.Commentf("expected pull of scratch to fail")) <add> c.Assert(out, checker.Contains, "'scratch' is a reserved name") <add> c.Assert(out, checker.Not(checker.Contains), "Pulling repository scratch") <ide> } <ide> <del>func (s *DockerTrustSuite) TestPullWhenCertExpired(c *check.C) { <del> c.Skip("Currently changes system time, causing instability") <del> repoName := s.setupTrustedImage(c, "trusted-cert-expired") <del> <del> // Certificates have 10 years of expiration <del> elevenYearsFromNow := time.Now().Add(time.Hour * 24 * 365 * 11) <del> <del> runAtDifferentDate(elevenYearsFromNow, func() { <del> // Try pull <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err := runCommandWithOutput(pullCmd) <del> if err == nil { <del> c.Fatalf("Error running trusted pull in the distant future: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "could not validate the path to a trusted root") { <del> c.Fatalf("Missing expected output on trusted pull in the distant future:\n%s", out) <del> } <del> }) <del> <del> runAtDifferentDate(elevenYearsFromNow, func() { <del> // Try pull <del> pullCmd := exec.Command(dockerBinary, "pull", "--disable-content-trust", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err := runCommandWithOutput(pullCmd) <del> if err != nil { <del> c.Fatalf("Error running untrusted pull in the distant future: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "Status: Downloaded") { <del> c.Fatalf("Missing expected output on untrusted pull in the distant future:\n%s", out) <add>// TestPullAllTagsFromCentralRegistry pulls using `all-tags` for a given image and verifies that it <add>// results in more images than a naked pull. 
<add>func (s *DockerHubPullSuite) TestPullAllTagsFromCentralRegistry(c *check.C) { <add> s.Cmd(c, "pull", "busybox") <add> outImageCmd := s.Cmd(c, "images", "busybox") <add> splitOutImageCmd := strings.Split(strings.TrimSpace(outImageCmd), "\n") <add> c.Assert(splitOutImageCmd, checker.HasLen, 2, check.Commentf("expected a single entry in images\n%v", outImageCmd)) <add> <add> s.Cmd(c, "pull", "--all-tags=true", "busybox") <add> outImageAllTagCmd := s.Cmd(c, "images", "busybox") <add> if linesCount := strings.Count(outImageAllTagCmd, "\n"); linesCount <= 2 { <add> c.Fatalf("pulling all tags should provide more images, got %d", linesCount-1) <add> } <add> <add> // Verify that the line for 'busybox:latest' is left unchanged. <add> var latestLine string <add> for _, line := range strings.Split(outImageAllTagCmd, "\n") { <add> if strings.HasPrefix(line, "busybox") && strings.Contains(line, "latest") { <add> latestLine = line <add> break <ide> } <del> }) <del>} <del> <del>func (s *DockerTrustSuite) TestTrustedPullFromBadTrustServer(c *check.C) { <del> repoName := fmt.Sprintf("%v/dockerclievilpull/trusted:latest", privateRegistryURL) <del> evilLocalConfigDir, err := ioutil.TempDir("", "evil-local-config-dir") <del> if err != nil { <del> c.Fatalf("Failed to create local temp dir") <del> } <del> <del> // tag the image and upload it to the private registry <del> dockerCmd(c, "tag", "busybox", repoName) <del> <del> pushCmd := exec.Command(dockerBinary, "push", repoName) <del> s.trustedCmd(pushCmd) <del> out, _, err := runCommandWithOutput(pushCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted push: %s\n%s", err, out) <del> } <del> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <del> } <del> <del> dockerCmd(c, "rmi", repoName) <del> <del> // Try pull <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err = runCommandWithOutput(pullCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "Tagging") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <del> } <del> <del> dockerCmd(c, "rmi", repoName) <del> <del> // Kill the notary server, start a new "evil" one. <del> s.not.Close() <del> s.not, err = newTestNotary(c) <del> if err != nil { <del> c.Fatalf("Restarting notary server failed.") <del> } <del> <del> // In order to make an evil server, lets re-init a client (with a different trust dir) and push new data. <del> // tag an image and upload it to the private registry <del> dockerCmd(c, "--config", evilLocalConfigDir, "tag", "busybox", repoName) <del> <del> // Push up to the new server <del> pushCmd = exec.Command(dockerBinary, "--config", evilLocalConfigDir, "push", repoName) <del> s.trustedCmd(pushCmd) <del> out, _, err = runCommandWithOutput(pushCmd) <del> if err != nil { <del> c.Fatalf("Error running trusted push: %s\n%s", err, out) <del> } <del> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <del> } <del> <del> // Now, try pulling with the original client from this new trust server. This should fail. 
<del> pullCmd = exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err = runCommandWithOutput(pullCmd) <del> if err == nil { <del> c.Fatalf("Expected to fail on this pull due to different remote data: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "failed to validate data with current trusted certificates") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <ide> } <add> c.Assert(latestLine, checker.Not(checker.Equals), "", check.Commentf("no entry for busybox:latest found after pulling all tags")) <add> splitLatest := strings.Fields(latestLine) <add> splitCurrent := strings.Fields(splitOutImageCmd[1]) <add> c.Assert(splitLatest, checker.DeepEquals, splitCurrent, check.Commentf("busybox:latest was changed after pulling all tags")) <ide> } <ide> <del>func (s *DockerTrustSuite) TestTrustedPullWithExpiredSnapshot(c *check.C) { <del> c.Skip("Currently changes system time, causing instability") <del> repoName := fmt.Sprintf("%v/dockercliexpiredtimestamppull/trusted:latest", privateRegistryURL) <del> // tag the image and upload it to the private registry <del> dockerCmd(c, "tag", "busybox", repoName) <del> <del> // Push with default passphrases <del> pushCmd := exec.Command(dockerBinary, "push", repoName) <del> s.trustedCmd(pushCmd) <del> out, _, err := runCommandWithOutput(pushCmd) <del> if err != nil { <del> c.Fatalf("trusted push failed: %s\n%s", err, out) <del> } <del> <del> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <del> c.Fatalf("Missing expected output on trusted push:\n%s", out) <del> } <del> <del> dockerCmd(c, "rmi", repoName) <del> <del> // Snapshots last for three years. This should be expired <del> fourYearsLater := time.Now().Add(time.Hour * 24 * 365 * 4) <del> <del> // Should succeed because the server transparently re-signs one <del> runAtDifferentDate(fourYearsLater, func() { <del> // Try pull <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> s.trustedCmd(pullCmd) <del> out, _, err = runCommandWithOutput(pullCmd) <del> if err == nil { <del> c.Fatalf("Missing expected error running trusted pull with expired snapshots") <del> } <del> <del> if !strings.Contains(string(out), "repository out-of-date") { <del> c.Fatalf("Missing expected output on trusted pull with expired snapshot:\n%s", out) <del> } <del> }) <del>} <del> <del>// Test that pull continues after client has disconnected. #15589 <del>func (s *DockerSuite) TestPullClientDisconnect(c *check.C) { <del> testRequires(c, Network) <del> <add>// TestPullClientDisconnect kills the client during a pull operation and verifies that the operation <add>// still succesfully completes on the daemon side. <add>// <add>// Ref: docker/docker#15589 <add>func (s *DockerHubPullSuite) TestPullClientDisconnect(c *check.C) { <ide> repoName := "hello-world:latest" <ide> <del> dockerCmdWithError("rmi", repoName) // clean just in case <del> <del> pullCmd := exec.Command(dockerBinary, "pull", repoName) <del> <add> pullCmd := s.MakeCmd("pull", repoName) <ide> stdout, err := pullCmd.StdoutPipe() <del> c.Assert(err, check.IsNil) <del> <add> c.Assert(err, checker.IsNil) <ide> err = pullCmd.Start() <del> c.Assert(err, check.IsNil) <add> c.Assert(err, checker.IsNil) <ide> <del> // cancel as soon as we get some output <add> // Cancel as soon as we get some output. 
<ide> buf := make([]byte, 10) <ide> _, err = stdout.Read(buf) <del> c.Assert(err, check.IsNil) <add> c.Assert(err, checker.IsNil) <ide> <ide> err = pullCmd.Process.Kill() <del> c.Assert(err, check.IsNil) <add> c.Assert(err, checker.IsNil) <ide> <ide> maxAttempts := 20 <ide> for i := 0; ; i++ { <del> if _, _, err := dockerCmdWithError("inspect", repoName); err == nil { <add> if _, err := s.CmdWithError("inspect", repoName); err == nil { <ide> break <ide> } <ide> if i >= maxAttempts { <del> c.Fatal("Timeout reached. Image was not pulled after client disconnected.") <add> c.Fatal("timeout reached: image was not pulled after client disconnected") <ide> } <ide> time.Sleep(500 * time.Millisecond) <ide> } <del> <ide> } <ide><path>integration-cli/docker_cli_pull_trusted_test.go <add>package main <add> <add>import ( <add> "fmt" <add> "io/ioutil" <add> "os/exec" <add> "strings" <add> "time" <add> <add> "github.com/go-check/check" <add>) <add> <add>func (s *DockerTrustSuite) TestTrustedPull(c *check.C) { <add> repoName := s.setupTrustedImage(c, "trusted-pull") <add> <add> // Try pull <add> pullCmd := exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err := runCommandWithOutput(pullCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Tagging") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> dockerCmd(c, "rmi", repoName) <add> <add> // Try untrusted pull to ensure we pushed the tag to the registry <add> pullCmd = exec.Command(dockerBinary, "pull", "--disable-content-trust=true", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err = runCommandWithOutput(pullCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Status: Downloaded") { <add> c.Fatalf("Missing expected output on trusted pull with --disable-content-trust:\n%s", out) <add> } <add>} <add> <add>func (s *DockerTrustSuite) TestTrustedIsolatedPull(c *check.C) { <add> repoName := s.setupTrustedImage(c, "trusted-isolatd-pull") <add> <add> // Try pull (run from isolated directory without trust information) <add> pullCmd := exec.Command(dockerBinary, "--config", "/tmp/docker-isolated", "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err := runCommandWithOutput(pullCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Tagging") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> dockerCmd(c, "rmi", repoName) <add>} <add> <add>func (s *DockerTrustSuite) TestUntrustedPull(c *check.C) { <add> repoName := fmt.Sprintf("%v/dockercli/trusted:latest", privateRegistryURL) <add> // tag the image and upload it to the private registry <add> dockerCmd(c, "tag", "busybox", repoName) <add> dockerCmd(c, "push", repoName) <add> dockerCmd(c, "rmi", repoName) <add> <add> // Try trusted pull on untrusted tag <add> pullCmd := exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err := runCommandWithOutput(pullCmd) <add> if err == nil { <add> c.Fatalf("Error expected when running trusted pull with:\n%s", out) <add> } <add> <add> if !strings.Contains(string(out), "no trust data available") { <add> c.Fatalf("Missing expected output on trusted pull:\n%s", out) <add> } <add>} <add> <add>func (s *DockerTrustSuite) TestPullWhenCertExpired(c 
*check.C) { <add> c.Skip("Currently changes system time, causing instability") <add> repoName := s.setupTrustedImage(c, "trusted-cert-expired") <add> <add> // Certificates have 10 years of expiration <add> elevenYearsFromNow := time.Now().Add(time.Hour * 24 * 365 * 11) <add> <add> runAtDifferentDate(elevenYearsFromNow, func() { <add> // Try pull <add> pullCmd := exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err := runCommandWithOutput(pullCmd) <add> if err == nil { <add> c.Fatalf("Error running trusted pull in the distant future: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "could not validate the path to a trusted root") { <add> c.Fatalf("Missing expected output on trusted pull in the distant future:\n%s", out) <add> } <add> }) <add> <add> runAtDifferentDate(elevenYearsFromNow, func() { <add> // Try pull <add> pullCmd := exec.Command(dockerBinary, "pull", "--disable-content-trust", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err := runCommandWithOutput(pullCmd) <add> if err != nil { <add> c.Fatalf("Error running untrusted pull in the distant future: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Status: Downloaded") { <add> c.Fatalf("Missing expected output on untrusted pull in the distant future:\n%s", out) <add> } <add> }) <add>} <add> <add>func (s *DockerTrustSuite) TestTrustedPullFromBadTrustServer(c *check.C) { <add> repoName := fmt.Sprintf("%v/dockerclievilpull/trusted:latest", privateRegistryURL) <add> evilLocalConfigDir, err := ioutil.TempDir("", "evil-local-config-dir") <add> if err != nil { <add> c.Fatalf("Failed to create local temp dir") <add> } <add> <add> // tag the image and upload it to the private registry <add> dockerCmd(c, "tag", "busybox", repoName) <add> <add> pushCmd := exec.Command(dockerBinary, "push", repoName) <add> s.trustedCmd(pushCmd) <add> out, _, err := runCommandWithOutput(pushCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted push: %s\n%s", err, out) <add> } <add> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> dockerCmd(c, "rmi", repoName) <add> <add> // Try pull <add> pullCmd := exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err = runCommandWithOutput(pullCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted pull: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Tagging") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> dockerCmd(c, "rmi", repoName) <add> <add> // Kill the notary server, start a new "evil" one. <add> s.not.Close() <add> s.not, err = newTestNotary(c) <add> if err != nil { <add> c.Fatalf("Restarting notary server failed.") <add> } <add> <add> // In order to make an evil server, lets re-init a client (with a different trust dir) and push new data. 
<add> // tag an image and upload it to the private registry <add> dockerCmd(c, "--config", evilLocalConfigDir, "tag", "busybox", repoName) <add> <add> // Push up to the new server <add> pushCmd = exec.Command(dockerBinary, "--config", evilLocalConfigDir, "push", repoName) <add> s.trustedCmd(pushCmd) <add> out, _, err = runCommandWithOutput(pushCmd) <add> if err != nil { <add> c.Fatalf("Error running trusted push: %s\n%s", err, out) <add> } <add> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> // Now, try pulling with the original client from this new trust server. This should fail. <add> pullCmd = exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err = runCommandWithOutput(pullCmd) <add> if err == nil { <add> c.Fatalf("Expected to fail on this pull due to different remote data: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "failed to validate data with current trusted certificates") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add>} <add> <add>func (s *DockerTrustSuite) TestTrustedPullWithExpiredSnapshot(c *check.C) { <add> c.Skip("Currently changes system time, causing instability") <add> repoName := fmt.Sprintf("%v/dockercliexpiredtimestamppull/trusted:latest", privateRegistryURL) <add> // tag the image and upload it to the private registry <add> dockerCmd(c, "tag", "busybox", repoName) <add> <add> // Push with default passphrases <add> pushCmd := exec.Command(dockerBinary, "push", repoName) <add> s.trustedCmd(pushCmd) <add> out, _, err := runCommandWithOutput(pushCmd) <add> if err != nil { <add> c.Fatalf("trusted push failed: %s\n%s", err, out) <add> } <add> <add> if !strings.Contains(string(out), "Signing and pushing trust metadata") { <add> c.Fatalf("Missing expected output on trusted push:\n%s", out) <add> } <add> <add> dockerCmd(c, "rmi", repoName) <add> <add> // Snapshots last for three years. This should be expired <add> fourYearsLater := time.Now().Add(time.Hour * 24 * 365 * 4) <add> <add> // Should succeed because the server transparently re-signs one <add> runAtDifferentDate(fourYearsLater, func() { <add> // Try pull <add> pullCmd := exec.Command(dockerBinary, "pull", repoName) <add> s.trustedCmd(pullCmd) <add> out, _, err = runCommandWithOutput(pullCmd) <add> if err == nil { <add> c.Fatalf("Missing expected error running trusted pull with expired snapshots") <add> } <add> <add> if !strings.Contains(string(out), "repository out-of-date") { <add> c.Fatalf("Missing expected output on trusted pull with expired snapshot:\n%s", out) <add> } <add> }) <add>}
3
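The refactored pull tests above call into a shared `DockerHubPullSuite` whose `Cmd`, `CmdWithError` and `MakeCmd` helpers are defined elsewhere in the suite file; the patch only shows their call sites. Below is a rough, self-contained sketch of the go-check pattern they build on — suite registration plus `c.Assert` with a checker — using the `github.com/go-check/check` import that already appears in the new trusted-pull file. The `pullSuite` type and its `Cmd` helper are illustrative stand-ins, not the real implementation.

```go
package main

import (
	"os/exec"
	"strings"
	"testing"

	"github.com/go-check/check"
)

// Hook gocheck into the standard "go test" runner.
func Test(t *testing.T) { check.TestingT(t) }

// pullSuite is an illustrative stand-in for DockerHubPullSuite.
type pullSuite struct{}

var _ = check.Suite(&pullSuite{})

// Cmd runs the docker binary and fails the test on a non-zero exit code,
// mimicking what a helper like DockerHubPullSuite.Cmd might do.
func (s *pullSuite) Cmd(c *check.C, args ...string) string {
	out, err := exec.Command("docker", args...).CombinedOutput()
	c.Assert(err, check.IsNil, check.Commentf("docker %s failed:\n%s", strings.Join(args, " "), out))
	return string(out)
}

func (s *pullSuite) TestPullHelloWorld(c *check.C) {
	out := s.Cmd(c, "pull", "hello-world:latest")
	c.Assert(out, check.Matches, "(?s).*hello-world.*")
}
```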
Python
Python
add shuffle to the model api
b36c982934334f3bde7187d3a3a25aafa8f60ece
<ide><path>keras/models.py <ide> def fit_generator(self, generator, <ide> max_queue_size=10, <ide> workers=1, <ide> use_multiprocessing=False, <add> shuffle=False, <ide> initial_epoch=0): <ide> """Fits the model on data generated batch-by-batch by a Python generator. <ide> <ide> def fit_generator(self, generator, <ide> non picklable arguments to the generator <ide> as they can't be passed <ide> easily to children processes. <add> shuffle: Whether to shuffle the data at the beginning of each <add> epoch. Only used with instances of `Sequence` ( <add> keras.utils.Sequence). <ide> initial_epoch: Epoch at which to start training <ide> (useful for resuming a previous training run). <ide> <ide> def generate_arrays_from_file(path): <ide> max_queue_size=max_queue_size, <ide> workers=workers, <ide> use_multiprocessing=use_multiprocessing, <add> shuffle=shuffle, <ide> initial_epoch=initial_epoch) <ide> <ide> @interfaces.legacy_generator_methods_support
1
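Per the new docstring, `shuffle` only takes effect when the generator passed to `fit_generator` is a `keras.utils.Sequence`. A minimal usage sketch, assuming a Keras version from the same era as this patch (the exact import path for `Sequence` may differ slightly by version; the model, layer sizes and the toy `RandomBatches` class are made up for illustration):

```python
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.utils import Sequence


class RandomBatches(Sequence):
    """Toy Sequence yielding random (x, y) batches."""

    def __init__(self, n_batches=10, batch_size=32):
        self.n_batches = n_batches
        self.batch_size = batch_size

    def __len__(self):
        return self.n_batches

    def __getitem__(self, idx):
        x = np.random.random((self.batch_size, 8))
        y = np.random.randint(0, 2, (self.batch_size, 1))
        return x, y


model = Sequential([Dense(1, activation='sigmoid', input_shape=(8,))])
model.compile(loss='binary_crossentropy', optimizer='rmsprop')

# shuffle=True reorders the batch indices each epoch; it only applies
# because the generator is a keras.utils.Sequence.
model.fit_generator(RandomBatches(), steps_per_epoch=10, epochs=2, shuffle=True)
```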
Text
Text
fix description of dep0024
9f06a057959edc258fd0014223d9fb0ec917525f
<ide><path>doc/api/deprecations.md <ide> The `os.getNetworkInterfaces()` method is deprecated. Please use the <ide> <ide> Type: End-of-Life <ide> <del>The `REPLServer.prototype.convertToContext()` API is deprecated and should <del>not be used. <add>The `REPLServer.prototype.convertToContext()` API has been removed. <ide> <ide> <a id="DEP0025"></a> <ide> ### DEP0025: require('sys')
1
Javascript
Javascript
use fullhash to avoid deprecation warning
781398355ada986b7f1300da7318186468ad735d
<ide><path>lib/WebpackOptionsDefaulter.js <ide> class WebpackOptionsDefaulter extends OptionsDefaulter { <ide> options => options.mode === "development" <ide> ); <ide> this.set("output.sourceMapFilename", "[file].map[query]"); <del> this.set("output.hotUpdateChunkFilename", "[id].[hash].hot-update.js"); <del> this.set("output.hotUpdateMainFilename", "[hash].hot-update.json"); <add> this.set("output.hotUpdateChunkFilename", "[id].[fullhash].hot-update.js"); <add> this.set("output.hotUpdateMainFilename", "[fullhash].hot-update.json"); <ide> this.set("output.crossOriginLoading", false); <ide> this.set("output.jsonpScriptType", false); <ide> this.set("output.chunkLoadTimeout", 120000);
1
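The same placeholder change can be mirrored in user configuration. A minimal sketch of a `webpack.config.js` that spells out the new defaults explicitly, assuming a webpack version that understands the `[fullhash]` placeholder:

```js
// webpack.config.js — spelling out the new hot-update defaults explicitly.
module.exports = {
  mode: "development",
  output: {
    hotUpdateChunkFilename: "[id].[fullhash].hot-update.js",
    hotUpdateMainFilename: "[fullhash].hot-update.json",
  },
};
```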
Text
Text
add hex alpha
b9d01d13e6b63f9f1ac83eae693079beb6c3a2dc
<ide><path>guide/english/css/background-opacity/index.md <ide> img:hover { <ide> Using the rgba value is most preferable when the background has content like text compared to using the background-color property then going on to use the opacity property. First, it's shorter and second, it eliminates the problem of having the content's transparency change with that of its background, if it's something you do not want. <ide> <ide> <add>**Hex Alpha** <add> <add>```css <add> <add>.class-name { <add> background: #00ff0080; <add> } <add> ``` <add> The example above sets the background with a 50% opacity using hex alpha code. The alpha digit is the last two numbers `80`. The formats are sometimes referred to as #RRGGBBAA and #RGBA and the the AA part is a hex representation of 0-100. For example the hex alpha code of 0% is `00` and the hex alpha code of 100% is `FF`. <add>[A codepen example to show hex alpha values](https://codepen.io/chriscoyier/pen/XjbzAW) <add> <add> <ide> [A codepen example to show background opacity ranges](https://codepen.io/lvcoulter/full/dVrwmK/) <ide> <ide>
1
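For comparison with the rgba syntax discussed earlier in that guide, the two declarations below produce roughly the same 50%-opaque green background — `80` in hex is 128 of 255, i.e. about 50%. The class names are illustrative only:

```css
/* Both backgrounds are ~50% opaque green; 0x80 = 128 of 255. */
.rgba-version { background: rgba(0, 255, 0, 0.5); }
.hex-version  { background: #00ff0080; }
```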
PHP
PHP
apply fixes from styleci
df341e01848027ae9e1e2fb79bf3ae1fca50cad6
<ide><path>tests/Integration/Routing/ResponsableTest.php <ide> public function test_responsable_objects_are_rendered() <ide> } <ide> } <ide> <del> <ide> class TestResponsableResponse implements Responsable <ide> { <ide> public function toResponse()
1
Python
Python
use itervalues instead of iteritems
d5c465ddf4125db71a072cf3033533b02290d08f
<ide><path>flask/cli.py <ide> from werkzeug.utils import import_string <ide> <ide> from . import __version__ <del>from ._compat import getargspec, iteritems, reraise, text_type <add>from ._compat import getargspec, itervalues, reraise, text_type <ide> from .globals import current_app <ide> from .helpers import get_debug_flag, get_env, get_load_dotenv <ide> <ide> def find_best_app(script_info, module): <ide> <ide> # Otherwise find the only object that is a Flask instance. <ide> matches = [ <del> v for k, v in iteritems(module.__dict__) if isinstance(v, Flask) <add> v for v in itervalues(module.__dict__) if isinstance(v, Flask) <ide> ] <ide> <ide> if len(matches) == 1:
1
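A small standalone sketch of the pattern `find_best_app` now uses: when filtering a module namespace for `Flask` instances only the values matter, so iterating values is enough. The `namespace` dict here is a stand-in for `module.__dict__`:

```python
from flask import Flask

app = Flask(__name__)
not_an_app = "just a string"

# Stand-in for module.__dict__ in find_best_app.
namespace = {"app": app, "not_an_app": not_an_app}

# Only the values are inspected, so the keys can be ignored entirely.
matches = [v for v in namespace.values() if isinstance(v, Flask)]
assert matches == [app]
```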
Javascript
Javascript
do less manual conversion
29eae2c468776e2d82ef6272cc5183244c6f6eca
<ide><path>fonts.js <ide> CFF.prototype = { <ide> } <ide> break; <ide> <del> case "div": <del> var num2 = charstring[i - 1]; <del> var num1 = charstring[i - 2]; <del> charstring.splice(i - 2, 3, num1 / num2); <del> i -= 2; <del> break; <del> <ide> case "hsbw": <del> var charWidthVector = charstring[i - 1]; <del> var leftSidebearing = charstring[i - 2]; <add> var charWidthVector = charstring[1]; <add> var leftSidebearing = charstring[0]; <ide> <del> if (leftSidebearing) <del> charstring.splice(i - 2, 3, charWidthVector, leftSidebearing, "hmoveto"); <del> else <del> charstring.splice(i - 2, 3, charWidthVector); <add> charstring.splice(i, 1, leftSidebearing, "hmoveto"); <add> charstring.splice(0, 1); <ide> break; <ide> <ide> case "endchar": <ide> CFF.prototype = { <ide> } else { <ide> charstring[j] = command; <ide> } <del> } else { <del> charstring.splice(j, 1); <del> <del> // command has already been translated, just add them to the <del> // charstring directly <del> for (var k = 0; k < command.length; k++) <del> charstring.splice(j + k, 0, command[k]); <del> j+= command.length - 1; <ide> } <ide> } <ide> return charstring;
1
Text
Text
translate 08.1-more-about-refs.md to japanese
55c3d92bc88fade33f87c5be704002d314eb832f
<ide><path>docs/docs/08.1-more-about-refs.ja-JP.md <add>--- <add>id: more-about-refs <add>title: 参照についての詳細 <add>permalink: more-about-refs-ja-JP.html <add>prev: working-with-the-browser-ja-JP.html <add>next: tooling-integration-ja-JP.html <add> <add>--- <add>renderメソッドからUIの構成をリターンした後、あなたは「リーチアウト」を見て、renderから返ってきたコンポーネントのインスタンスの上でメソッドを実行するでしょう。多くの場合、アプリケーションにおいて、データフローを作成することは必要ではありません。リアクティブなデータフローは常に最新の `props` が `render()` から出力されたそれぞれの子要素に送られたことを保証するからです。しかし、まだ必要であったり、利益をもたらすケースもあります。 <add> <add>`''` という空の文字列でその値をアップデートした後にフォーカスするということを `<input />` 要素(インスタンスのサブ階層に存在します)に伝えたいという場合を考えましょう。 <add> <add>```javascript <add> var App = React.createClass({ <add> getInitialState: function() { <add> return {userInput: ''}; <add> }, <add> handleChange: function(e) { <add> this.setState({userInput: e.target.value}); <add> }, <add> clearAndFocusInput: function() { <add> this.setState({userInput: ''}); // inputをクリアします <add> // ここで、<input /> にフォーカスさせたいです! <add> }, <add> render: function() { <add> return ( <add> <div> <add> <div onClick={this.clearAndFocusInput}> <add> Click to Focus and Reset <add> </div> <add> <input <add> value={this.state.userInput} <add> onChange={this.handleChange} <add> /> <add> </div> <add> ); <add> } <add> }); <add>``` <add> <add>この例では、どうにかしてinputに何かを「伝え」たいということに着目してください。何かというのは、propsから推測できるものではありません。このケースでは、inputが今フォーカスされるべきであるということを「伝え」たいのです。しかし、いくつか問題があります。`render()` で返されるものは実際の「子供の」要素ではありません。しかし、特別なインスタンスにおいて子要素の *説明* を行うよりかは、スナップショットを見ていったほうがいいでしょう。 <add> <add>> 注意: <add>> `render()` からリターンされるものは *実際に* レンダリングされた子要素のインスタンスではないと覚えておいてください。 `render()` からリターンされるものは、ある特定の時点においてのコンポーネントの副階層にある単なる子要素のインスタンスの *説明* に過ぎません。 <add> <add>これは、 `render()` からリターンされる何かを「保持し続ける」ことはできないことを意味します。そして、それは何かしら意味のあることであると予測できます。 <add> <add>```javascript <add> // 反例: このようには記述しないでください! <add> render: function() { <add> var myInput = <input />; // このinputの上にあるメソッドを未来のいつかの <add> this.rememberThisInput = myInput; // タイミングで呼ぼうとしています!いえいっ! <add> return ( <add> <div> <add> <div>...</div> <add> {myInput} <add> </div> <add> ); <add> } <add>``` <add> <add>この反例では、 `<input />` は単に `<input />` の *説明* に過ぎず、この説明は `<input />` の *現実の* **バッキングインスタンス** を作るために使われます。 <add> <add>では、inputの *現実の* バッキングインスタンスについて、どのように話していきましょうか? <add> <add>## 参照の文字列属性 <add> <add>Reactは `render()` からの出力であるコンポーネントであれば何でもアタッチできるとても特別なプロパティをサポートしています。この特別なプロパティは `render()` からのリターンである **バッキングインスタンス** に対応したものに参照することを許可します。これはどのタイミングにおいても、固有のインスタンスであることを保証されています。 <add> <add>これは以下のように単純です。 <add> <add>1. 以下のように、 `render` からのリターンであれば何でも `ref` 属性をアサインする。 <add> <add> ```html <add> <input ref="myInput" /> <add> ``` <add> <add>2. 
いくつかの他のコードは(一般的にイベントハンドラのコード)、以下にあるように `this.refs` を通して **バッキングインスタンス** にアクセスします。 <add> <add> ```javascript <add> this.refs.myInput <add> ``` <add> <add>`React.findDOMNode(this.refs.myInput)` を呼ぶことで、コンポーネントのDOMノードに直接アクセスできる。 <add> <add>## 参照のコールバック属性 <add> <add>`ref` 属性は名前の代わりのコールバック関数になり得ます。このコールバックはコンポーネントがマウントされた直後に実行されます。参照されたコンポーネントはパラメータとして渡され、コールバック関数はコンポーネントを即座に使用するか、または将来使用するために参照を保存します(またはその両方を行います)。 <add> <add>これは、以下のように、 `render` でリターンされてきたものに `ref` 属性をアサインするのと同じくらい簡単です。 <add> <add> ```html <add> <input ref={ function(component){ React.findDOMNode(component).focus();} } /> <add> ``` <add> <add> <add>## 完全な例 <add> <add>```javascript <add> var App = React.createClass({ <add> getInitialState: function() { <add> return {userInput: ''}; <add> }, <add> handleChange: function(e) { <add> this.setState({userInput: e.target.value}); <add> }, <add> clearAndFocusInput: function() { <add> // inputをクリアする <add> this.setState({userInput: ''}, function() { <add> // このコードはコンポーネントが再度レンダリングされた後に実行されます。 <add> React.findDOMNode(this.refs.theInput).focus(); // どーん!フォーカスされました! <add> }); <add> }, <add> render: function() { <add> return ( <add> <div> <add> <div onClick={this.clearAndFocusInput}> <add> Click to Focus and Reset <add> </div> <add> <input <add> ref="theInput" <add> value={this.state.userInput} <add> onChange={this.handleChange} <add> /> <add> </div> <add> ); <add> } <add> }); <add>``` <add> <add>この例では、レンダリング関数は `<input />` インスタンスの説明をリターンします。しかし、実際のインスタンスは `this.refs.theInput` を通してアクセスされます。 `ref="theInput"` を持つ子要素のコンポーネントが `render` からリターンされる限り、 `this.refs.theInput` は固有のインスタンスにアクセスするでしょう。これは `<Typeahead ref="myTypeahead" />` のような高階層の(DOMでない)コンポーネントでも同様に動きます。 <add> <add>## 要約 <add> <add>リアクティブな `props` と `state` を通してのストリーミングのアクセスは便利とは言えないため、参照は特定の子要素のインスタンスにメッセージを送るための素晴らしい方法です。しかし、それらは、アプリケーションを通したデータフローの抽象化につながるわけではありません。デフォルトで、リアクティブなデータフローを使ってください。そして、ユースケースのために `ref` を保存するのは本質的にはリアクティブではありません。 <add> <add>### 利益: <add> <add>- コンポーネントクラス(例えば、Typeaheadのリセットメソッドのようなもの)にパブリックなメソッドを定義できる。また、参照(例えば、 `this.refs.myTypeahead.reset()` のように)を通してそれらのパブリックなメソッドを呼べる。 <add>- DOMの計測を行うことは大体いつも `<input />` のような「ネイティブの」コンポーネントや `React.findDOMNode(this.refs.myInput)` を通した根本のDOMノードにアクセスすることを必要とします。参照は、こういったことを期待通りに行う唯一の実用的な方法です。 <add>- 参照は自動的に管理されます!もし子要素が削除されたら、その参照もまた削除されます。メモリに関しての心配は要りません(あなた自身が参照を維持するために何かおかしなことを行っていなければ)。 <add> <add>### 警告: <add> <add>- *決して* コンポーネントのレンダリングメソッドの中の参照にアクセスしてはいけません。たとえコンポーネントのレンダリングメソッドのいずれかがコールスタックの中のどこかで動いているとしても。 <add>- もしGoogle Closure Compilerのクラッシュからの回復を守りたいなら、文字列として指定されたプロパティとしてアクセスしてはいけないことに気をつけてください。これは、 `ref="myRefString"` として参照が定義されている場合は、 `this.refs['myRefString']` を使ってアクセスしなければいけないことを意味します。 <add>- まだReactでプログラムを書いたことがない場合は、アプリケーションで「何かを起こす」ために参照を使おうとするでしょう。もしそのケースだった場合は、時間をかけて `state` がコンポーネントの階層のどこで保持されるべきか批評的に考えてください。多くの場合は、そのstateを「保持する」ための固有の場所が階層の高いレベルにあることがクリアになります。そのstateをその場所に配置することはよく「何かを起こす」ために `ref` を使うための願望を排除します。代わりに、データフローは普通、目標を達成します。 <ide>\ No newline at end of file
1
Go
Go
add unit test to check wrong uid case
e41507bde2d87cb9bbb0c328e414a39354dae10e
<ide><path>container_test.go <ide> func TestUser(t *testing.T) { <ide> if !strings.Contains(string(output), "uid=1(daemon) gid=1(daemon)") { <ide> t.Error(string(output)) <ide> } <add> <add> // Test an wrong username <add> container, err = builder.Create(&Config{ <add> Image: GetTestImage(runtime).ID, <add> Cmd: []string{"id"}, <add> <add> User: "unkownuser", <add> }, <add> ) <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer runtime.Destroy(container) <add> output, err = container.Output() <add> if container.State.ExitCode == 0 { <add> t.Fatal("Starting container with wrong uid should fail but it passed.") <add> } <ide> } <ide> <ide> func TestMultipleContainers(t *testing.T) { <ide><path>utils/utils.go <ide> func ParseRepositoryTag(repos string) (string, string) { <ide> return repos, "" <ide> } <ide> <add>// UserLookup check if the given username or uid is present in /etc/passwd <add>// and returns the user struct. <add>// If the username is not found, an error is returned. <ide> func UserLookup(uid string) (*user.User, error) { <ide> file, err := ioutil.ReadFile("/etc/passwd") <ide> if err != nil {
2
PHP
PHP
remove all todo from core, create tickets for them
e02eab05d5e598bc2af3489a4e4cba61a2419d36
<ide><path>lib/Cake/Console/Command/Task/TemplateTask.php <ide> protected function _findThemes() { <ide> <ide> $paths[] = $core; <ide> <del> // TEMPORARY TODO remove when all paths are DS terminated <ide> foreach ($paths as $i => $path) { <ide> $paths[$i] = rtrim($path, DS) . DS; <ide> } <ide><path>lib/Cake/Controller/Controller.php <ide> public function redirect($url, $status = null, $exit = true) { <ide> extract($status, EXTR_OVERWRITE); <ide> } <ide> $event = new CakeEvent('Controller.beforeRedirect', $this, array($url, $status, $exit)); <del> //TODO: Remove the following line when the events are fully migrated to the CakeEventManager <add> <ide> list($event->break, $event->breakOn, $event->collectReturn) = array(true, false, true); <ide> $this->getEventManager()->dispatch($event); <ide> <ide><path>lib/Cake/I18n/Multibyte.php <ide> public static function substr($string, $start, $length = null) { <ide> * @param string $charset charset to use for encoding. defaults to UTF-8 <ide> * @param string $newline <ide> * @return string <del> * @TODO: add support for 'Q'('Quoted Printable') encoding <ide> */ <ide> public static function mimeEncode($string, $charset = null, $newline = "\r\n") { <ide> if (!Multibyte::checkMultibyte($string) && strlen($string) < 75) { <ide><path>lib/Cake/Model/Behavior/TreeBehavior.php <ide> public function moveUp(Model $Model, $id = null, $number = 1) { <ide> * 'parent' the values of the parent_id field will be used to populate the left and right fields. The missingParentAction <ide> * parameter only applies to "parent" mode and determines what to do if the parent field contains an id that is not present. <ide> * <del> * @todo Could be written to be faster, *maybe*. Ideally using a subquery and putting all the logic burden on the DB. <ide> * @param Model $Model Model instance <ide> * @param string $mode parent or tree <ide> * @param string|integer $missingParentAction 'return' to do nothing and return, 'delete' to <ide><path>lib/Cake/Model/BehaviorCollection.php <ide> class BehaviorCollection extends ObjectCollection implements CakeEventListener { <ide> /** <ide> * Attaches a model object and loads a list of behaviors <ide> * <del> * @todo Make this method a constructor instead.. <ide> * @param string $modelName <ide> * @param array $behaviors <ide> * @return void <ide><path>lib/Cake/Model/Datasource/DataSource.php <ide> protected function _cacheDescription($object, $data = null) { <ide> * @param Model $linkModel Instance of model to replace $__cakeForeignKey__$ <ide> * @param array $stack <ide> * @return string String of query data with placeholders replaced. <del> * @todo Remove and refactor $assocData, ensure uses of the method have the param removed too. 
<ide> */ <ide> public function insertQueryData($query, $data, $association, $assocData, Model $model, Model $linkModel, $stack) { <ide> $keys = array('{$__cakeID__$}', '{$__cakeForeignKey__$}'); <ide><path>lib/Cake/Network/Http/HttpResponse.php <ide> protected function _decodeChunkedBody($body) { <ide> <ide> $chunkSize = 0; <ide> $hexLength = 0; <del> $chunkExtensionName = ''; <ide> $chunkExtensionValue = ''; <ide> if (isset($match[0])) { <ide> $chunkSize = $match[0]; <ide> } <ide> if (isset($match[1])) { <ide> $hexLength = $match[1]; <ide> } <del> if (isset($match[2])) { <del> $chunkExtensionName = $match[2]; <del> } <ide> if (isset($match[3])) { <ide> $chunkExtensionValue = $match[3]; <ide> } <ide> <ide> $body = substr($body, strlen($chunkSize)); <ide> $chunkLength = hexdec($hexLength); <ide> $chunk = substr($body, 0, $chunkLength); <del> if (!empty($chunkExtensionName)) { <del> // @todo See if there are popular chunk extensions we should implement <del> } <ide> $decodedBody .= $chunk; <ide> if ($chunkLength !== 0) { <ide> $body = substr($body, $chunkLength + strlen("\r\n")); <ide> protected function _parseHeader($header) { <ide> * <ide> * @param array $header Header array containing one ore more 'Set-Cookie' headers. <ide> * @return mixed Either false on no cookies, or an array of cookies received. <del> * @todo Make this 100% RFC 2965 confirm <ide> */ <ide> public function parseCookies($header) { <ide> $cookieHeader = $this->getHeader('Set-Cookie', $header); <ide> public function parseCookies($header) { <ide> * @param string $token Token to unescape <ide> * @param array $chars <ide> * @return string Unescaped token <del> * @todo Test $chars parameter <ide> */ <ide> protected function _unescapeToken($token, $chars = null) { <ide> $regex = '/"([' . implode('', $this->_tokenEscapeChars(true, $chars)) . '])"/'; <ide> protected function _unescapeToken($token, $chars = null) { <ide> * @param boolean $hex true to get them as HEX values, false otherwise <ide> * @param array $chars <ide> * @return array Escape chars <del> * @todo Test $chars parameter <ide> */ <ide> protected function _tokenEscapeChars($hex = true, $chars = null) { <ide> if (!empty($chars)) { <ide><path>lib/Cake/Network/Http/HttpSocket.php <ide> protected function _buildHeader($header, $mode = 'standard') { <ide> * <ide> * @param array $cookies Array of cookies to send with the request. <ide> * @return string Cookie header string to be sent with the request. <del> * @todo Refactor token escape mechanism to be configurable <ide> */ <ide> public function buildCookies($cookies) { <ide> $header = array(); <ide> public function buildCookies($cookies) { <ide> * @param string $token Token to escape <ide> * @param array $chars <ide> * @return string Escaped token <del> * @todo Test $chars parameter <ide> */ <ide> protected function _escapeToken($token, $chars = null) { <ide> $regex = '/([' . implode('', $this->_tokenEscapeChars(true, $chars)) . 
'])/'; <ide> protected function _escapeToken($token, $chars = null) { <ide> * @param boolean $hex true to get them as HEX values, false otherwise <ide> * @param array $chars <ide> * @return array Escape chars <del> * @todo Test $chars parameter <ide> */ <ide> protected function _tokenEscapeChars($hex = true, $chars = null) { <ide> if (!empty($chars)) { <ide><path>lib/Cake/Test/Case/Model/ModelReadTest.php <ide> public function testSaveEmpty() { <ide> /** <ide> * testFindAllWithConditionInChildQuery <ide> * <del> * @todo external conditions like this are going to need to be revisited at some point <ide> * @return void <ide> */ <ide> public function testFindAllWithConditionInChildQuery() { <ide><path>lib/Cake/Test/Case/Network/Http/HttpSocketTest.php <ide> public function testBuildHeader() { <ide> * testBuildCookies method <ide> * <ide> * @return void <del> * @todo Test more scenarios <ide> */ <ide> public function testBuildCookies() { <ide> $cookies = array( <ide><path>lib/Cake/Test/Case/Routing/RouterTest.php <ide> public function testNamedArgsUrlParsing() { <ide> * test url generation with legacy (1.2) style prefix routes. <ide> * <ide> * @return void <del> * @todo Remove tests related to legacy style routes. <ide> * @see testUrlGenerationWithAutoPrefixes <ide> */ <ide> public function testUrlGenerationWithLegacyPrefixes() { <ide><path>lib/Cake/Test/Case/View/ScaffoldViewTest.php <ide> public function testViewScaffold() { <ide> <ide> $this->assertRegExp('/<h2>View Scaffold Mock<\/h2>/', $result); <ide> $this->assertRegExp('/<dl>/', $result); <del> //TODO: add specific tests for fields. <add> <ide> $this->assertRegExp('/<a href="\/scaffold_users\/view\/1">1<\/a>/', $result); //belongsTo links <ide> $this->assertRegExp('/<li><a href="\/scaffold_mock\/edit\/1">Edit Scaffold Mock<\/a>\s<\/li>/', $result); <ide> $this->assertRegExp('/<a href="\#" onclick="if[^>]*>Delete Scaffold Mock<\/a>\s<\/li>/', $result); <ide> public function testAdminIndexScaffold() { <ide> <ide> $this->assertRegExp('/<h2>Scaffold Mock<\/h2>/', $result); <ide> $this->assertRegExp('/<table cellpadding="0" cellspacing="0">/', $result); <del> //TODO: add testing for table generation <add> <ide> $this->assertRegExp('/<li><a href="\/admin\/scaffold_mock\/add">New Scaffold Mock<\/a><\/li>/', $result); <ide> <ide> Configure::write('Routing.prefixes', $_backAdmin); <ide> public function testMultiplePrefixScaffold() { <ide> <ide> $this->assertRegExp('/<h2>Scaffold Mock<\/h2>/', $result); <ide> $this->assertRegExp('/<table cellpadding="0" cellspacing="0">/', $result); <del> //TODO: add testing for table generation <add> <ide> $this->assertRegExp('/<li><a href="\/member\/scaffold_mock\/add">New Scaffold Mock<\/a><\/li>/', $result); <ide> <ide> Configure::write('Routing.prefixes', $_backAdmin); <ide><path>lib/Cake/TestSuite/CakeTestCase.php <ide> protected function skipUnless($condition, $message = '') { <ide> } <ide> return $condition; <ide> } <del> // @codingStandardsIgnoreStop <add> // @codingStandardsIgnoreEnd <ide> <ide> } <ide><path>lib/Cake/Utility/ObjectCollection.php <ide> public function trigger($callback, $params = array(), $options = array()) { <ide> if (empty($event->omitSubject)) { <ide> $subject = $event->subject(); <ide> } <del> //TODO: Temporary BC check, while we move all the triggers system into the CakeEventManager <add> <ide> foreach (array('break', 'breakOn', 'collectReturn', 'modParams') as $opt) { <ide> if (isset($event->{$opt})) { <ide> $options[$opt] = $event->{$opt}; 
<ide><path>lib/Cake/Utility/Security.php <ide> public static function generateAuthKey() { <ide> * <ide> * @param string $authKey Authorization hash <ide> * @return boolean Success <del> * @todo Complete implementation <ide> */ <ide> public static function validateAuthKey($authKey) { <ide> return true; <ide><path>lib/Cake/View/Helper.php <ide> public function field() { <ide> * @param string $id The name of the 'id' attribute. <ide> * @return mixed If $options was an array, an array will be returned with $id set. If a string <ide> * was supplied, a string will be returned. <del> * @todo Refactor this method to not have as many input/output options. <ide> */ <ide> public function domId($options = null, $id = 'id') { <ide> if (is_array($options) && array_key_exists($id, $options) && $options[$id] === null) { <ide> public function domId($options = null, $id = 'id') { <ide> * @param string $key The name of the attribute to be set, defaults to 'name' <ide> * @return mixed If an array was given for $options, an array with $key set will be returned. <ide> * If a string was supplied a string will be returned. <del> * @todo Refactor this method to not have as many input/output options. <ide> */ <ide> protected function _name($options = array(), $field = null, $key = 'name') { <ide> if ($options === null) { <ide> protected function _name($options = array(), $field = null, $key = 'name') { <ide> * @param string $key The name of the attribute to be set, defaults to 'value' <ide> * @return mixed If an array was given for $options, an array with $key set will be returned. <ide> * If a string was supplied a string will be returned. <del> * @todo Refactor this method to not have as many input/output options. <ide> */ <ide> public function value($options = array(), $field = null, $key = 'value') { <ide> if ($options === null) { <ide><path>lib/Cake/View/View.php <ide> protected function _render($viewFile, $data = array()) { <ide> $this->getEventManager()->dispatch(new CakeEvent('View.beforeRenderFile', $this, array($viewFile))); <ide> $content = $this->_evaluate($viewFile, $data); <ide> $afterEvent = new CakeEvent('View.afterRenderFile', $this, array($viewFile, $content)); <del> //TODO: For BC puporses, set extra info in the event object. Remove when appropriate <add> <ide> $afterEvent->modParams = 1; <ide> $this->getEventManager()->dispatch($afterEvent); <ide> $content = $afterEvent->data[1];
17
Python
Python
add mix weights on fine_tune
5d837c37762cb06a230906be80225e0e421c6cb2
<ide><path>spacy/_ml.py <ide> def fine_tune_fwd(docs_tokvecs, drop=0.): <ide> lengths = model.ops.asarray([len(doc) for doc in docs], dtype='i') <ide> <ide> vecs, bp_vecs = embedding.begin_update(docs, drop=drop) <del> <add> flat_tokvecs = embedding.ops.flatten(tokvecs) <add> flat_vecs = embedding.ops.flatten(vecs) <ide> output = embedding.ops.unflatten( <del> embedding.ops.flatten(tokvecs) <del> + embedding.ops.flatten(vecs), <add> (model.mix[0] * flat_vecs + model.mix[1] * flat_tokvecs), <ide> lengths) <ide> <ide> def fine_tune_bwd(d_output, sgd=None): <ide> bp_vecs(d_output, sgd=sgd) <add> flat_grad = model.ops.flatten(d_output) <add> model.d_mix[1] += flat_tokvecs.dot(flat_grad.T).sum() <add> model.d_mix[0] += flat_vecs.dot(flat_grad.T).sum() <add> sgd(model._mem.weights, model._mem.gradient, key=model.id) <ide> return d_output <ide> return output, fine_tune_bwd <ide> model = wrap(fine_tune_fwd, embedding) <add> model.mix = model._mem.add((model.id, 'mix'), (2,)) <add> model.mix.fill(1.) <add> model.d_mix = model._mem.add_gradient((model.id, 'd_mix'), (model.id, 'mix')) <ide> return model <ide> <ide>
1
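Conceptually, the patch replaces a plain sum of the two vector streams with a weighted combination through two learned scalars. A standalone NumPy sketch of just that arithmetic (array shapes and values are made up; this is not the Thinc wiring itself):

```python
import numpy as np

vecs = np.random.random((5, 64))      # embeddings produced by the layer itself
tokvecs = np.random.random((5, 64))   # token vectors passed in from upstream
mix = np.array([1.0, 1.0])            # learned scalar weights, initialised to 1

# Weighted combination instead of a plain element-wise sum.
output = mix[0] * vecs + mix[1] * tokvecs
print(output.shape)  # (5, 64)
```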
Python
Python
update affected tests
c7ffbe78034fea90f52f21950df1ec2c338caa4d
<ide><path>libcloud/dns/drivers/dummy.py <ide> def list_records(self, zone): <ide> >>> record = driver.create_record(name='libcloud', zone=zone, <ide> ... type=RecordType.A, data='127.0.0.1') <ide> >>> list(zone.list_records()) #doctest: +ELLIPSIS <del> [<Record: zone=id-apache.org, name=libcloud, type=A...>] <add> [<Record: zone=apache.org, name=libcloud, type=A...>] <ide> """ <ide> return self._zones[zone.id]['records'].values() <ide> <ide> def create_record(self, name, zone, type, data, extra=None): <ide> >>> record = driver.create_record(name='libcloud', zone=zone, <ide> ... type=RecordType.A, data='127.0.0.1') <ide> >>> record #doctest: +ELLIPSIS <del> <Record: zone=id-apache.org, name=libcloud, type=A, data=127.0.0.1...> <add> <Record: zone=apache.org, name=libcloud, type=A, data=127.0.0.1...> <ide> >>> record = driver.create_record(name='libcloud', zone=zone, <ide> ... type=RecordType.A, data='127.0.0.1') <ide> ... #doctest: +IGNORE_EXCEPTION_DETAIL
1
Javascript
Javascript
add safehtml method to string
bd6c1a3761207efda3cc2a5f3f5a39092447ff95
<ide><path>packages/ember-handlebars/lib/main.js <ide> require("ember-runtime"); <ide> require("ember-views"); <ide> require("ember-handlebars/ext"); <add>require("ember-handlebars/string"); <ide> require("ember-handlebars/helpers"); <ide> require("ember-handlebars/views"); <ide> require("ember-handlebars/controls"); <ide><path>packages/ember-handlebars/lib/string.js <add> <add>Ember.String.htmlSafe = function(str) { <add> return new Handlebars.SafeString(str); <add>}; <add> <add>var htmlSafe = Ember.String.htmlSafe; <add> <add>if (Ember.EXTEND_PROTOTYPES) { <add> <add> /** <add> @see Ember.String.htmlSafe <add> */ <add> String.prototype.htmlSafe = function() { <add> return htmlSafe(this); <add> }; <add> <add>} <ide><path>packages/ember-handlebars/tests/handlebars_test.js <ide> test("should allow values from normal JavaScript hash objects to be used", funct <ide> equal(view.$().text(), "Señor CFC (and Fido)", "prints out values from a hash"); <ide> }); <ide> <add>test("htmlSafe should return an instance of Handlebars.SafeString", function() { <add> var safeString = Ember.String.htmlSafe("you need to be more <b>bold</b>"); <add> <add> ok(safeString instanceof Handlebars.SafeString, "should return SafeString"); <add>}); <add> <ide> test("should escape HTML in normal mustaches", function() { <ide> view = Ember.View.create({ <ide> template: Ember.Handlebars.compile('{{output}}'),
3
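A short usage sketch of the new helper, assuming the Ember 1.x globals (`Ember`, `Handlebars`) that this package targets are loaded:

```js
var safe = Ember.String.htmlSafe("you need to be more <b>bold</b>");
console.log(safe instanceof Handlebars.SafeString); // true

// With Ember.EXTEND_PROTOTYPES enabled (the default), plain strings
// gain the same helper:
var alsoSafe = "you need to be more <b>bold</b>".htmlSafe();
```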
Java
Java
add javadocs to describe serializedsubject class
5c0a955925062cb25bd0f2a20b6803b7b3512d54
<ide><path>src/main/java/rx/internal/operators/OperatorDoOnSubscribe.java <ide> <ide> /** <ide> * Constructs an instance of the operator with the callback that gets invoked when the modified Observable is subscribed <del> * @param unsubscribe The action that gets invoked when the modified {@link rx.Observable} is subscribed <add> * @param subscribe the action that gets invoked when the modified {@link rx.Observable} is subscribed <ide> */ <ide> public OperatorDoOnSubscribe(Action0 subscribe) { <ide> this.subscribe = subscribe; <ide><path>src/main/java/rx/subjects/SerializedSubject.java <ide> import rx.Subscriber; <ide> import rx.observers.SerializedObserver; <ide> <add>/** <add> * Wraps a {@link Subject} so that it is safe to call its various {@code on} methods from different threads. <add> * <p> <add> * When you use an ordinary {@link Subject} as a {@link Subscriber}, you must take care not to call its <add> * {@link Subscriber#onNext} method (or its other {@code on} methods) from multiple threads, as this could lead <add> * to non-serialized calls, which violates the Observable contract and creates an ambiguity in the resulting <add> * Subject. <add> * <p> <add> * To protect a {@code Subject} from this danger, you can convert it into a {@code SerializedSubject} with code <add> * like the following: <add> * <p><pre>{@code <add> * mySafeSubject = new SerializedSubject( myUnsafeSubject ); <add> * }</pre> <add> */ <ide> public class SerializedSubject<T, R> extends Subject<T, R> { <ide> private final SerializedObserver<T> observer; <ide>
2
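The added javadoc shows the wrapping pattern (`new SerializedSubject(myUnsafeSubject)`). A small self-contained sketch of that usage against the RxJava 1.x API, with a `PublishSubject` standing in for the unsafe subject:

```java
import rx.functions.Action1;
import rx.subjects.PublishSubject;
import rx.subjects.SerializedSubject;
import rx.subjects.Subject;

public class SerializedSubjectExample {
    public static void main(String[] args) {
        // An ordinary Subject is not safe to drive from several threads...
        Subject<Integer, Integer> unsafe = PublishSubject.<Integer>create();
        // ...so wrap it, exactly as the new javadoc suggests.
        Subject<Integer, Integer> safe = new SerializedSubject<Integer, Integer>(unsafe);

        safe.subscribe(new Action1<Integer>() {
            @Override
            public void call(Integer value) {
                System.out.println(value);
            }
        });

        safe.onNext(1);      // may now be called from any thread
        safe.onCompleted();
    }
}
```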
PHP
PHP
add keys to transform
df01d9e85140e898f20f6ba8566ba052f80d5c5c
<ide><path>src/Illuminate/Support/Collection.php <ide> public function take($limit) <ide> */ <ide> public function transform(callable $callback) <ide> { <del> $this->items = array_map($callback, $this->items); <add> $keys = array_keys($this->items); <add> <add> $this->items = array_combine($keys, array_map($callback, $this->items, $keys)); <ide> <ide> return $this; <ide> } <ide><path>tests/Support/SupportCollectionTest.php <ide> public function testGetListValueWithAccessors() <ide> <ide> public function testTransform() <ide> { <del> $data = new Collection(array('taylor', 'colin', 'shawn')); <del> $data->transform(function($item) { return strrev($item); }); <del> $this->assertEquals(array('rolyat', 'niloc', 'nwahs'), array_values($data->all())); <add> $data = new Collection(['first' => 'taylor', 'last' => 'otwell']); <add> $data->transform(function($item, $key) { return $key.'-'.strrev($item); }); <add> $this->assertEquals(['first' => 'first-rolyat', 'last' => 'last-llewto'], $data->all()); <ide> } <ide> <ide>
2
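The updated test already demonstrates the new behaviour; extracted into a standalone sketch, `transform()` now hands the key to the callback and keeps the original keys in place:

```php
<?php

use Illuminate\Support\Collection;

$data = new Collection(['first' => 'taylor', 'last' => 'otwell']);

// The callback now receives the key as well, and keys are preserved.
$data->transform(function ($item, $key) {
    return $key.'-'.strrev($item);
});

// ['first' => 'first-rolyat', 'last' => 'last-llewto']
var_dump($data->all());
```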
Text
Text
fix doc typo
7a3566ea3f2fccb3ecaf6693c16a725237228de7
<ide><path>guides/source/upgrading_ruby_on_rails.md <ide> warning by adding the following configuration to your `config/application.rb`: <ide> <ide> See [#17227](https://github.com/rails/rails/pull/17227) for more details. <ide> <del>### ActiveJob jobs now inherent from ApplicationJob by default <add>### ActiveJob jobs now inherit from ApplicationJob by default <ide> <ide> In Rails 4.2 an ActiveJob inherits from `ActiveJob::Base`. In Rails 5.0 this <ide> behavior has changed to now inherit from `ApplicationJob`.
1
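As a concrete illustration of the convention the guide now describes, a Rails 5.0 application ships an `ApplicationJob` base class and newly generated jobs inherit from it (the `GuestsCleanupJob` name below is just an example):

```ruby
# app/jobs/application_job.rb
class ApplicationJob < ActiveJob::Base
end

# app/jobs/guests_cleanup_job.rb — the job name is illustrative
class GuestsCleanupJob < ApplicationJob
  queue_as :default

  def perform(*guests)
    # Do something later
  end
end
```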
Python
Python
change optimizer in cifar10 example
7c4f033c6a7a3a6d157045bb69b0dd1f22ff4ab3
<ide><path>examples/cifar10_cnn.py <ide> from keras.models import Sequential <ide> from keras.layers import Dense, Dropout, Activation, Flatten <ide> from keras.layers import Convolution2D, MaxPooling2D <del>from keras.optimizers import SGD <ide> from keras.utils import np_utils <ide> <ide> batch_size = 32 <ide> model.add(Dense(nb_classes)) <ide> model.add(Activation('softmax')) <ide> <del># Let's train the model using SGD + momentum: <del>sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True) <add># Let's train the model using RMSprop <ide> model.compile(loss='categorical_crossentropy', <del> optimizer=sgd, <add> optimizer='rmsprop', <ide> metrics=['accuracy']) <ide> <ide> X_train = X_train.astype('float32')
1
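The switch is from an explicitly configured SGD instance to the `'rmsprop'` string shorthand. Both forms are sketched below on a throwaway model; the explicit `RMSprop` instance is only needed when the defaults have to be tuned:

```python
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import RMSprop

model = Sequential([Dense(10, activation='softmax', input_shape=(32,))])

# String shorthand, as the example now does (default RMSprop settings)...
model.compile(loss='categorical_crossentropy', optimizer='rmsprop',
              metrics=['accuracy'])

# ...or an explicit instance when the hyperparameters need tuning.
model.compile(loss='categorical_crossentropy',
              optimizer=RMSprop(lr=0.0001, decay=1e-6),
              metrics=['accuracy'])
```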
Javascript
Javascript
use timer type as systrace identifier
75a154b4499e44b4ab31ccf28f9eb1dbf21578ac
<ide><path>Libraries/Core/Timers/JSTimers.js <ide> const timerIDs: Array<?number> = []; <ide> let immediates: Array<number> = []; <ide> let requestIdleCallbacks: Array<number> = []; <ide> const requestIdleCallbackTimeouts: {[number]: number} = {}; <del>const identifiers: Array<null | {methodName: string}> = []; <ide> <ide> let GUID = 1; <ide> let errors: ?Array<Error> = null; <ide> function _allocateCallback(func: Function, type: JSTimerType): number { <ide> timerIDs[freeIndex] = id; <ide> callbacks[freeIndex] = func; <ide> types[freeIndex] = type; <del> if (__DEV__) { <del> const parseErrorStack = require('../Devtools/parseErrorStack'); <del> // TODO: (moti) T55685778 Use Error.captureStackTrace on Hermes <del> const stack = parseErrorStack(new Error()); <del> if (stack) { <del> identifiers[freeIndex] = stack[1]; // skip _allocateCallback's own stack frame <del> } <del> } <ide> return id; <ide> } <ide> <ide> function _callTimer(timerID: number, frameTime: number, didTimeout: ?boolean) { <ide> } <ide> <ide> if (__DEV__) { <del> const identifier = identifiers[timerIndex] || {}; <del> Systrace.beginEvent('Systrace.callTimer: ' + identifier.methodName); <add> Systrace.beginEvent('Systrace.callTimer: ' + type); <ide> } <ide> <ide> // Clear the metadata <ide> function _clearIndex(i: number) { <ide> timerIDs[i] = null; <ide> callbacks[i] = null; <ide> types[i] = null; <del> identifiers[i] = null; <ide> } <ide> <ide> function _freeCallback(timerID: number) {
1
Javascript
Javascript
use uncommon key for candefineproperty test
1c9d970d3c5d3e0c6895ac8b869ce06e3db3f21d
<ide><path>dist/immutable.js <ide> // True if Object.defineProperty works as expected. IE8 fails this test. <ide> var canDefineProperty = (function() { <ide> try { <del> Object.defineProperty({}, 'x', {}); <add> Object.defineProperty({}, '@', {}); <ide> return true; <ide> } catch (e) { <ide> return false; <ide><path>dist/immutable.min.js <ide> else if(s>o||_>p){for(c=0;f;){var g=s>>>h&hn;if(g!==p>>>h&hn)break;g&&(c+=(1<<h) <ide> }function Kr(){return Fn||(Fn=Ur(Qe()))}function Lr(t){return null===t||void 0===t?Br():Tr(t)?t:Br().withMutations(function(e){var r=l(t);se(r.size),r.forEach(function(t){return e.add(t)})})}function Tr(t){return jr(t)&&w(t)}function Wr(t,e){var r=Object.create(Gn);return r.size=t?t.size:0,r._map=t,r.__ownerID=e,r}function Br(){return Zn||(Zn=Wr(Dr()))}function Cr(t,e){var r=function(t){return this instanceof r?void(this._map=Le(t)):new r(t)},n=Object.keys(t),i=r.prototype=Object.create($n);i.constructor=r,e&&(i._name=e),i._defaultValues=t,i._keys=n,i.size=n.length;try{n.forEach(function(t){Object.defineProperty(r.prototype,t,{get:function(){return this.get(t)},set:function(e){ue(this.__ownerID,"Cannot set on an immutable record."),this.set(t,e)}})})}catch(o){}return r}function Jr(t,e,r){var n=Object.create(Object.getPrototypeOf(t));return n._map=e,n.__ownerID=r,n}function Pr(t){return t._name||t.constructor.name}function Hr(t,e){if(t===e)return!0;if(!y(e)||void 0!==t.size&&void 0!==e.size&&t.size!==e.size||void 0!==t.__hash&&void 0!==e.__hash&&t.__hash!==e.__hash||d(t)!==d(e)||m(t)!==m(e)||w(t)!==w(e))return!1;if(0===t.size&&0===e.size)return!0;var r=!g(t);if(w(t)){var n=t.entries();return e.every(function(t,e){var i=n.next().value;return i&&X(i[1],t)&&(r||X(i[0],e))})&&n.next().done}var i=!1;if(void 0===t.size)if(void 0===e.size)t.cacheResult();else{i=!0;var o=t;t=e,e=o}var u=!0,s=e.__iterate(function(e,n){return(r?t.has(e):i?X(e,t.get(n,fn)):X(t.get(n,fn),e))?void 0:(u=!1,!1)});return u&&t.size===s}function Nr(t,e,r){if(!(this instanceof Nr))return new Nr(t,e,r);if(ue(0!==r,"Cannot step a Range by 0"),t=t||0,void 0===e&&(e=1/0),r=void 0===r?1:Math.abs(r),t>e&&(r=-r),this._start=t,this._end=e,this._step=r,this.size=Math.max(0,Math.ceil((e-t)/r-1)+1),0===this.size){if(ti)return ti;ti=this}}function Vr(t,e){if(!(this instanceof Vr))return new Vr(t,e);if(this._value=t,this.size=void 0===e?1/0:Math.max(0,e),0===this.size){if(ei)return ei;ei=this}}function Yr(t,e){var r=function(r){t.prototype[r]=e[r]};return Object.keys(e).forEach(r),Object.getOwnPropertySymbols&&Object.getOwnPropertySymbols(e).forEach(r),t <ide> }function Qr(t,e){return e}function Xr(t,e){return[e,t]}function Fr(t){return function(){return!t.apply(this,arguments)}}function Gr(t){return function(){return-t.apply(this,arguments)}}function Zr(t){return"string"==typeof t?JSON.stringify(t):t}function $r(){return i(arguments)}function tn(t,e){return e>t?1:t>e?-1:0}function en(t){if(1/0===t.size)return 0;var e=w(t),r=d(t),n=e?1:0,i=t.__iterate(r?e?function(t,e){n=31*n+nn(ee(t),ee(e))|0}:function(t,e){n=n+nn(ee(t),ee(e))|0}:e?function(t){n=31*n+ee(t)|0}:function(t){n=n+ee(t)|0});return rn(i,n)}function rn(t,e){return e=qn(e,3432918353),e=qn(e<<15|e>>>-15,461845907),e=qn(e<<13|e>>>-13,5),e=(e+3864292196|0)^t,e=qn(e^e>>>16,2246822507),e=qn(e^e>>>13,3266489909),e=te(e^e>>>16)}function nn(t,e){return t^e+2654435769+(t<<6)+(t>>2)|0}var 
on=Array.prototype.slice,un="delete",sn=5,an=1<<sn,hn=an-1,fn={},cn={value:!1},_n={value:!1};t(p,_),t(v,_),t(l,_),_.isIterable=y,_.isKeyed=d,_.isIndexed=m,_.isAssociative=g,_.isOrdered=w,_.Keyed=p,_.Indexed=v,_.Set=l;var pn="",vn="",ln="",yn="",dn=0,mn=1,gn=2,wn="function"==typeof Symbol&&Symbol.iterator,Sn="@@iterator",zn=wn||Sn;S.prototype.toString=function(){return"[Iterator]"},S.KEYS=dn,S.VALUES=mn,S.ENTRIES=gn,S.prototype.inspect=S.prototype.toSource=function(){return""+this},S.prototype[zn]=function(){return this},t(O,_),O.of=function(){return O(arguments)},O.prototype.toSeq=function(){return this},O.prototype.toString=function(){return this.__toString("Seq {","}")},O.prototype.cacheResult=function(){return!this._cache&&this.__iterateUncached&&(this._cache=this.entrySeq().toArray(),this.size=this._cache.length),this},O.prototype.__iterate=function(t,e){return P(this,t,e,!0)},O.prototype.__iterator=function(t,e){return H(this,t,e,!0)},t(x,O),x.prototype.toKeyedSeq=function(){return this},t(k,O),k.of=function(){return k(arguments)},k.prototype.toIndexedSeq=function(){return this},k.prototype.toString=function(){return this.__toString("Seq [","]") <ide> },k.prototype.__iterate=function(t,e){return P(this,t,e,!1)},k.prototype.__iterator=function(t,e){return H(this,t,e,!1)},t(A,O),A.of=function(){return A(arguments)},A.prototype.toSetSeq=function(){return this},O.isSeq=L,O.Keyed=x,O.Set=A,O.Indexed=k;var In="";O.prototype[In]=!0,t(j,k),j.prototype.get=function(t,e){return this.has(t)?this._array[u(this,t)]:e},j.prototype.__iterate=function(t,e){for(var r=this._array,n=r.length-1,i=0;n>=i;i++)if(t(r[e?n-i:i],i,this)===!1)return i+1;return i},j.prototype.__iterator=function(t,e){var r=this._array,n=r.length-1,i=0;return new S(function(){return i>n?I():z(t,i,r[e?n-i++:i++])})},t(R,x),R.prototype.get=function(t,e){return void 0===e||this.has(t)?this._object[t]:e},R.prototype.has=function(t){return this._object.hasOwnProperty(t)},R.prototype.__iterate=function(t,e){for(var r=this._object,n=this._keys,i=n.length-1,o=0;i>=o;o++){var u=n[e?i-o:o];if(t(r[u],u,this)===!1)return o+1}return o},R.prototype.__iterator=function(t,e){var r=this._object,n=this._keys,i=n.length-1,o=0;return new S(function(){var u=n[e?i-o:o];return o++>i?I():z(t,u,r[u])})},R.prototype[yn]=!0,t(U,k),U.prototype.__iterateUncached=function(t,e){if(e)return this.cacheResult().__iterate(t,e);var r=this._iterable,n=D(r),i=0;if(q(n))for(var o;!(o=n.next()).done&&t(o.value,i++,this)!==!1;);return i},U.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterable,n=D(r);if(!q(n))return new S(I);var i=0;return new S(function(){var e=n.next();return e.done?e:z(t,i++,e.value)})},t(K,k),K.prototype.__iterateUncached=function(t,e){if(e)return this.cacheResult().__iterate(t,e);for(var r=this._iterator,n=this._iteratorCache,i=0;n.length>i;)if(t(n[i],i++,this)===!1)return i;for(var o;!(o=r.next()).done;){var u=o.value;if(n[i]=u,t(u,i++,this)===!1)break}return i},K.prototype.__iteratorUncached=function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterator,n=this._iteratorCache,i=0;return new S(function(){if(i>=n.length){var e=r.next(); <del>if(e.done)return e;n[i]=e.value}return z(t,i,n[i++])})};var bn;t(N,_),t(V,N),t(Y,N),t(Q,N),N.Keyed=V,N.Indexed=Y,N.Set=Q;var qn="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function(t,e){t=0|t,e=0|e;var r=65535&t,n=65535&e;return r*n+((t>>>16)*n+r*(e>>>16)<<16>>>0)|0},Dn=function(){try{return 
Object.defineProperty({},"x",{}),!0}catch(t){return!1}}(),Mn="function"==typeof WeakMap&&new WeakMap,En=0,On="__immutablehash__";"function"==typeof Symbol&&(On=Symbol(On));var xn=16,kn=255,An=0,jn={};t(ae,x),ae.prototype.get=function(t,e){return this._iter.get(t,e)},ae.prototype.has=function(t){return this._iter.has(t)},ae.prototype.valueSeq=function(){return this._iter.valueSeq()},ae.prototype.reverse=function(){var t=this,e=ve(this,!0);return this._useKeys||(e.valueSeq=function(){return t._iter.toSeq().reverse()}),e},ae.prototype.map=function(t,e){var r=this,n=pe(this,t,e);return this._useKeys||(n.valueSeq=function(){return r._iter.toSeq().map(t,e)}),n},ae.prototype.__iterate=function(t,e){var r,n=this;return this._iter.__iterate(this._useKeys?function(e,r){return t(e,r,n)}:(r=e?ke(this):0,function(i){return t(i,e?--r:r++,n)}),e)},ae.prototype.__iterator=function(t,e){if(this._useKeys)return this._iter.__iterator(t,e);var r=this._iter.__iterator(mn,e),n=e?ke(this):0;return new S(function(){var i=r.next();return i.done?i:z(t,e?--n:n++,i.value,i)})},ae.prototype[yn]=!0,t(he,k),he.prototype.contains=function(t){return this._iter.contains(t)},he.prototype.__iterate=function(t,e){var r=this,n=0;return this._iter.__iterate(function(e){return t(e,n++,r)},e)},he.prototype.__iterator=function(t,e){var r=this._iter.__iterator(mn,e),n=0;return new S(function(){var e=r.next();return e.done?e:z(t,n++,e.value,e)})},t(fe,A),fe.prototype.has=function(t){return this._iter.contains(t)},fe.prototype.__iterate=function(t,e){var r=this;return this._iter.__iterate(function(e){return t(e,e,r)},e)},fe.prototype.__iterator=function(t,e){var r=this._iter.__iterator(mn,e);return new S(function(){var e=r.next();return e.done?e:z(t,e.value,e.value,e) <add>if(e.done)return e;n[i]=e.value}return z(t,i,n[i++])})};var bn;t(N,_),t(V,N),t(Y,N),t(Q,N),N.Keyed=V,N.Indexed=Y,N.Set=Q;var qn="function"==typeof Math.imul&&-2===Math.imul(4294967295,2)?Math.imul:function(t,e){t=0|t,e=0|e;var r=65535&t,n=65535&e;return r*n+((t>>>16)*n+r*(e>>>16)<<16>>>0)|0},Dn=function(){try{return Object.defineProperty({},"@",{}),!0}catch(t){return!1}}(),Mn="function"==typeof WeakMap&&new WeakMap,En=0,On="__immutablehash__";"function"==typeof Symbol&&(On=Symbol(On));var xn=16,kn=255,An=0,jn={};t(ae,x),ae.prototype.get=function(t,e){return this._iter.get(t,e)},ae.prototype.has=function(t){return this._iter.has(t)},ae.prototype.valueSeq=function(){return this._iter.valueSeq()},ae.prototype.reverse=function(){var t=this,e=ve(this,!0);return this._useKeys||(e.valueSeq=function(){return t._iter.toSeq().reverse()}),e},ae.prototype.map=function(t,e){var r=this,n=pe(this,t,e);return this._useKeys||(n.valueSeq=function(){return r._iter.toSeq().map(t,e)}),n},ae.prototype.__iterate=function(t,e){var r,n=this;return this._iter.__iterate(this._useKeys?function(e,r){return t(e,r,n)}:(r=e?ke(this):0,function(i){return t(i,e?--r:r++,n)}),e)},ae.prototype.__iterator=function(t,e){if(this._useKeys)return this._iter.__iterator(t,e);var r=this._iter.__iterator(mn,e),n=e?ke(this):0;return new S(function(){var i=r.next();return i.done?i:z(t,e?--n:n++,i.value,i)})},ae.prototype[yn]=!0,t(he,k),he.prototype.contains=function(t){return this._iter.contains(t)},he.prototype.__iterate=function(t,e){var r=this,n=0;return this._iter.__iterate(function(e){return t(e,n++,r)},e)},he.prototype.__iterator=function(t,e){var r=this._iter.__iterator(mn,e),n=0;return new S(function(){var e=r.next();return e.done?e:z(t,n++,e.value,e)})},t(fe,A),fe.prototype.has=function(t){return 
this._iter.contains(t)},fe.prototype.__iterate=function(t,e){var r=this;return this._iter.__iterate(function(e){return t(e,e,r)},e)},fe.prototype.__iterator=function(t,e){var r=this._iter.__iterator(mn,e);return new S(function(){var e=r.next();return e.done?e:z(t,e.value,e.value,e) <ide> })},t(ce,x),ce.prototype.entrySeq=function(){return this._iter.toSeq()},ce.prototype.__iterate=function(t,e){var r=this;return this._iter.__iterate(function(e){return e?(xe(e),t(e[1],e[0],r)):void 0},e)},ce.prototype.__iterator=function(t,e){var r=this._iter.__iterator(mn,e);return new S(function(){for(;;){var e=r.next();if(e.done)return e;var n=e.value;if(n)return xe(n),t===gn?e:z(t,n[0],n[1],e)}})},he.prototype.cacheResult=ae.prototype.cacheResult=fe.prototype.cacheResult=ce.prototype.cacheResult=Re,t(Le,V),Le.prototype.toString=function(){return this.__toString("Map {","}")},Le.prototype.get=function(t,e){return this._root?this._root.get(0,void 0,t,e):e},Le.prototype.set=function(t,e){return Xe(this,t,e)},Le.prototype.setIn=function(t,e){return this.updateIn(t,fn,function(){return e})},Le.prototype.remove=function(t){return Xe(this,t,fn)},Le.prototype.deleteIn=function(t){return this.updateIn(t,function(){return fn})},Le.prototype.update=function(t,e,r){return 1===arguments.length?t(this):this.updateIn([t],e,r)},Le.prototype.updateIn=function(t,e,r){r||(r=e,e=void 0);var n=or(this,Ke(t),e,r);return n===fn?void 0:n},Le.prototype.clear=function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):Qe()},Le.prototype.merge=function(){return rr(this,void 0,arguments)},Le.prototype.mergeWith=function(t){var e=on.call(arguments,1);return rr(this,t,e)},Le.prototype.mergeIn=function(t){var e=on.call(arguments,1);return this.updateIn(t,Qe(),function(t){return t.merge.apply(t,e)})},Le.prototype.mergeDeep=function(){return rr(this,nr(void 0),arguments)},Le.prototype.mergeDeepWith=function(t){var e=on.call(arguments,1);return rr(this,nr(t),e)},Le.prototype.mergeDeepIn=function(t){var e=on.call(arguments,1);return this.updateIn(t,Qe(),function(t){return t.mergeDeep.apply(t,e)})},Le.prototype.sort=function(t){return Ir(qe(this,t))},Le.prototype.sortBy=function(t,e){return Ir(qe(this,e,t))},Le.prototype.withMutations=function(t){var e=this.asMutable();return t(e),e.wasAltered()?e.__ensureOwner(this.__ownerID):this <ide> },Le.prototype.asMutable=function(){return this.__ownerID?this:this.__ensureOwner(new n)},Le.prototype.asImmutable=function(){return this.__ensureOwner()},Le.prototype.wasAltered=function(){return this.__altered},Le.prototype.__iterator=function(t,e){return new He(this,t,e)},Le.prototype.__iterate=function(t,e){var r=this,n=0;return this._root&&this._root.iterate(function(e){return n++,t(e[1],e[0],r)},e),n},Le.prototype.__ensureOwner=function(t){return t===this.__ownerID?this:t?Ye(this.size,this._root,t,this.__hash):(this.__ownerID=t,this.__altered=!1,this)},Le.isMap=Te;var Rn="",Un=Le.prototype;Un[Rn]=!0,Un[un]=Un.remove,Un.removeIn=Un.deleteIn,We.prototype.get=function(t,e,r,n){for(var i=this.entries,o=0,u=i.length;u>o;o++)if(X(r,i[o][0]))return i[o][1];return n},We.prototype.update=function(t,e,n,o,u,s,a){for(var h=u===fn,f=this.entries,c=0,_=f.length;_>c&&!X(o,f[c][0]);c++);var p=_>c;if(p?f[c][1]===u:h)return this;if(r(a),(h||!p)&&r(s),!h||1!==f.length){if(!p&&!h&&f.length>=Ln)return $e(t,f,o,u);var v=t&&t===this.ownerID,l=v?f:i(f);return p?h?c===_-1?l.pop():l[c]=l.pop():l[c]=[o,u]:l.push([o,u]),v?(this.entries=l,this):new 
We(t,l)}},Be.prototype.get=function(t,e,r,n){void 0===e&&(e=ee(r));var i=1<<((0===t?e:e>>>t)&hn),o=this.bitmap;return 0===(o&i)?n:this.nodes[ur(o&i-1)].get(t+sn,e,r,n)},Be.prototype.update=function(t,e,r,n,i,o,u){void 0===r&&(r=ee(n));var s=(0===e?r:r>>>e)&hn,a=1<<s,h=this.bitmap,f=0!==(h&a);if(!f&&i===fn)return this;var c=ur(h&a-1),_=this.nodes,p=f?_[c]:void 0,v=Fe(p,t,e+sn,r,n,i,o,u);if(v===p)return this;if(!f&&v&&_.length>=Tn)return er(t,_,h,s,v);if(f&&!v&&2===_.length&&Ge(_[1^c]))return _[1^c];if(f&&v&&1===_.length&&Ge(v))return v;var l=t&&t===this.ownerID,y=f?v?h:h^a:h|a,d=f?v?sr(_,c,v,l):hr(_,c,l):ar(_,c,v,l);return l?(this.bitmap=y,this.nodes=d,this):new Be(t,y,d)},Ce.prototype.get=function(t,e,r,n){void 0===e&&(e=ee(r));var i=(0===t?e:e>>>t)&hn,o=this.nodes[i];return o?o.get(t+sn,e,r,n):n},Ce.prototype.update=function(t,e,r,n,i,o,u){void 0===r&&(r=ee(n));var s=(0===e?r:r>>>e)&hn,a=i===fn,h=this.nodes,f=h[s]; <ide> if(a&&!f)return this;var c=Fe(f,t,e+sn,r,n,i,o,u);if(c===f)return this;var _=this.count;if(f){if(!c&&(_--,Wn>_))return tr(t,h,_,s)}else _++;var p=t&&t===this.ownerID,v=sr(h,s,c,p);return p?(this.count=_,this.nodes=v,this):new Ce(t,_,v)},Je.prototype.get=function(t,e,r,n){for(var i=this.entries,o=0,u=i.length;u>o;o++)if(X(r,i[o][0]))return i[o][1];return n},Je.prototype.update=function(t,e,n,o,u,s,a){void 0===n&&(n=ee(o));var h=u===fn;if(n!==this.keyHash)return h?this:(r(a),r(s),Ze(this,t,e,n,[o,u]));for(var f=this.entries,c=0,_=f.length;_>c&&!X(o,f[c][0]);c++);var p=_>c;if(p?f[c][1]===u:h)return this;if(r(a),(h||!p)&&r(s),h&&2===_)return new Pe(t,this.keyHash,f[1^c]);var v=t&&t===this.ownerID,l=v?f:i(f);return p?h?c===_-1?l.pop():l[c]=l.pop():l[c]=[o,u]:l.push([o,u]),v?(this.entries=l,this):new Je(t,this.keyHash,l)},Pe.prototype.get=function(t,e,r,n){return X(r,this.entry[0])?this.entry[1]:n},Pe.prototype.update=function(t,e,n,i,o,u,s){var a=o===fn,h=X(i,this.entry[0]);return(h?o===this.entry[1]:a)?this:(r(s),a?void r(u):h?t&&t===this.ownerID?(this.entry[1]=o,this):new Pe(t,this.keyHash,[i,o]):(r(u),Ze(this,t,e,ee(i),[i,o])))},We.prototype.iterate=Je.prototype.iterate=function(t,e){for(var r=this.entries,n=0,i=r.length-1;i>=n;n++)if(t(r[e?i-n:n])===!1)return!1},Be.prototype.iterate=Ce.prototype.iterate=function(t,e){for(var r=this.nodes,n=0,i=r.length-1;i>=n;n++){var o=r[e?i-n:n];if(o&&o.iterate(t,e)===!1)return!1}},Pe.prototype.iterate=function(t){return t(this.entry)},t(He,S),He.prototype.next=function(){for(var t=this._type,e=this._stack;e;){var r,n=e.node,i=e.index++;if(n.entry){if(0===i)return Ne(t,n.entry)}else if(n.entries){if(r=n.entries.length-1,r>=i)return Ne(t,n.entries[this._reverse?r-i:i])}else if(r=n.nodes.length-1,r>=i){var o=n.nodes[this._reverse?r-i:i];if(o){if(o.entry)return Ne(t,o.entry);e=this._stack=Ve(o,e)}continue}e=this._stack=this._stack.__prev}return I()};var Kn,Ln=an/4,Tn=an/2,Wn=an/4;t(fr,Y),fr.of=function(){return this(arguments)},fr.prototype.toString=function(){return this.__toString("List [","]") <ide><path>src/Hash.js <ide> function hashJSObj(obj) { <ide> // True if Object.defineProperty works as expected. IE8 fails this test. <ide> var canDefineProperty = (function() { <ide> try { <del> Object.defineProperty({}, 'x', {}); <add> Object.defineProperty({}, '@', {}); <ide> return true; <ide> } catch (e) { <ide> return false;
3
Text
Text
add info on notation
fe204ef8cb911f41537472e905b4d1fb4bfa8d82
<ide><path>guide/english/mathematics/derivatives/index.md <ide> title: Derivatives <ide> <ide> ## Derivatives <ide> <del>The derivative is the instantaneous rate of change of a function, or the rate of change of a function at any particular instant in time. The derivative is a tool of calculus and you can determine it by taking a limit. <add>The derivative is the instantaneous rate of change of a function, or the rate of change of a function at any particular instant in time. The derivative is a tool of calculus and you can determine it by taking a limit. The common notation for taking the derivative is (dy/dx) but it can be written as (d/dx)y. <ide> <ide> You can understand the concept of derivative by comparing it with the concept of slope (the average rate of change) in algebra. <ide>
1
Python
Python
add test for ticket #243
4963af21d20a3161d17f93b187b49cfdaa3664cd
<ide><path>numpy/core/tests/test_regression.py <ide> def check_object_array_shape(self,level=rlevel): <ide> assert_equal(N.array([[],[],[]],dtype=object).shape, (3,0)) <ide> assert_equal(N.array([[3,4],[5,6],None],dtype=object).shape, (3,)) <ide> <add> def check_mem_around(self,level=rlevel): <add> """Ticket #243""" <add> x = N.zeros((1,)) <add> y = [0] <add> decimal = 6 <add> N.around(abs(x-y),decimal) <= 10.0**(-decimal) <add> <ide> def check_character_array_strip(self,level=rlevel): <ide> """Ticket #246""" <ide> x = N.char.array(("x","x ","x "))
1
Python
Python
fix glue mnli when using `max_eval_samples`
ef6741fe65c130ddb33c43ad2ba2b82f40ea7e90
<ide><path>examples/pytorch/text-classification/run_glue.py <ide> def compute_metrics(p: EvalPrediction): <ide> eval_datasets = [eval_dataset] <ide> if data_args.task_name == "mnli": <ide> tasks.append("mnli-mm") <del> eval_datasets.append(raw_datasets["validation_mismatched"]) <add> valid_mm_dataset = raw_datasets["validation_mismatched"] <add> if data_args.max_eval_samples is not None: <add> max_eval_samples = min(len(valid_mm_dataset), data_args.max_eval_samples) <add> valid_mm_dataset = valid_mm_dataset.select(range(max_eval_samples)) <add> eval_datasets.append(valid_mm_dataset) <ide> combined = {} <ide> <ide> for eval_dataset, task in zip(eval_datasets, tasks):
1
Javascript
Javascript
catch syntax errors better
4631c503e35cd3bb75a10424dd47a73c12ec468e
<ide><path>lib/repl.js <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> <ide> function defaultEval(code, context, file, cb) { <ide> var err, result; <add> // first, create the Script object to check the syntax <ide> try { <del> if (self.useGlobal) { <del> result = vm.runInThisContext(code, { <del> filename: file, <del> displayErrors: false <del> }); <del> } else { <del> result = vm.runInContext(code, context, { <del> filename: file, <del> displayErrors: false <del> }); <del> } <add> var script = vm.createScript(code, { <add> filename: file, <add> displayErrors: false <add> }); <ide> } catch (e) { <ide> err = e; <add> err._isSyntaxError = isSyntaxError(err); <ide> } <del> if (err && process.domain && !isSyntaxError(err)) { <add> if (!err) { <add> try { <add> if (self.useGlobal) { <add> result = script.runInThisContext({ displayErrors: false }); <add> } else { <add> result = script.runInContext(context, { displayErrors: false }); <add> } <add> } catch (e) { <add> err = e; <add> err._isSyntaxError = false; <add> } <add> } <add> if (err && process.domain && !err._isSyntaxError) { <ide> process.domain.emit('error', err); <ide> process.domain.exit(); <ide> } <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> self._domain.on('error', function(e) { <ide> self.outputStream.write((e.stack || e) + '\n'); <ide> self.bufferedCommand = ''; <add> self.lines.level = []; <ide> self.displayPrompt(); <ide> }); <ide> <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> <ide> self.resetContext(); <ide> self.bufferedCommand = ''; <add> self.lines.level = []; <ide> <ide> self.prompt = !util.isUndefined(prompt) ? prompt : '> '; <ide> <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> } <ide> <ide> self.bufferedCommand = ''; <add> self.lines.level = []; <ide> self.displayPrompt(); <ide> }); <ide> <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> self.context, <ide> 'repl', <ide> function(e, ret) { <del> if (e && !isSyntaxError(e)) return finish(e); <add> if (e && !e._isSyntaxError) return finish(e); <ide> <ide> if (util.isFunction(ret) && <ide> /^[\r\n\s]*function/.test(evalCmd) || e) { <ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) { <ide> self.memory(cmd); <ide> <ide> // If error was SyntaxError and not JSON.parse error <del> if (isSyntaxError(e)) { <add> if (e && e._isSyntaxError) { <ide> if (!self.bufferedCommand && cmd.trim().match(/^npm /)) { <ide> self.outputStream.write('npm should be run outside of the ' + <ide> 'node repl, in your normal shell.\n' + <ide> REPLServer.prototype.convertToContext = function(cmd) { <ide> <ide> /** <ide> * Returns `true` if "e" is a SyntaxError, `false` otherwise. <del> * This function filters out false positives likes JSON.parse() errors and <del> * RegExp syntax errors. 
<add> * filters out strict-mode errors, which are not recoverable <ide> */ <ide> function isSyntaxError(e) { <ide> // Convert error to string <ide> e = e && (e.stack || e.toString()); <ide> return e && e.match(/^SyntaxError/) && <del> // RegExp syntax error <del> !e.match(/^SyntaxError: Invalid regular expression/) && <del> !e.match(/^SyntaxError: Invalid flags supplied to RegExp constructor/) && <ide> // "strict mode" syntax errors <del> !e.match(/^SyntaxError: .*strict mode.*/i) && <del> // JSON.parse() error <del> !e.match(/\n {4}at Object.parse \(native\)\n/); <add> !e.match(/^SyntaxError: .*strict mode.*/i); <ide> } <ide><path>test/simple/test-repl.js <ide> var net = require('net'), <ide> // absolute path to test/fixtures/a.js <ide> var moduleFilename = require('path').join(common.fixturesDir, 'a'); <ide> <del>common.error('repl test'); <add>console.error('repl test'); <ide> <ide> // function for REPL to run <ide> invoke_me = function(arg) { <ide> function send_expect(list) { <ide> if (list.length > 0) { <ide> var cur = list.shift(); <ide> <del> common.error('sending ' + JSON.stringify(cur.send)); <add> console.error('sending ' + JSON.stringify(cur.send)); <ide> <ide> cur.client.expect = cur.expect; <ide> cur.client.list = list; <ide> function error_test() { <ide> <ide> client_unix.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> common.error('Unix data: ' + JSON.stringify(read_buffer) + ', expecting ' + <add> console.error('Unix data: ' + JSON.stringify(read_buffer) + ', expecting ' + <ide> (client_unix.expect.exec ? <ide> client_unix.expect : <ide> JSON.stringify(client_unix.expect))); <ide> function error_test() { <ide> // if it's an exact match, then don't do the regexp <ide> if (read_buffer !== client_unix.expect) { <ide> assert.ok(read_buffer.match(client_unix.expect)); <del> common.error('match'); <add> console.error('match'); <ide> } <ide> read_buffer = ''; <ide> if (client_unix.list && client_unix.list.length > 0) { <ide> send_expect(client_unix.list); <ide> } else { <del> common.error('End of Error test, running TCP test.'); <add> console.error('End of Error test, running TCP test.'); <ide> tcp_test(); <ide> } <ide> <ide> function error_test() { <ide> if (client_unix.list && client_unix.list.length > 0) { <ide> send_expect(client_unix.list); <ide> } else { <del> common.error('End of Error test, running TCP test.\n'); <add> console.error('End of Error test, running TCP test.\n'); <ide> tcp_test(); <ide> } <ide> <ide> } else { <del> common.error('didn\'t see prompt yet, buffering.'); <add> console.error('didn\'t see prompt yet, buffering.'); <ide> } <ide> }); <ide> <ide> function error_test() { <ide> // You can recover with the .break command <ide> { client: client_unix, send: '.break', <ide> expect: prompt_unix }, <add> // But passing the same string to eval() should throw <add> { client: client_unix, send: 'eval("function test_func() {")', <add> expect: /^SyntaxError: Unexpected end of input/ }, <ide> // Floating point numbers are not interpreted as REPL commands. 
<ide> { client: client_unix, send: '.1234', <ide> expect: '0.1234' }, <ide> function tcp_test() { <ide> <ide> client_tcp.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> common.error('TCP data: ' + JSON.stringify(read_buffer) + <add> console.error('TCP data: ' + JSON.stringify(read_buffer) + <ide> ', expecting ' + JSON.stringify(client_tcp.expect)); <ide> if (read_buffer.indexOf(prompt_tcp) !== -1) { <ide> assert.strictEqual(client_tcp.expect, read_buffer); <del> common.error('match'); <add> console.error('match'); <ide> read_buffer = ''; <ide> if (client_tcp.list && client_tcp.list.length > 0) { <ide> send_expect(client_tcp.list); <ide> } else { <del> common.error('End of TCP test.\n'); <add> console.error('End of TCP test.\n'); <ide> clean_up(); <ide> } <ide> } else { <del> common.error('didn\'t see prompt yet, buffering'); <add> console.error('didn\'t see prompt yet, buffering'); <ide> } <ide> }); <ide> <ide> function unix_test() { <ide> <ide> client_unix.on('data', function(data) { <ide> read_buffer += data.toString('ascii', 0, data.length); <del> common.error('Unix data: ' + JSON.stringify(read_buffer) + <add> console.error('Unix data: ' + JSON.stringify(read_buffer) + <ide> ', expecting ' + JSON.stringify(client_unix.expect)); <ide> if (read_buffer.indexOf(prompt_unix) !== -1) { <ide> assert.strictEqual(client_unix.expect, read_buffer); <del> common.error('match'); <add> console.error('match'); <ide> read_buffer = ''; <ide> if (client_unix.list && client_unix.list.length > 0) { <ide> send_expect(client_unix.list); <ide> } else { <del> common.error('End of Unix test, running Error test.\n'); <add> console.error('End of Unix test, running Error test.\n'); <ide> process.nextTick(error_test); <ide> } <ide> } else { <del> common.error('didn\'t see prompt yet, buffering.'); <add> console.error('didn\'t see prompt yet, buffering.'); <ide> } <ide> }); <ide>
2
Javascript
Javascript
fix race condition in multicompiler queuing
174eb3cdf8b4bdffdd4a616f7ed19937ddd0dad9
<ide><path>lib/MultiCompiler.js <ide> module.exports = class MultiCompiler { <ide> * @returns {SetupResult[]} result of setup <ide> */ <ide> _runGraph(setup, run, callback) { <del> /** @typedef {{ compiler: Compiler, result: Stats, state: "blocked" | "queued" | "running" | "done", children: Node[], parents: Node[] }} Node */ <add> /** @typedef {{ compiler: Compiler, result: Stats, state: "pending" | "blocked" | "queued" | "running" | "running-outdated" | "done", children: Node[], parents: Node[] }} Node */ <add> <add> // State transitions for nodes: <add> // -> blocked (initial) <add> // blocked -> queued [add to queue] (when all parents done) <add> // queued -> running [running++] (when processing the queue) <add> // running -> done [running--] (when compilation is done) <add> // done -> pending (when invalidated from file change) <add> // pending -> blocked (when invalidated from aggregated changes) <add> // done -> blocked (when invalidated, from parent invalidation) <add> // running -> running-outdated (when invalidated, either from change or parent invalidation) <add> // running-outdated -> blocked [running--] (when compilation is done) <ide> <ide> /** @type {Node[]} */ <ide> const nodes = this.compilers.map(compiler => ({ <ide> module.exports = class MultiCompiler { <ide> running--; <ide> if (node.state === "running") { <ide> node.state = "done"; <del> } <del> for (const child of node.children) { <del> if (child.state !== "blocked") continue; <del> if (child.parents.every(p => p.state === "done")) { <del> child.state = "queued"; <del> queue.enqueue(child); <add> for (const child of node.children) { <add> checkUnblocked(child); <ide> } <add> } else if (node.state === "running-outdated") { <add> node.state = "blocked"; <add> checkUnblocked(node); <ide> } <ide> process.nextTick(processQueue); <ide> }; <ide> /** <ide> * @param {Node} node node <ide> * @returns {void} <ide> */ <del> const nodeInvalid = node => { <del> if (node.state === "done" || node.state === "running") { <add> const nodeInvalidFromParent = node => { <add> if (node.state === "done") { <ide> node.state = "blocked"; <add> } else if (node.state === "running") { <add> node.state = "running-outdated"; <ide> } <ide> for (const child of node.children) { <del> nodeInvalid(child); <add> nodeInvalidFromParent(child); <add> } <add> }; <add> /** <add> * @param {Node} node node <add> * @returns {void} <add> */ <add> const nodeInvalid = node => { <add> if (node.state === "done") { <add> node.state = "pending"; <add> } else if (node.state === "running") { <add> node.state = "running-outdated"; <add> } <add> for (const child of node.children) { <add> nodeInvalidFromParent(child); <ide> } <ide> }; <ide> /** <ide> module.exports = class MultiCompiler { <ide> */ <ide> const nodeChange = node => { <ide> nodeInvalid(node); <add> if (node.state === "pending") { <add> node.state = "blocked"; <add> } <add> checkUnblocked(node); <add> processQueue(); <add> }; <add> /** <add> * @param {Node} node node <add> * @returns {void} <add> */ <add> const checkUnblocked = node => { <ide> if ( <ide> node.state === "blocked" && <ide> node.parents.every(p => p.state === "done") <ide> ) { <ide> node.state = "queued"; <ide> queue.enqueue(node); <del> processQueue(); <ide> } <ide> }; <add> <ide> const setupResults = []; <ide> nodes.forEach((node, i) => { <ide> setupResults.push( <ide> setup( <ide> node.compiler, <ide> i, <ide> nodeDone.bind(null, node), <del> () => node.state === "blocked" || node.state === "queued", <add> () => node.state !== "running", <ide> 
() => nodeChange(node), <ide> () => nodeInvalid(node) <ide> ) <ide> module.exports = class MultiCompiler { <ide> node.state = "running"; <ide> run(node.compiler, nodeDone.bind(null, node)); <ide> } <del> if (!errored && running === 0) { <add> if ( <add> !errored && <add> running === 0 && <add> nodes.every(node => node.state === "done") <add> ) { <ide> const stats = []; <ide> for (const node of nodes) { <ide> const result = node.result; <ide><path>test/MultiCompiler.test.js <ide> describe("MultiCompiler", function () { <ide> } <ide> }); <ide> }); <add> it("shouldn't hang when invalidating during build", done => { <add> const compiler = webpack( <add> Object.assign([ <add> { <add> name: "a", <add> mode: "development", <add> context: path.join(__dirname, "fixtures"), <add> entry: "./a.js" <add> }, <add> { <add> name: "b", <add> mode: "development", <add> context: path.join(__dirname, "fixtures"), <add> entry: "./b.js", <add> dependencies: ["a"] <add> } <add> ]) <add> ); <add> compiler.outputFileSystem = createFsFromVolume(new Volume()); <add> const watchCallbacks = []; <add> const watchCallbacksUndelayed = []; <add> let firstRun = true; <add> compiler.watchFileSystem = { <add> watch( <add> files, <add> directories, <add> missing, <add> startTime, <add> options, <add> callback, <add> callbackUndelayed <add> ) { <add> watchCallbacks.push(callback); <add> watchCallbacksUndelayed.push(callbackUndelayed); <add> if (firstRun && files.has(path.join(__dirname, "fixtures", "a.js"))) { <add> process.nextTick(() => { <add> callback(null, new Map(), new Map(), new Set(), new Set()); <add> }); <add> firstRun = false; <add> } <add> } <add> }; <add> compiler.watch({}, (err, stats) => { <add> done(err); <add> }); <add> }); <ide> });
2
Mixed
Text
improve documentation for asset_url
db9a5c5a1f8e2a1590f0ac9436587d58a67a629e
<ide><path>actionview/CHANGELOG.md <ide> <ide> *Piotr Chmolowski, Łukasz Strzałkowski* <ide> <add>* Allow custom `:host` option to be passed to `asset_url` helper that <add> overwrites `config.action_controller.asset_host` for particular asset. <add> <add> *Hubert Łępicki* <ide> <ide> Please check [4-1-stable](https://github.com/rails/rails/blob/4-1-stable/actionview/CHANGELOG.md) for previous changes. <ide><path>actionview/lib/action_view/helpers/asset_url_helper.rb <ide> def asset_path(source, options = {}) <ide> <ide> # Computes the full URL to an asset in the public directory. This <ide> # will use +asset_path+ internally, so most of their behaviors <del> # will be the same. <add> # will be the same. If :host options is set, it overwrites global <add> # +config.action_controller.asset_host+ setting. <add> # <add> # All other options provided are forwarded to +asset_path+ call. <add> # <add> # asset_url "application.js" # => http://example.com/application.js <add> # asset_url "application.js", host: "http://cdn.example.com" # => http://cdn.example.com/javascripts/application.js <add> # <ide> def asset_url(source, options = {}) <ide> path_to_asset(source, options.merge(:protocol => :request)) <ide> end <ide><path>guides/source/asset_pipeline.md <ide> that it plays nicely with the pipeline. You may find quirks related to your <ide> specific set up, you may not. The defaults nginx uses, for example, should give <ide> you no problems when used as an HTTP cache. <ide> <add>If you want to serve only some assets from your CDN, you can use custom <add>`:host` option of `asset_url` helper, which overwrites value set in <add>`config.action_controller.asset_host`. <add> <add>```ruby <add>asset_url 'image.png', :host => 'http://cdn.example.com' <add>``` <add> <ide> Customizing the Pipeline <ide> ------------------------ <ide>
3
Text
Text
fix a couple of issues in get started docs
1cd06fdd1ac5ce1f91291330d9f89679468ce393
<ide><path>docs/getstarted/step_four.md <ide> commands to run. Your recipe is going to be very short. <ide> 2. Now, build your new image by typing the `docker build -t docker-whale .` command in your terminal (don't forget the . period). <ide> <ide> $ docker build -t docker-whale . <del> Sending build context to Docker daemon 158.8 MB <add> Sending build context to Docker daemon 2.048 kB <ide> ...snip... <ide> Removing intermediate container a8e6faa88df3 <ide> Successfully built 7d9495d03763 <ide> complex. In this section, you learn what each message means. <ide> <ide> First Docker checks to make sure it has everything it needs to build. <ide> <del> Sending build context to Docker daemon 158.8 MB <add> Sending build context to Docker daemon 2.048 kB <ide> <ide> Then, Docker loads with the `whalesay` image. It already has this image <ide> locally as you might recall from the last page. So, Docker doesn't need to <ide> manager. This takes a lot of lines, no need to list them all again here. <ide> <ide> Then, Docker installs the new `fortunes` software. <ide> <del> Removing intermediate container e2a84b5f390f <del> Step 3 : RUN apt-get install -y fortunes <del> ---> Running in 23aa52c1897c <ide> Reading package lists... <ide> Building dependency tree... <ide> Reading state information... <ide> Then, Docker installs the new `fortunes` software. <ide> <ide> Finally, Docker finishes the build and reports its outcome. <ide> <del> Step 4 : CMD /usr/games/fortune -a | cowsay <add> Step 3 : CMD /usr/games/fortune -a | cowsay <ide> ---> Running in a8e6faa88df3 <ide> ---> 7d9495d03763 <ide> Removing intermediate container a8e6faa88df3
1
Python
Python
add funcs to rpc instance
59f55fa7fc164cfd0332db6fa11e5af5c663b7b9
<ide><path>glances/glances.py <ide> # <ide> # Glances is a simple textual monitoring tool <ide> # <del># Pre-requisites: Python 2.6+ and PsUtil 0.4.0+ (for full functions) <del># <ide> # Copyright (C) Nicolargo 2012 <[email protected]> <ide> # <add># Glances is distributed <ide> # under the terms of the GNU Lesser General Public License as published <ide> # by the Free Software Foundation, either version 3 of the License, or <ide> # (at your option) any later version. <ide> # <ide> <ide> __appname__ = 'glances' <del>__version__ = "1.4.2.2b" <add>__version__ = "1.5b" <ide> __author__ = "Nicolas Hennion <[email protected]>" <ide> __licence__ = "LGPL" <ide> <ide> def __get_process_stats_NEW__(self, proc): <ide> procstat['cmdline'] = " ".join(procstat['cmdline']) <ide> <ide> return procstat <del> <ide> <ide> def __get_process_stats__(self, proc): <ide> """ <ide> def displayCpu(self, cpu, percpu, proclist): <ide> self.__colors_list[alert]) <ide> except: <ide> #~ alert = self.__getCpuAlert(percpu[i]['idle']) <del> logs.add(alert, "CPU-%d idle" % i, percpu[i]['idle'], proclist) <add> #~ logs.add(alert, "CPU-%d idle" % i, percpu[i]['idle'], proclist) <ide> self.term_window.addnstr(self.cpu_y + 3, self.cpu_x + 10 + i*10, <ide> "%.1f" % percpu[i]['idle'], 8) <ide> <ide> def displayCpu(self, cpu, percpu, proclist): <ide> self.__colors_list[alert]) <ide> except: <ide> #~ alert = self.__getCpuAlert(cpu['idle']) <del> logs.add(alert, "CPU idle", cpu['idle'], proclist) <add> #~ logs.add(alert, "CPU idle", cpu['idle'], proclist) <ide> self.term_window.addnstr(self.cpu_y + 3, self.cpu_x + 10, <ide> "%.1f" % cpu['idle'], 8) <ide> <del> <ide> # Return the X offset to display Load and Mem <ide> return offset_x <ide> <ide> def displayProcess(self, processcount, processlist, log_count=0): <ide> # IO <ide> if tag_io: <ide> if processlist[processes]['io_counters'] == {}: <del> pass <add> self.term_window.addnstr( <add> self.process_y + 3 + processes, process_x + 62, <add> _("A_DENY"), 8) <add> self.term_window.addnstr( <add> self.process_y + 3 + processes, process_x + 72, <add> _("A_DENY"), 8) <ide> else: <ide> # Processes are only refresh every 2 refresh_time <ide> #~ elapsed_time = max(1, self.__refresh_time) * 2 <ide> def displayHelp(self): <ide> """ <ide> Show the help panel <ide> """ <add> <ide> if not self.help_tag: <ide> return 0 <ide> screen_x = self.screen.getmaxyx()[1] <ide> def displayHelp(self): <ide> # Console 80x24 is mandatory to display the help message <ide> self.erase() <ide> <del> self.term_window.addnstr( <del> self.help_y, self.help_x, <del> _("Glances {0} with PsUtil {1}").format( <del> self.__version, psutil.__version__), <del> 79, self.title_color if self.hascolors else 0) <add> try: <add> self.term_window.addnstr( <add> self.help_y, self.help_x, <add> _("Glances {0} with PsUtil {1}").format( <add> self.__version, psutil.__version__), <add> 79, self.title_color if self.hascolors else 0) <add> except: <add> self.term_window.addnstr( <add> self.help_y, self.help_x, <add> _("Glances {0}").format(self.__version), <add> 79, self.title_color if self.hascolors else 0) <ide> <ide> self.term_window.addnstr(self.help_y + 2, self.help_x, <ide> _("Captions: "), 79) <ide> def displayHelp(self): <ide> self.term_window.addnstr( <ide> self.help_y + 5, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("a"), _("Sort processes automatically " <del> "(need PsUtil 0.2.0+)"), width=width), <add> _("a"), _("Sort processes automatically"), width=width), <ide> 79, self.ifCRITICAL_color2 <ide> if not 
psutil_get_cpu_percent_tag else 0) <ide> self.term_window.addnstr( <ide> self.help_y + 6, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("b"), _("Switch between bit/s or byte/s for network IO "), <add> _("b"), _("Switch between bit/s or byte/s for network IO"), <ide> width=width), 79, self.ifCRITICAL_color2 <ide> if not psutil_get_cpu_percent_tag else 0) <ide> self.term_window.addnstr( <ide> self.help_y + 7, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("c"), _("Sort processes by CPU% " <del> "(need PsUtil 0.2.0+)"), width=width), <add> _("c"), _("Sort processes by CPU%"), width=width), <ide> 79, self.ifCRITICAL_color2 <ide> if not psutil_get_cpu_percent_tag else 0) <ide> self.term_window.addnstr( <ide> def displayHelp(self): <ide> self.term_window.addnstr( <ide> self.help_y + 10, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("d"), _("Show/hide disk I/O stats " <del> "(need PsUtil 0.4.0+)"), width=width), <add> _("d"), _("Show/hide disk I/O stats"), width=width), <ide> 79, self.ifCRITICAL_color2 if not psutil_disk_io_tag else 0) <ide> self.term_window.addnstr( <ide> self.help_y + 11, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("f"), _("Show/hide file system stats " <del> "(need PsUtil 0.3.0+)"), width=width), <add> _("f"), _("Show/hide file system stats"), width=width), <ide> 79, self.ifCRITICAL_color2 if not psutil_fs_usage_tag else 0) <ide> self.term_window.addnstr( <ide> self.help_y + 12, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("n"), _("Show/hide network stats " <del> "(need PsUtil 0.3.0+)"), width=width), <add> _("n"), _("Show/hide network stats"), width=width), <ide> 79, self.ifCRITICAL_color2 if not psutil_network_io_tag else 0) <ide> self.term_window.addnstr( <ide> self.help_y + 13, self.help_x, <ide> "{0:^{width}} {1}".format( <del> _("l"), _("Show/hide log messages (only available " <del> "if display > 24 lines)"), width=width), 79) <add> _("l"), _("Show/hide log messages"), width=width), 79) <ide> self.term_window.addnstr( <ide> self.help_y + 14, self.help_x, <ide> "{0:^{width}} {1}".format( <ide> class GlancesInstance(): <ide> """ <ide> <ide> def init(self): <add> # Return the Glances version <ide> return __version__ <ide> <del> def get(self): <add> def getAll(self): <ide> # Update and return all the stats <ide> stats.update() <ide> return json.dumps(stats.getAll()) <ide> <add> def getCpu(self): <add> # Update and return CPU stats <add> stats.update() <add> return json.dumps(stats.getCpu()) <add> <add> def getLoad(self): <add> # Update and return LOAD stats <add> stats.update() <add> return json.dumps(stats.getLoad()) <add> <add> def getMem(self): <add> # Update and return MEM stats <add> stats.update() <add> return json.dumps(stats.getMem()) <add> <add> def getMemSwap(self): <add> # Update and return MEMSWAP stats <add> stats.update() <add> return json.dumps(stats.getMemSwap()) <add> <ide> <ide> class GlancesServer(): <ide> """ <ide> class GlancesServer(): <ide> <ide> def __init__(self, bind_address, bind_port = 61209, RequestHandler = GlancesHandler): <ide> self.server = SimpleXMLRPCServer((bind_address, bind_port), <del> requestHandler=RequestHandler) <add> requestHandler = RequestHandler) <ide> self.server.register_introspection_functions() <ide> self.server.register_instance(GlancesInstance()) <ide> return <ide> def client_init(self): <ide> return __version__[:3] == self.client.init()[:3] <ide> <ide> def client_get(self): <del> return json.loads(self.client.get()) <add> return json.loads(self.client.getAll()) <ide> <ide> <ide> 
# Global def
1
Ruby
Ruby
show tapped casks
2aa7597e70d8aa5bd529494884646797ea547cb0
<ide><path>Library/Homebrew/tap.rb <ide> def install(options = {}) <ide> <ide> link_completions_and_manpages <ide> <del> formula_count = formula_files.size <del> puts "Tapped #{Formatter.pluralize(formula_count, "formula")} (#{path.abv})" unless quiet <add> casks = Formatter.pluralize(cask_files.count, "cask") <add> formulae = Formatter.pluralize(formula_files.count, "formula") <add> puts "Tapped #{formulae} and #{casks} (#{path.abv})." unless quiet <ide> Descriptions.cache_formulae(formula_names) <ide> <ide> return if options[:clone_target] <ide> def uninstall <ide> require "descriptions" <ide> raise TapUnavailableError, name unless installed? <ide> <del> puts "Untapping #{name}... (#{path.abv})" <add> puts "Untapping #{name}..." <add> <add> abv = path.abv <add> casks = Formatter.pluralize(cask_files.count, "cask") <add> formulae = Formatter.pluralize(formula_files.count, "formula") <add> <ide> unpin if pinned? <del> formula_count = formula_files.size <ide> Descriptions.uncache_formulae(formula_names) <ide> Utils::Link.unlink_manpages(path) <ide> Utils::Link.unlink_completions(path) <ide> path.rmtree <ide> path.parent.rmdir_if_possible <del> puts "Untapped #{Formatter.pluralize(formula_count, "formula")}" <add> puts "Untapped #{formulae} and #{casks} (#{abv})." <ide> clear_cache <ide> end <ide>
1
Javascript
Javascript
make setstate return the state that was set
aec2f26ed7fa99f37b9122186b7a8951b78058a2
<ide><path>src/Redux.js <ide> export default class Redux { <ide> setState(nextState) { <ide> this.state = nextState; <ide> this.listeners.forEach(listener => listener()); <add> return nextState; <ide> } <ide> <ide> subscribe(listener) { <ide><path>src/createDispatcher.js <ide> import compose from './utils/composeMiddleware'; <ide> <ide> export default function createDispatcher(store, middlewares = []) { <ide> return function dispatcher(initialState, setState) { <del> let state = store(initialState, {}); <del> setState(state); <add> let state = setState(store(initialState, {})); <ide> <ide> function dispatch(action) { <del> state = store(state, action); <del> setState(state); <add> state = setState(store(state, action)); <ide> return action; <ide> } <ide> <ide><path>test/createDispatcher.spec.js <ide> const { ADD_TODO } = constants; <ide> describe('createDispatcher', () => { <ide> <ide> it('should handle sync and async dispatches', done => { <del> const spy = expect.createSpy(() => {}); <add> const spy = expect.createSpy( <add> nextState => nextState <add> ).andCallThrough(); <add> <ide> const dispatcher = createDispatcher( <ide> composeStores({ todoStore }), <ide> // we need this middleware to handle async actions <del> getState => [thunkMiddleware(getState)]); <add> getState => [thunkMiddleware(getState)] <add> ); <ide> <ide> expect(dispatcher).toBeA('function'); <ide>
3
Go
Go
normalize c.check to c.assert
5879446de9adbff7432f1ccaa781164fcd5efe26
<ide><path>integration-cli/docker_cli_build_test.go <ide> func (s *DockerSuite) TestBuildBuildTimeArgExpansion(c *check.C) { <ide> ) <ide> <ide> res := inspectField(c, imgName, "Config.WorkingDir") <del> c.Check(filepath.ToSlash(res), check.Equals, filepath.ToSlash(wdVal)) <add> c.Assert(filepath.ToSlash(res), check.Equals, filepath.ToSlash(wdVal)) <ide> <ide> var resArr []string <ide> inspectFieldAndUnmarshall(c, imgName, "Config.Env", &resArr) <ide><path>integration-cli/docker_cli_health_test.go <ide> func waitForHealthStatus(c *check.C, name string, prev string, expected string) <ide> if out == expected { <ide> return <ide> } <del> c.Check(out, checker.Equals, prev) <add> c.Assert(out, checker.Equals, prev) <ide> if out != prev { <ide> return <ide> } <ide> func getHealth(c *check.C, name string) *types.Health { <ide> out, _ := dockerCmd(c, "inspect", "--format={{json .State.Health}}", name) <ide> var health types.Health <ide> err := json.Unmarshal([]byte(out), &health) <del> c.Check(err, checker.Equals, nil) <add> c.Assert(err, checker.Equals, nil) <ide> return &health <ide> } <ide> <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> cid, _ := dockerCmd(c, "create", "--name", name, imageName) <ide> out, _ := dockerCmd(c, "ps", "-a", "--format={{.ID}} {{.Status}}") <ide> out = RemoveOutputForExistingElements(out, existingContainers) <del> c.Check(out, checker.Equals, cid[:12]+" Created\n") <add> c.Assert(out, checker.Equals, cid[:12]+" Created\n") <ide> <ide> // Inspect the options <ide> out, _ = dockerCmd(c, "inspect", <ide> "--format=timeout={{.Config.Healthcheck.Timeout}} interval={{.Config.Healthcheck.Interval}} retries={{.Config.Healthcheck.Retries}} test={{.Config.Healthcheck.Test}}", name) <del> c.Check(out, checker.Equals, "timeout=30s interval=1s retries=0 test=[CMD-SHELL cat /status]\n") <add> c.Assert(out, checker.Equals, "timeout=30s interval=1s retries=0 test=[CMD-SHELL cat /status]\n") <ide> <ide> // Start <ide> dockerCmd(c, "start", name) <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> <ide> // Inspect the status <ide> out, _ = dockerCmd(c, "inspect", "--format={{.State.Health.Status}}", name) <del> c.Check(out, checker.Equals, "unhealthy\n") <add> c.Assert(out, checker.Equals, "unhealthy\n") <ide> <ide> // Make it healthy again <ide> dockerCmd(c, "exec", name, "touch", "/status") <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> // Disable the check from the CLI <ide> dockerCmd(c, "create", "--name=noh", "--no-healthcheck", imageName) <ide> out, _ = dockerCmd(c, "inspect", "--format={{.Config.Healthcheck.Test}}", "noh") <del> c.Check(out, checker.Equals, "[NONE]\n") <add> c.Assert(out, checker.Equals, "[NONE]\n") <ide> dockerCmd(c, "rm", "noh") <ide> <ide> // Disable the check with a new build <ide> buildImageSuccessfully(c, "no_healthcheck", build.WithDockerfile(`FROM testhealth <ide> HEALTHCHECK NONE`)) <ide> <ide> out, _ = dockerCmd(c, "inspect", "--format={{.Config.Healthcheck.Test}}", "no_healthcheck") <del> c.Check(out, checker.Equals, "[NONE]\n") <add> c.Assert(out, checker.Equals, "[NONE]\n") <ide> <ide> // Enable the checks from the CLI <ide> _, _ = dockerCmd(c, "run", "-d", "--name=fatal_healthcheck", <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> "no_healthcheck") <ide> waitForHealthStatus(c, "fatal_healthcheck", "starting", "healthy") <ide> health := getHealth(c, "fatal_healthcheck") <del> c.Check(health.Status, checker.Equals, "healthy") <del> c.Check(health.FailingStreak, checker.Equals, 0) <add> c.Assert(health.Status, 
checker.Equals, "healthy") <add> c.Assert(health.FailingStreak, checker.Equals, 0) <ide> last := health.Log[len(health.Log)-1] <del> c.Check(last.ExitCode, checker.Equals, 0) <del> c.Check(last.Output, checker.Equals, "OK\n") <add> c.Assert(last.ExitCode, checker.Equals, 0) <add> c.Assert(last.Output, checker.Equals, "OK\n") <ide> <ide> // Fail the check <ide> dockerCmd(c, "exec", "fatal_healthcheck", "rm", "/status") <ide> waitForHealthStatus(c, "fatal_healthcheck", "healthy", "unhealthy") <ide> <ide> failsStr, _ := dockerCmd(c, "inspect", "--format={{.State.Health.FailingStreak}}", "fatal_healthcheck") <ide> fails, err := strconv.Atoi(strings.TrimSpace(failsStr)) <del> c.Check(err, check.IsNil) <del> c.Check(fails >= 3, checker.Equals, true) <add> c.Assert(err, check.IsNil) <add> c.Assert(fails >= 3, checker.Equals, true) <ide> dockerCmd(c, "rm", "-f", "fatal_healthcheck") <ide> <ide> // Check timeout <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> waitForHealthStatus(c, "test", "starting", "unhealthy") <ide> health = getHealth(c, "test") <ide> last = health.Log[len(health.Log)-1] <del> c.Check(health.Status, checker.Equals, "unhealthy") <del> c.Check(last.ExitCode, checker.Equals, -1) <del> c.Check(last.Output, checker.Equals, "Health check exceeded timeout (1s)") <add> c.Assert(health.Status, checker.Equals, "unhealthy") <add> c.Assert(last.ExitCode, checker.Equals, -1) <add> c.Assert(last.Output, checker.Equals, "Health check exceeded timeout (1s)") <ide> dockerCmd(c, "rm", "-f", "test") <ide> <ide> // Check JSON-format <ide> func (s *DockerSuite) TestHealth(c *check.C) { <ide> CMD ["cat", "/my status"]`)) <ide> out, _ = dockerCmd(c, "inspect", <ide> "--format={{.Config.Healthcheck.Test}}", imageName) <del> c.Check(out, checker.Equals, "[CMD cat /my status]\n") <add> c.Assert(out, checker.Equals, "[CMD cat /my status]\n") <ide> <ide> } <ide> <ide><path>integration-cli/docker_cli_run_test.go <ide> func (s *DockerSuite) TestRunCredentialSpecFailures(c *check.C) { <ide> for _, attempt := range attempts { <ide> _, _, err := dockerCmdWithError("run", "--security-opt=credentialspec="+attempt.value, "busybox", "true") <ide> c.Assert(err, checker.NotNil, check.Commentf("%s expected non-nil err", attempt.value)) <del> c.Check(err.Error(), checker.Contains, attempt.expectedError, check.Commentf("%s expected %s got %s", attempt.value, attempt.expectedError, err)) <add> c.Assert(err.Error(), checker.Contains, attempt.expectedError, check.Commentf("%s expected %s got %s", attempt.value, attempt.expectedError, err)) <ide> } <ide> } <ide> <ide> func (s *DockerSuite) TestRunCredentialSpecWellFormed(c *check.C) { <ide> // controller handy <ide> out, _ := dockerCmd(c, "run", "--rm", "--security-opt=credentialspec="+value, minimalBaseImage(), "nltest", "/PARENTDOMAIN") <ide> <del> c.Check(out, checker.Contains, "hyperv.local.") <del> c.Check(out, checker.Contains, "The command completed successfully") <add> c.Assert(out, checker.Contains, "hyperv.local.") <add> c.Assert(out, checker.Contains, "The command completed successfully") <ide> } <ide> } <ide>
3
Java
Java
add clienthttpconnector test suite
7bd524e9d705f0189730b3a3b4c41ebde0a2936b
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/HttpComponentsClientHttpConnector.java <ide> import org.apache.hc.client5.http.protocol.HttpClientContext; <ide> import org.apache.hc.core5.concurrent.FutureCallback; <ide> import org.apache.hc.core5.http.HttpResponse; <add>import org.apache.hc.core5.http.HttpStreamResetException; <ide> import org.apache.hc.core5.http.Message; <ide> import org.apache.hc.core5.http.nio.AsyncRequestProducer; <ide> import org.apache.hc.core5.reactive.ReactiveResponseConsumer; <ide> * {@link ClientHttpConnector} implementation for the Apache HttpComponents HttpClient 5.x. <ide> * <ide> * @author Martin Tarjányi <add> * @author Arjen Poutsma <ide> * @since 5.3 <ide> * @see <a href="https://hc.apache.org/index.html">Apache HttpComponents</a> <ide> */ <ide> public void completed(Message<HttpResponse, Publisher<ByteBuffer>> result) { <ide> <ide> @Override <ide> public void failed(Exception ex) { <del> this.sink.error(ex); <add> Throwable t = ex; <add> if (t instanceof HttpStreamResetException) { <add> HttpStreamResetException httpStreamResetException = (HttpStreamResetException) ex; <add> t = httpStreamResetException.getCause(); <add> } <add> this.sink.error(t); <ide> } <ide> <ide> @Override <ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/HttpComponentsClientHttpRequest.java <ide> import java.net.URISyntaxException; <ide> import java.nio.ByteBuffer; <ide> import java.util.Collection; <add>import java.util.function.Function; <ide> <ide> import org.apache.hc.client5.http.cookie.CookieStore; <ide> import org.apache.hc.client5.http.impl.cookie.BasicClientCookie; <ide> import org.springframework.http.HttpHeaders; <ide> import org.springframework.http.HttpMethod; <ide> import org.springframework.lang.Nullable; <add>import org.springframework.util.Assert; <ide> <ide> import static org.springframework.http.MediaType.ALL_VALUE; <ide> <ide> /** <ide> * {@link ClientHttpRequest} implementation for the Apache HttpComponents HttpClient 5.x. <del> * <ide> * @author Martin Tarjányi <add> * @author Arjen Poutsma <ide> * @since 5.3 <ide> * @see <a href="https://hc.apache.org/index.html">Apache HttpComponents</a> <ide> */ <ide> public HttpComponentsClientHttpRequest(HttpMethod method, URI uri, HttpClientCon <ide> <ide> @Override <ide> public HttpMethod getMethod() { <del> return HttpMethod.resolve(this.httpRequest.getMethod()); <add> HttpMethod method = HttpMethod.resolve(this.httpRequest.getMethod()); <add> Assert.state(method != null, "Method must not be null"); <add> return method; <ide> } <ide> <ide> @Override <ide> public Mono<Void> writeWith(Publisher<? extends DataBuffer> body) { <ide> <ide> @Override <ide> public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? extends DataBuffer>> body) { <del> return writeWith(Flux.from(body).flatMap(p -> p)); <add> return writeWith(Flux.from(body).flatMap(Function.identity())); <ide> } <ide> <ide> @Override <ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/HttpComponentsClientHttpResponse.java <ide> * {@link ClientHttpResponse} implementation for the Apache HttpComponents HttpClient 5.x. <ide> * <ide> * @author Martin Tarjányi <add> * @author Arjen Poutsma <ide> * @since 5.3 <ide> * @see <a href="https://hc.apache.org/index.html">Apache HttpComponents</a> <ide> */ <ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/JettyClientHttpConnector.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. 
<add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public JettyClientHttpConnector(JettyResourceFactory resourceFactory, @Nullable <ide> } <ide> } <ide> <del> <add> /** <add> * Set the buffer factory to be used. <add> */ <ide> public void setBufferFactory(DataBufferFactory bufferFactory) { <ide> this.bufferFactory = bufferFactory; <ide> } <ide> <del> <ide> @Override <ide> public Mono<ClientHttpResponse> connect(HttpMethod method, URI uri, <ide> Function<? super ClientHttpRequest, Mono<Void>> requestCallback) { <ide> public Mono<ClientHttpResponse> connect(HttpMethod method, URI uri, <ide> } <ide> <ide> private DataBuffer toDataBuffer(ContentChunk chunk) { <del> <del> // We must copy until this is resolved: <del> // https://github.com/eclipse/jetty.project/issues/2429 <del> <del> // Use copy instead of buffer wrapping because Callback#succeeded() is <del> // used not only to release the buffer but also to request more data <del> // which is a problem for codecs that buffer data. <del> <del> DataBuffer buffer = this.bufferFactory.allocateBuffer(chunk.buffer.capacity()); <del> buffer.write(chunk.buffer); <add> DataBuffer buffer = this.bufferFactory.wrap(chunk.buffer); <ide> chunk.callback.succeeded(); <ide> return buffer; <ide> } <ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/JettyClientHttpRequest.java <ide> /* <del> * Copyright 2002-2018 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> import org.eclipse.jetty.reactive.client.ReactiveRequest; <ide> import org.eclipse.jetty.util.Callback; <ide> import org.reactivestreams.Publisher; <del>import reactor.core.Exceptions; <ide> import reactor.core.publisher.Flux; <ide> import reactor.core.publisher.Mono; <add>import reactor.core.publisher.MonoSink; <ide> <ide> import org.springframework.core.io.buffer.DataBuffer; <ide> import org.springframework.core.io.buffer.DataBufferFactory; <ide> import org.springframework.core.io.buffer.DataBufferUtils; <del>import org.springframework.core.io.buffer.PooledDataBuffer; <ide> import org.springframework.http.HttpHeaders; <ide> import org.springframework.http.HttpMethod; <ide> import org.springframework.http.MediaType; <ide> public DataBufferFactory bufferFactory() { <ide> <ide> @Override <ide> public Mono<Void> writeWith(Publisher<? extends DataBuffer> body) { <del> Flux<ContentChunk> chunks = Flux.from(body).map(this::toContentChunk); <del> ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType()); <del> this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build(); <del> return doCommit(this::completes); <add> return Mono.<Void>create(sink -> { <add> Flux<ContentChunk> chunks = Flux.from(body).map(buffer -> toContentChunk(buffer, sink)); <add> ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType()); <add> this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build(); <add> sink.success(); <add> }) <add> .then(doCommit(this::completes)); <ide> } <ide> <ide> @Override <ide> public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? 
extends DataBuffer>> body) { <del> Flux<ContentChunk> chunks = Flux.from(body) <del> .flatMap(Function.identity()) <del> .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release) <del> .map(this::toContentChunk); <del> ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType()); <del> this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build(); <del> return doCommit(this::completes); <add> return writeWith(Flux.from(body).flatMap(Function.identity())); <ide> } <ide> <ide> private String getContentType() { <ide> private Mono<Void> completes() { <ide> return Mono.empty(); <ide> } <ide> <del> private ContentChunk toContentChunk(DataBuffer buffer) { <add> private ContentChunk toContentChunk(DataBuffer buffer, MonoSink<Void> sink) { <ide> return new ContentChunk(buffer.asByteBuffer(), new Callback() { <ide> @Override <ide> public void succeeded() { <ide> public void succeeded() { <ide> @Override <ide> public void failed(Throwable x) { <ide> DataBufferUtils.release(buffer); <del> throw Exceptions.propagate(x); <add> sink.error(x); <ide> } <ide> }); <ide> } <ide><path>spring-web/src/test/java/org/springframework/http/client/reactive/ClientHttpConnectorTests.java <add>/* <add> * Copyright 2002-2020 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * https://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. 
<add> */ <add> <add>package org.springframework.http.client.reactive; <add> <add>import java.io.IOException; <add>import java.lang.annotation.ElementType; <add>import java.lang.annotation.Retention; <add>import java.lang.annotation.RetentionPolicy; <add>import java.lang.annotation.Target; <add>import java.net.URI; <add>import java.nio.charset.StandardCharsets; <add>import java.util.ArrayList; <add>import java.util.Arrays; <add>import java.util.EnumSet; <add>import java.util.List; <add>import java.util.Random; <add>import java.util.concurrent.CountDownLatch; <add>import java.util.function.Consumer; <add> <add>import okhttp3.mockwebserver.MockResponse; <add>import okhttp3.mockwebserver.MockWebServer; <add>import okhttp3.mockwebserver.RecordedRequest; <add>import okio.Buffer; <add>import org.junit.jupiter.api.AfterEach; <add>import org.junit.jupiter.api.BeforeEach; <add>import org.junit.jupiter.params.ParameterizedTest; <add>import org.junit.jupiter.params.provider.Arguments; <add>import org.junit.jupiter.params.provider.MethodSource; <add>import reactor.core.publisher.Flux; <add>import reactor.core.publisher.Mono; <add>import reactor.test.StepVerifier; <add> <add>import org.springframework.core.io.buffer.DataBuffer; <add>import org.springframework.core.io.buffer.DataBufferFactory; <add>import org.springframework.core.io.buffer.DataBufferUtils; <add>import org.springframework.core.io.buffer.DefaultDataBufferFactory; <add>import org.springframework.http.HttpMethod; <add>import org.springframework.http.HttpStatus; <add>import org.springframework.http.ReactiveHttpOutputMessage; <add>import org.springframework.lang.NonNull; <add> <add>import static org.assertj.core.api.Assertions.assertThat; <add>import static org.assertj.core.api.Assertions.fail; <add> <add>/** <add> * @author Arjen Poutsma <add> */ <add>public class ClientHttpConnectorTests { <add> <add> private static final int BUF_SIZE = 1024; <add> <add> private static final EnumSet<HttpMethod> METHODS_WITH_BODY = <add> EnumSet.of(HttpMethod.PUT, HttpMethod.POST, HttpMethod.PATCH); <add> <add> private final MockWebServer server = new MockWebServer(); <add> <add> private final DataBufferFactory bufferFactory = new DefaultDataBufferFactory(); <add> <add> @BeforeEach <add> void startServer() throws IOException { <add> server.start(); <add> } <add> <add> @AfterEach <add> void stopServer() throws IOException { <add> server.shutdown(); <add> } <add> <add> @ParameterizedTest <add> @MethodSource("org.springframework.http.client.reactive.ClientHttpConnectorTests#methodsWithConnectors") <add> void basic(ClientHttpConnector connector, HttpMethod method) throws Exception { <add> URI uri = this.server.url("/").uri(); <add> <add> String responseBody = "bar\r\n"; <add> prepareResponse(response -> { <add> response.setResponseCode(200); <add> response.addHeader("Baz", "Qux"); <add> response.setBody(responseBody); <add> }); <add> <add> String requestBody = "foo\r\n"; <add> boolean requestHasBody = METHODS_WITH_BODY.contains(method); <add> <add> Mono<ClientHttpResponse> futureResponse = connector.connect(method, uri, request -> { <add> assertThat(request.getMethod()).isEqualTo(method); <add> assertThat(request.getURI()).isEqualTo(uri); <add> request.getHeaders().add("Foo", "Bar"); <add> if (requestHasBody) { <add> Mono<DataBuffer> body = Mono.fromCallable(() -> { <add> byte[] bytes = requestBody.getBytes(StandardCharsets.UTF_8); <add> return this.bufferFactory.wrap(bytes); <add> }); <add> return request.writeWith(body); <add> } <add> else { <add> return 
request.setComplete(); <add> } <add> }); <add> <add> CountDownLatch latch = new CountDownLatch(1); <add> StepVerifier.create(futureResponse) <add> .assertNext(response -> { <add> assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK); <add> assertThat(response.getHeaders().getFirst("Baz")).isEqualTo("Qux"); <add> DataBufferUtils.join(response.getBody()) <add> .map(buffer -> { <add> String s = buffer.toString(StandardCharsets.UTF_8); <add> DataBufferUtils.release(buffer); <add> return s; <add> }).subscribe( <add> s -> assertThat(s).isEqualTo(responseBody), <add> throwable -> { <add> latch.countDown(); <add> fail(throwable.getMessage(), throwable); <add> }, <add> latch::countDown); <add> }) <add> .verifyComplete(); <add> latch.await(); <add> <add> expectRequest(request -> { <add> assertThat(request.getMethod()).isEqualTo(method.name()); <add> assertThat(request.getHeader("Foo")).isEqualTo("Bar"); <add> if (requestHasBody) { <add> assertThat(request.getBody().readUtf8()).isEqualTo(requestBody); <add> } <add> }); <add> } <add> <add> @ParameterizedConnectorTest <add> void errorInRequestBody(ClientHttpConnector connector) { <add> Exception error = new RuntimeException(); <add> Flux<DataBuffer> body = Flux.concat( <add> stringBuffer("foo"), <add> Mono.error(error) <add> ); <add> prepareResponse(response -> response.setResponseCode(200)); <add> Mono<ClientHttpResponse> futureResponse = <add> connector.connect(HttpMethod.POST, this.server.url("/").uri(), request -> request.writeWith(body)); <add> StepVerifier.create(futureResponse) <add> .expectErrorSatisfies(throwable -> assertThat(throwable).isSameAs(error)) <add> .verify(); <add> } <add> <add> @ParameterizedConnectorTest <add> void cancelResponseBody(ClientHttpConnector connector) { <add> Buffer responseBody = randomBody(100); <add> prepareResponse(response -> response.setBody(responseBody)); <add> <add> ClientHttpResponse response = connector.connect(HttpMethod.POST, this.server.url("/").uri(), <add> ReactiveHttpOutputMessage::setComplete).block(); <add> assertThat(response).isNotNull(); <add> <add> StepVerifier.create(response.getBody(), 1) <add> .expectNextCount(1) <add> .thenRequest(1) <add> .thenCancel() <add> .verify(); <add> } <add> <add> @NonNull <add> private Buffer randomBody(int size) { <add> Buffer responseBody = new Buffer(); <add> Random rnd = new Random(); <add> for (int i = 0; i < size; i++) { <add> byte[] bytes = new byte[BUF_SIZE]; <add> rnd.nextBytes(bytes); <add> responseBody.write(bytes); <add> } <add> return responseBody; <add> } <add> <add> private void prepareResponse(Consumer<MockResponse> consumer) { <add> MockResponse response = new MockResponse(); <add> consumer.accept(response); <add> this.server.enqueue(response); <add> } <add> <add> private void expectRequest(Consumer<RecordedRequest> consumer) { <add> try { <add> consumer.accept(this.server.takeRequest()); <add> } <add> catch (InterruptedException ex) { <add> throw new IllegalStateException(ex); <add> } <add> } <add> <add> @Retention(RetentionPolicy.RUNTIME) <add> @Target(ElementType.METHOD) <add> @ParameterizedTest <add> @MethodSource("org.springframework.http.client.reactive.ClientHttpConnectorTests#connectors") <add> public @interface ParameterizedConnectorTest { <add> <add> } <add> <add> static List<ClientHttpConnector> connectors() { <add> return Arrays.asList( <add> new ReactorClientHttpConnector(), <add> new JettyClientHttpConnector(), <add> new HttpComponentsClientHttpConnector() <add> ); <add> } <add> <add> static List<Arguments> 
methodsWithConnectors() { <add> List<Arguments> result = new ArrayList<>(); <add> for (ClientHttpConnector connector : connectors()) { <add> for (HttpMethod method : HttpMethod.values()) { <add> result.add(Arguments.of(connector, method)); <add> } <add> } <add> return result; <add> } <add> <add> private Mono<DataBuffer> stringBuffer(String value) { <add> return Mono.fromCallable(() -> { <add> byte[] bytes = value.getBytes(StandardCharsets.UTF_8); <add> DataBuffer buffer = this.bufferFactory.allocateBuffer(bytes.length); <add> buffer.write(bytes); <add> return buffer; <add> }); <add> } <add> <add>}
6
Text
Text
add company to airflow users list
da83ef8cfe8a7e59f96290adbf98f936e8059ee1
<ide><path>README.md <ide> Currently **officially** using Airflow: <ide> 1. [Poshmark](https://www.poshmark.com) <ide> 1. [Postmates](http://www.postmates.com) [[@syeoryn](https://github.com/syeoryn)] <ide> 1. [Premise](http://www.premise.com) [[@jmccallum-premise](https://github.com/jmccallum-premise)] <add>1. [Promofarma](https://www.promofarma.com/) [[@JavierLopezT](https://github.com/JavierLopezT)] <ide> 1. [Pronto Tools](http://www.prontotools.io/) [[@zkan](https://github.com/zkan) & [@mesodiar](https://github.com/mesodiar)] <ide> 1. [proton.ai](https://proton.ai/) [[@prmsolutions](https://github.com/prmsolutions)] <ide> 1. [PubNub](https://pubnub.com) [[@jzucker2](https://github.com/jzucker2)]
1
Text
Text
update the changelog
9d4c2dfaac682c546bc746ed257b408b4e862f3d
<ide><path>CHANGELOG.md <ide> <ide> * Fix `null` showing up in a warning instead of the component stack. ([@gaearon](https://github.com/gaearon) in [#10915](https://github.com/facebook/react/pull/10915)) <ide> * Fix IE11 crash in development mode. ([@leidegre](https://github.com/leidegre) in [#10921](https://github.com/facebook/react/pull/10921)) <add>* Fix `tabIndex` not getting applied to SVG elements. ([@gaearon](http://github.com/gaearon) in [#11034](https://github.com/facebook/react/pull/11034)) <ide> * Minor bundle size improvements. ([@gaearon](https://github.com/gaearon) in [#10802](https://github.com/facebook/react/pull/10802), [#10803](https://github.com/facebook/react/pull/10803)) <ide> <ide> </details>
1
Javascript
Javascript
pass type to the relevant host config methods
6c1592f3842fb2bee94e82e84d64b17dec39b38b
<ide><path>src/renderers/art/ReactARTFiber.js <ide> const ARTRenderer = ReactFiberReconciler({ <ide> // Noop <ide> }, <ide> <del> commitUpdate(instance, oldProps, newProps) { <add> commitUpdate(instance, type, oldProps, newProps) { <ide> instance._applyProps(instance, newProps, oldProps); <ide> }, <ide> <ide> const ARTRenderer = ReactFiberReconciler({ <ide> // Noop <ide> }, <ide> <del> prepareUpdate(domElement, oldProps, newProps) { <add> prepareUpdate(domElement, type, oldProps, newProps) { <ide> return true; <ide> }, <ide> <ide><path>src/renderers/dom/fiber/ReactDOMFiber.js <ide> var DOMRenderer = ReactFiberReconciler({ <ide> <ide> finalizeInitialChildren( <ide> domElement : Instance, <add> type : string, <ide> props : Props, <ide> rootContainerInstance : Container, <ide> ) : void { <del> // TODO: we normalize here because DOM renderer expects tag to be lowercase. <del> // We can change DOM renderer to compare special case against upper case, <del> // and use tagName (which is upper case for HTML DOM elements). Or we could <del> // let the renderer "normalize" the fiber type so we don't have to read <del> // the type from DOM. However we need to remember SVG is case-sensitive. <del> var tag = domElement.tagName.toLowerCase(); <del> setInitialProperties(domElement, tag, props, rootContainerInstance); <add> setInitialProperties(domElement, type, props, rootContainerInstance); <ide> }, <ide> <ide> prepareUpdate( <ide> domElement : Instance, <add> type : string, <ide> oldProps : Props, <ide> newProps : Props <ide> ) : boolean { <ide> var DOMRenderer = ReactFiberReconciler({ <ide> <ide> commitUpdate( <ide> domElement : Instance, <add> type : string, <ide> oldProps : Props, <ide> newProps : Props, <ide> rootContainerInstance : Container, <ide> internalInstanceHandle : Object, <ide> ) : void { <del> // TODO: we normalize here because DOM renderer expects tag to be lowercase. <del> // We can change DOM renderer to compare special case against upper case, <del> // and use tagName (which is upper case for HTML DOM elements). Or we could <del> // let the renderer "normalize" the fiber type so we don't have to read <del> // the type from DOM. However we need to remember SVG is case-sensitive. <del> var tag = domElement.tagName.toLowerCase(); <ide> // Update the internal instance handle so that we know which props are <ide> // the current ones. 
<ide> precacheFiberNode(internalInstanceHandle, domElement); <del> updateProperties(domElement, tag, oldProps, newProps, rootContainerInstance); <add> updateProperties(domElement, type, oldProps, newProps, rootContainerInstance); <ide> }, <ide> <ide> shouldSetTextContent(props : Props) : boolean { <ide><path>src/renderers/noop/ReactNoop.js <ide> var NoopRenderer = ReactFiberReconciler({ <ide> parentInstance.children.push(child); <ide> }, <ide> <del> finalizeInitialChildren(domElement : Instance, props : Props) : void { <add> finalizeInitialChildren(domElement : Instance, type : string, props : Props) : void { <ide> // Noop <ide> }, <ide> <del> prepareUpdate(instance : Instance, oldProps : Props, newProps : Props) : boolean { <add> prepareUpdate(instance : Instance, type : string, oldProps : Props, newProps : Props) : boolean { <ide> return true; <ide> }, <ide> <del> commitUpdate(instance : Instance, oldProps : Props, newProps : Props) : void { <add> commitUpdate(instance : Instance, type : string, oldProps : Props, newProps : Props) : void { <ide> instance.prop = newProps.prop; <ide> }, <ide> <ide><path>src/renderers/shared/fiber/ReactFiberCommitWork.js <ide> module.exports = function<T, P, I, TI, C, CX>( <ide> const newProps = finishedWork.memoizedProps; <ide> const oldProps = current.memoizedProps; <ide> const rootContainerInstance = getRootHostContainer(); <del> commitUpdate(instance, oldProps, newProps, rootContainerInstance, finishedWork); <add> const type = finishedWork.type; <add> commitUpdate(instance, type, oldProps, newProps, rootContainerInstance, finishedWork); <ide> } <ide> detachRefIfNeeded(current, finishedWork); <ide> return; <ide><path>src/renderers/shared/fiber/ReactFiberCompleteWork.js <ide> module.exports = function<T, P, I, TI, C, CX>( <ide> } <ide> case HostComponent: <ide> popHostContext(workInProgress); <add> const type = workInProgress.type; <ide> let newProps = workInProgress.pendingProps; <ide> if (current && workInProgress.stateNode != null) { <ide> // If we have an alternate, that means this is an update and we need to <ide> module.exports = function<T, P, I, TI, C, CX>( <ide> newProps = workInProgress.memoizedProps || oldProps; <ide> } <ide> const instance : I = workInProgress.stateNode; <del> if (prepareUpdate(instance, oldProps, newProps)) { <add> if (prepareUpdate(instance, type, oldProps, newProps)) { <ide> // This returns true if there was something to update. <ide> markUpdate(workInProgress); <ide> } <ide> module.exports = function<T, P, I, TI, C, CX>( <ide> // or completeWork depending on we want to add then top->down or <ide> // bottom->up. Top->down is faster in IE11. 
<ide> const instance = createInstance( <del> workInProgress.type, <add> type, <ide> newProps, <ide> rootContainerInstance, <ide> currentHostContext, <ide> workInProgress <ide> ); <ide> appendAllChildren(instance, workInProgress); <del> finalizeInitialChildren(instance, newProps, rootContainerInstance); <add> finalizeInitialChildren(instance, type, newProps, rootContainerInstance); <ide> <ide> workInProgress.stateNode = instance; <ide> if (workInProgress.ref) { <ide><path>src/renderers/shared/fiber/ReactFiberReconciler.js <ide> export type HostConfig<T, P, I, TI, C, CX> = { <ide> <ide> createInstance(type : T, props : P, rootContainerInstance : C, hostContext : CX | null, internalInstanceHandle : OpaqueNode) : I, <ide> appendInitialChild(parentInstance : I, child : I | TI) : void, <del> finalizeInitialChildren(parentInstance : I, props : P, rootContainerInstance : C) : void, <add> finalizeInitialChildren(parentInstance : I, type : T, props : P, rootContainerInstance : C) : void, <ide> <del> prepareUpdate(instance : I, oldProps : P, newProps : P) : boolean, <del> commitUpdate(instance : I, oldProps : P, newProps : P, rootContainerInstance : C, internalInstanceHandle : OpaqueNode) : void, <add> prepareUpdate(instance : I, type : T, oldProps : P, newProps : P) : boolean, <add> commitUpdate(instance : I, type : T, oldProps : P, newProps : P, rootContainerInstance : C, internalInstanceHandle : OpaqueNode) : void, <ide> <ide> shouldSetTextContent(props : P) : boolean, <ide> resetTextContent(instance : I) : void,
6
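The React Fiber change above threads the element type into the host config callbacks so a renderer no longer has to read the tag back off the instance (the removed tagName.toLowerCase() lookups). A minimal sketch of what a custom host config looks like with the new signatures; the object and its toy instance shape are illustrative, not React's actual DOM config:

const hostConfig = {
  createInstance(type, props) {
    return { type, props };                         // toy instance, not a real DOM node
  },
  prepareUpdate(instance, type, oldProps, newProps) {
    return oldProps !== newProps;                   // report whether a commit is needed
  },
  commitUpdate(instance, type, oldProps, newProps) {
    // type arrives as an argument, so nothing is derived from the instance itself
    instance.props = newProps;
  },
};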
Javascript
Javascript
ignore deprecatedproptype in bridgeless mode
0d968fd7dc93c135031aece5f702c082165eb298
<ide><path>Libraries/Utilities/deprecatedPropType.js <ide> function deprecatedPropType( <ide> return function validate(props, propName, componentName, ...rest) { <ide> // Don't warn for native components. <ide> if ( <add> !global.RN$Bridgeless && <ide> !UIManager.getViewManagerConfig(componentName) && <ide> props[propName] !== undefined <ide> ) {
1
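For context, the guard added above checks the bridgeless flag first so that the view-manager lookup and the deprecation warning are skipped entirely in that mode. A rough sketch of the same short-circuit ordering; the flag, the lookup stub and the validator name are all stand-ins, not React Native's real implementation:

const bridgeless = false;                           // stand-in for the runtime flag
const nativeConfigExists = (name) => false;         // stub for the view-manager lookup
function validateLegacyProp(props, propName, componentName) {
  // cheapest check first: when the flag is set, nothing else runs
  if (!bridgeless && !nativeConfigExists(componentName) && props[propName] !== undefined) {
    console.warn(propName + ' on ' + componentName + ' is deprecated');
  }
}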
Python
Python
add tests for the grid_data endpoint
2b2d97068fa45881672dab6f2134becae246f3f3
<ide><path>airflow/www/views.py <ide> def grid_data(self): <ide> } <ide> <ide> # avoid spaces to reduce payload size <del> return htmlsafe_json_dumps(data, separators=(',', ':')) <add> return ( <add> htmlsafe_json_dumps(data, separators=(',', ':')), <add> {'Content-Type': 'application/json; charset=utf-8'}, <add> ) <ide> <ide> @expose('/robots.txt') <ide> @action_logging <ide><path>tests/www/views/test_views_grid.py <add># <add># Licensed to the Apache Software Foundation (ASF) under one <add># or more contributor license agreements. See the NOTICE file <add># distributed with this work for additional information <add># regarding copyright ownership. The ASF licenses this file <add># to you under the Apache License, Version 2.0 (the <add># "License"); you may not use this file except in compliance <add># with the License. You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, <add># software distributed under the License is distributed on an <add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY <add># KIND, either express or implied. See the License for the <add># specific language governing permissions and limitations <add># under the License. <add> <add>import freezegun <add>import pendulum <add>import pytest <add> <add>from airflow.models import DagBag <add>from airflow.operators.empty import EmptyOperator <add>from airflow.utils.state import DagRunState, TaskInstanceState <add>from airflow.utils.task_group import TaskGroup <add>from airflow.utils.types import DagRunType <add>from tests.test_utils.mock_operators import MockOperator <add> <add>DAG_ID = 'test' <add>CURRENT_TIME = pendulum.DateTime(2021, 9, 7) <add> <add> <add>@pytest.fixture(autouse=True, scope="module") <add>def examples_dag_bag(): <add> # Speed up: We don't want example dags for this module <add> return DagBag(include_examples=False, read_dags_from_db=True) <add> <add> <add>@pytest.fixture <add>def dag_without_runs(dag_maker, session, app, monkeypatch): <add> with monkeypatch.context() as m: <add> # Remove global operator links for this test <add> m.setattr('airflow.plugins_manager.global_operator_extra_links', []) <add> m.setattr('airflow.plugins_manager.operator_extra_links', []) <add> m.setattr('airflow.plugins_manager.registered_operator_link_classes', {}) <add> <add> with dag_maker(dag_id=DAG_ID, serialized=True, session=session): <add> EmptyOperator(task_id="task1") <add> with TaskGroup(group_id='group'): <add> MockOperator.partial(task_id='mapped').expand(arg1=['a', 'b', 'c']) <add> <add> m.setattr(app, 'dag_bag', dag_maker.dagbag) <add> yield dag_maker <add> <add> <add>@pytest.fixture <add>def dag_with_runs(dag_without_runs): <add> with freezegun.freeze_time(CURRENT_TIME): <add> date = dag_without_runs.dag.start_date <add> run_1 = dag_without_runs.create_dagrun( <add> run_id='run_1', state=DagRunState.SUCCESS, run_type=DagRunType.SCHEDULED, execution_date=date <add> ) <add> run_2 = dag_without_runs.create_dagrun( <add> run_id='run_2', <add> run_type=DagRunType.SCHEDULED, <add> execution_date=dag_without_runs.dag.next_dagrun_info(date).logical_date, <add> ) <add> <add> yield run_1, run_2 <add> <add> <add>def test_no_runs(admin_client, dag_without_runs): <add> resp = admin_client.get(f'/object/grid_data?dag_id={DAG_ID}', follow_redirects=True) <add> assert resp.status_code == 200, resp.json <add> assert resp.json == { <add> 'dag_runs': [], <add> 'groups': { <add> 'children': [ <add> { <add> 
'extra_links': [], <add> 'id': 'task1', <add> 'instances': [], <add> 'is_mapped': False, <add> 'label': 'task1', <add> }, <add> { <add> 'children': [ <add> { <add> 'extra_links': [], <add> 'id': 'group.mapped', <add> 'instances': [], <add> 'is_mapped': True, <add> 'label': 'mapped', <add> } <add> ], <add> 'id': 'group', <add> 'instances': [], <add> 'label': 'group', <add> 'tooltip': '', <add> }, <add> ], <add> 'id': None, <add> 'instances': [], <add> 'label': None, <add> 'tooltip': '', <add> }, <add> } <add> <add> <add>def test_one_run(admin_client, dag_with_runs, session): <add> run1, run2 = dag_with_runs <add> <add> for ti in run1.task_instances: <add> ti.state = TaskInstanceState.SUCCESS <add> <add> session.flush() <add> <add> resp = admin_client.get(f'/object/grid_data?dag_id={DAG_ID}', follow_redirects=True) <add> assert resp.status_code == 200, resp.json <add> assert resp.json == { <add> 'dag_runs': [ <add> { <add> 'data_interval_end': '2016-01-02T00:00:00+00:00', <add> 'data_interval_start': '2016-01-01T00:00:00+00:00', <add> 'end_date': '2021-09-07T00:00:00+00:00', <add> 'execution_date': '2016-01-01T00:00:00+00:00', <add> 'last_scheduling_decision': None, <add> 'run_id': 'run_1', <add> 'run_type': 'scheduled', <add> 'start_date': '2016-01-01T00:00:00+00:00', <add> 'state': 'success', <add> }, <add> { <add> 'data_interval_end': '2016-01-03T00:00:00+00:00', <add> 'data_interval_start': '2016-01-02T00:00:00+00:00', <add> 'end_date': None, <add> 'execution_date': '2016-01-02T00:00:00+00:00', <add> 'last_scheduling_decision': None, <add> 'run_id': 'run_2', <add> 'run_type': 'scheduled', <add> 'start_date': '2016-01-01T00:00:00+00:00', <add> 'state': 'running', <add> }, <add> ], <add> 'groups': { <add> 'children': [ <add> { <add> 'extra_links': [], <add> 'id': 'task1', <add> 'instances': [ <add> { <add> 'end_date': None, <add> 'map_index': -1, <add> 'run_id': 'run_1', <add> 'start_date': None, <add> 'state': 'success', <add> 'task_id': 'task1', <add> 'try_number': 1, <add> }, <add> { <add> 'end_date': None, <add> 'map_index': -1, <add> 'run_id': 'run_2', <add> 'start_date': None, <add> 'state': None, <add> 'task_id': 'task1', <add> 'try_number': 1, <add> }, <add> ], <add> 'is_mapped': False, <add> 'label': 'task1', <add> }, <add> { <add> 'children': [ <add> { <add> 'extra_links': [], <add> 'id': 'group.mapped', <add> 'instances': [ <add> { <add> 'end_date': None, <add> 'mapped_states': ['success', 'success', 'success'], <add> 'run_id': 'run_1', <add> 'start_date': None, <add> 'state': 'success', <add> 'task_id': 'group.mapped', <add> 'try_number': 1, <add> }, <add> { <add> 'end_date': None, <add> 'mapped_states': [None, None, None], <add> 'run_id': 'run_2', <add> 'start_date': None, <add> 'state': None, <add> 'task_id': 'group.mapped', <add> 'try_number': 1, <add> }, <add> ], <add> 'is_mapped': True, <add> 'label': 'mapped', <add> }, <add> ], <add> 'id': 'group', <add> 'instances': [ <add> { <add> 'end_date': None, <add> 'run_id': 'run_1', <add> 'start_date': None, <add> 'state': 'success', <add> 'task_id': 'group', <add> }, <add> { <add> 'end_date': None, <add> 'run_id': 'run_2', <add> 'start_date': None, <add> 'state': None, <add> 'task_id': 'group', <add> }, <add> ], <add> 'label': 'group', <add> 'tooltip': '', <add> }, <add> ], <add> 'id': None, <add> 'instances': [ <add> { <add> 'end_date': None, <add> 'run_id': 'run_1', <add> 'start_date': None, <add> 'state': 'success', <add> 'task_id': None, <add> }, <add> {'end_date': None, 'run_id': 'run_2', 'start_date': None, 'state': None, 
'task_id': None}, <add> ], <add> 'label': None, <add> 'tooltip': '', <add> }, <add> }
2
PHP
PHP
update filesystemadapter.php
e0755231de6c1f05720fd763a6e782b3f25d8237
<ide><path>src/Illuminate/Filesystem/FilesystemAdapter.php <ide> public function temporaryUrl($path, $expiration, array $options = []) <ide> { <ide> $adapter = $this->driver->getAdapter(); <ide> <del> $client = $adapter->getClient(); <del> <del> if (! $adapter instanceof AwsS3Adapter) { <add> if (method_exists($adapter, 'getTemporaryUrl')) { <add> return $adapter->getTemporaryUrl($path, $expiration, $options); <add> } else if (! $adapter instanceof AwsS3Adapter) { <ide> throw new RuntimeException('This driver does not support creating temporary URLs.'); <ide> } <ide> <add> $client = $adapter->getClient(); <add> <ide> $command = $client->getCommand('GetObject', array_merge([ <ide> 'Bucket' => $adapter->getBucket(), <ide> 'Key' => $adapter->getPathPrefix().$path,
1
Java
Java
replace explicit use of pooleddatabuffer.release()
3aaff40e2ea3719eaaa98689e058c3edcc0a6e8a
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DataBufferUtils.java <ide> public static boolean release(@Nullable DataBuffer dataBuffer) { <ide> catch (IllegalStateException ex) { <ide> // Avoid dependency on Netty: IllegalReferenceCountException <ide> if (logger.isDebugEnabled()) { <del> logger.debug("Failed to release PooledDataBuffer", ex); <add> logger.debug("Failed to release PooledDataBuffer: " + dataBuffer, ex); <ide> } <ide> return false; <ide> } <ide><path>spring-web/src/main/java/org/springframework/http/codec/multipart/PartHttpMessageWriter.java <ide> import org.springframework.core.codec.Hints; <ide> import org.springframework.core.io.buffer.DataBuffer; <ide> import org.springframework.core.io.buffer.DataBufferFactory; <add>import org.springframework.core.io.buffer.DataBufferUtils; <ide> import org.springframework.core.io.buffer.PooledDataBuffer; <ide> import org.springframework.http.HttpHeaders; <ide> import org.springframework.http.MediaType; <ide> public Mono<Void> write(Publisher<? extends Part> parts, <ide> Flux<DataBuffer> body = Flux.from(parts) <ide> .concatMap(part -> encodePart(boundary, part, outputMessage.bufferFactory())) <ide> .concatWith(generateLastLine(boundary, outputMessage.bufferFactory())) <del> .doOnDiscard(PooledDataBuffer.class, PooledDataBuffer::release); <add> .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release); <ide> <ide> return outputMessage.writeWith(body); <ide> }
2
Java
Java
increase fudge factor
836976d7328464bbe5939dd68c58b48678413207
<ide><path>spring-test/src/test/java/org/springframework/test/context/junit/jupiter/event/ParallelApplicationEventsIntegrationTests.java <ide> void executeTestsInParallel(Class<?> testClass) { <ide> assertThat(testNames).hasSize(10); <ide> <ide> // Skip the following assertion entirely if the thread count is too low. <del> if (ManagementFactory.getThreadMXBean().getThreadCount() >= 2) { <add> if (ManagementFactory.getThreadMXBean().getThreadCount() >= 4) { <ide> // There are probably 10 different thread names on a developer's machine, <ide> // but we really just want to assert that at least two different threads <ide> // were used, since the CI server often has fewer threads available.
1
Java
Java
ignore empty statements (spr-7363)
a8133a9917744ed656189dd7f553a7b1acfa1129
<ide><path>org.springframework.jdbc/src/main/java/org/springframework/jdbc/datasource/init/ResourceDatabasePopulator.java <ide> private boolean containsSqlScriptDelimiters(String script, char delim) { <ide> } <ide> <ide> /** <del> * Split an SQL script into separate statements delimited with the provided delimiter character. Each individual <del> * statement will be added to the provided <code>List</code>. <add> * Split an SQL script into separate statements delimited with the provided delimiter character. <add> * Each individual statement will be added to the provided <code>List</code>. <ide> * @param script the SQL script <del> * @param delim character delimiting each statement - typically a ';' character <add> * @param delim character delimiting each statement (typically a ';' character) <ide> * @param statements the List that will contain the individual statements <ide> */ <ide> private void splitSqlScript(String script, char delim, List<String> statements) { <ide> StringBuilder sb = new StringBuilder(); <ide> boolean inLiteral = false; <ide> char[] content = script.toCharArray(); <ide> for (int i = 0; i < script.length(); i++) { <del> if (content[i] == '\'') { <add> char c = content[i]; <add> if (c == '\'') { <ide> inLiteral = !inLiteral; <ide> } <del> if (content[i] == delim && !inLiteral) { <add> if ((c == delim || c == '\n') && !inLiteral) { <ide> if (sb.length() > 0) { <ide> statements.add(sb.toString()); <ide> sb = new StringBuilder(); <ide> } <ide> } <ide> else { <del> sb.append(content[i]); <add> sb.append(c); <ide> } <ide> } <del> if (sb.length() > 0) { <add> if (StringUtils.hasText(sb)) { <ide> statements.add(sb.toString()); <ide> } <ide> }
1
Javascript
Javascript
create diagnostics channels lazily
cb955e0dd1c394fa7f168ba30c0832d49e9504fd
<ide><path>lib/net.js <ide> const noop = () => {}; <ide> <ide> const kPerfHooksNetConnectContext = Symbol('kPerfHooksNetConnectContext'); <ide> <del>const dc = require('diagnostics_channel'); <del>const netClientSocketChannel = dc.channel('net.client.socket'); <del>const netServerSocketChannel = dc.channel('net.server.socket'); <add>let netClientSocketChannel; <add>let netServerSocketChannel; <add>function lazyChannels() { <add> // TODO(joyeecheung): support diagnostics channels in the snapshot. <add> // For now it is fine to create them lazily when there isn't a snapshot to <add> // build. If users need the channels they would have to create them first <add> // before invoking any built-ins that would publish to these channels <add> // anyway. <add> if (netClientSocketChannel === undefined) { <add> const dc = require('diagnostics_channel'); <add> netClientSocketChannel = dc.channel('net.client.socket'); <add> netServerSocketChannel = dc.channel('net.server.socket'); <add> } <add>} <ide> <ide> const { <ide> hasObserver, <ide> function connect(...args) { <ide> const options = normalized[0]; <ide> debug('createConnection', normalized); <ide> const socket = new Socket(options); <add> lazyChannels(); <ide> if (netClientSocketChannel.hasSubscribers) { <ide> netClientSocketChannel.publish({ <ide> socket, <ide> function onconnection(err, clientHandle) { <ide> socket.server = self; <ide> socket._server = self; <ide> self.emit('connection', socket); <add> lazyChannels(); <ide> if (netServerSocketChannel.hasSubscribers) { <ide> netServerSocketChannel.publish({ <ide> socket, <ide><path>test/parallel/test-bootstrap-modules.js <ide> const expectedModules = new Set([ <ide> 'NativeModule v8', <ide> 'NativeModule internal/v8/startup_snapshot', <ide> 'NativeModule vm', <del> 'NativeModule diagnostics_channel', <ide> ]); <ide> <ide> if (!common.isMainThread) {
2
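The net.js change above defers channel creation until a socket is actually handled, as the added comment explains, so that module load (for example while building a snapshot) does not touch diagnostics_channel. A small sketch of the general lazy-initialization pattern, using a toy factory rather than the real diagnostics_channel API:

const makeChannel = (name) => ({ name, hasSubscribers: false, publish() {} });  // toy stand-in
let channel;                                        // intentionally not created at load time
function getChannel() {
  if (channel === undefined) {
    channel = makeChannel('example');               // first use pays the creation cost
  }
  return channel;
}
function publishIfSubscribed(payload) {
  const ch = getChannel();
  if (ch.hasSubscribers) {
    ch.publish(payload);
  }
}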
PHP
PHP
initialize $fixtures to null
c224bbffc01e0e12cc6427f06c6cc3585f0884f1
<ide><path>src/TestSuite/TestCase.php <ide> abstract class TestCase extends BaseTestCase <ide> * <ide> * @var array|string|null <ide> */ <del> public $fixtures; <add> public $fixtures = null; <ide> <ide> /** <ide> * By default, all fixtures attached to this class will be truncated and reloaded after each test.
1
Javascript
Javascript
define all used properties in constructors
a2ea1344ea9c9badaac500f399d89293f4238fed
<ide><path>lib/_http_client.js <ide> function ClientRequest(options, cb) { <ide> self._renderHeaders()); <ide> } <ide> <add> this._ended = false; <add> this.res = null; <add> this.aborted = undefined; <add> this.timeoutCb = null; <add> this.upgradeOrConnect = false; <add> this.parser = null; <add> this.maxHeadersCount = null; <add> <ide> var called = false; <ide> if (self.socketPath) { <ide> self._last = true; <ide> function ClientRequest(options, cb) { <ide> self._flush(); <ide> self = null; <ide> }); <del> <del> this._ended = false; <ide> } <ide> <ide> util.inherits(ClientRequest, OutgoingMessage); <ide> <ide> exports.ClientRequest = ClientRequest; <ide> <del>ClientRequest.prototype.aborted = undefined; <del> <ide> ClientRequest.prototype._finish = function _finish() { <ide> DTRACE_HTTP_CLIENT_REQUEST(this, this.connection); <ide> LTTNG_HTTP_CLIENT_REQUEST(this, this.connection); <ide> ClientRequest.prototype._implicitHeader = function _implicitHeader() { <ide> }; <ide> <ide> ClientRequest.prototype.abort = function abort() { <del> if (this.aborted === undefined) { <add> if (!this.aborted) { <ide> process.nextTick(emitAbortNT, this); <ide> } <ide> // Mark as aborting so we can avoid sending queued request data <ide> function parserOnIncomingClient(res, shouldKeepAlive) { <ide> <ide> if (res.statusCode === 100) { <ide> // restart the parser, as this is a continue message. <del> delete req.res; // Clear res so that we don't hit double-responses. <add> req.res = null; // Clear res so that we don't hit double-responses. <ide> req.emit('continue'); <ide> return true; <ide> } <ide><path>lib/_http_server.js <ide> function Server(requestListener) { <ide> this.timeout = 2 * 60 * 1000; <ide> <ide> this._pendingResponseData = 0; <add> this.maxHeadersCount = null; <ide> } <ide> util.inherits(Server, net.Server); <ide>
2
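The http client change above gives every property an initial value in the constructor and replaces delete req.res with an assignment. This is commonly done so that instances keep a single stable object shape in V8 rather than transitioning when properties appear or disappear later; the class below is a toy illustration of the pattern, not the actual ClientRequest:

class ToyRequest {
  constructor() {
    // every field the instance will ever use is declared here with a default
    this.res = null;
    this.aborted = false;
    this.parser = null;
  }
  clearResponse() {
    this.res = null;          // reset in place rather than delete this.res
  }
}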
Java
Java
support implicit ports in mhsr.getrequesturl()
86591e5e5aeb7eead1798069d8b6f9c1790f4b9d
<ide><path>spring-test/src/main/java/org/springframework/mock/web/MockHttpServletRequest.java <ide> import java.util.Locale; <ide> import java.util.Map; <ide> import java.util.Set; <add> <ide> import javax.servlet.AsyncContext; <ide> import javax.servlet.DispatcherType; <ide> import javax.servlet.RequestDispatcher; <ide> public String getRequestURI() { <ide> <ide> @Override <ide> public StringBuffer getRequestURL() { <del> StringBuffer url = new StringBuffer(this.scheme); <del> url.append("://").append(this.serverName).append(':').append(this.serverPort); <add> StringBuffer url = new StringBuffer(this.scheme).append("://").append(this.serverName); <add> <add> if (this.serverPort > 0 <add> && (("http".equalsIgnoreCase(scheme) && this.serverPort != 80) || ("https".equalsIgnoreCase(scheme) && this.serverPort != 443))) { <add> url.append(':').append(this.serverPort); <add> } <add> <ide> url.append(getRequestURI()); <ide> return url; <ide> } <ide><path>spring-test/src/test/java/org/springframework/mock/web/MockHttpServletRequestTests.java <ide> public void setPreferredLocales() { <ide> assertEqualEnumerations(Collections.enumeration(preferredLocales), request.getLocales()); <ide> } <ide> <add> @Test <add> public void getRequestURL() { <add> request.setServerPort(8080); <add> request.setRequestURI("/path"); <add> assertEquals("http://localhost:8080/path", request.getRequestURL().toString()); <add> <add> request.setScheme("https"); <add> request.setServerName("example.com"); <add> request.setServerPort(8443); <add> assertEquals("https://example.com:8443/path", request.getRequestURL().toString()); <add> } <add> <add> @Test <add> public void getRequestURLWithDefaults() { <add> StringBuffer requestURL = request.getRequestURL(); <add> assertEquals("http://localhost", requestURL.toString()); <add> } <add> <add> @Test <add> public void getRequestURLWithDefaultsAndHttps() { <add> request.setScheme("https"); <add> request.setServerPort(443); <add> StringBuffer requestURL = request.getRequestURL(); <add> assertEquals("https://localhost", requestURL.toString()); <add> } <add> <add> @Test <add> public void getRequestURLWithNegativePort() { <add> request.setServerPort(-99); <add> StringBuffer requestURL = request.getRequestURL(); <add> assertEquals("http://localhost", requestURL.toString()); <add> } <add> <ide> private void assertEqualEnumerations(Enumeration<?> enum1, Enumeration<?> enum2) { <ide> assertNotNull(enum1); <ide> assertNotNull(enum2);
2
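The MockHttpServletRequest change above only appends the port when it is positive and not the scheme's default (80 for http, 443 for https), matching the expectations in the added tests. The same rule in a small standalone helper, written in JavaScript purely for illustration since the commit itself is Java:

function buildRequestUrl(scheme, host, port, path) {
  let url = scheme + '://' + host;
  const isDefault =
    (scheme === 'http' && port === 80) || (scheme === 'https' && port === 443);
  if (port > 0 && !isDefault) {
    url += ':' + port;
  }
  return url + path;
}
// buildRequestUrl('https', 'example.com', 8443, '/path') -> 'https://example.com:8443/path'
// buildRequestUrl('https', 'example.com', 443, '/path')  -> 'https://example.com/path'
// buildRequestUrl('http', 'localhost', -99, '/')         -> 'http://localhost/'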
PHP
PHP
allow complex urls for login action
2a8d9abe20681573fa51f317e931b9011f48ff6b
<ide><path>src/Controller/Component/AuthComponent.php <ide> protected function _loginActionRedirectUrl() <ide> if (is_array($loginAction)) { <ide> $loginAction['?'][static::QUERY_STRING_REDIRECT] = $currentUrl; <ide> } else { <del> $loginAction .= '?' . static::QUERY_STRING_REDIRECT . '=' . rawurlencode($currentUrl); <add> $char = strpos($loginAction, '?') === false ? '?' : '&'; <add> $loginAction .= $char . static::QUERY_STRING_REDIRECT . '=' . urlencode($currentUrl); <ide> } <ide> <ide> return $loginAction; <ide><path>tests/TestCase/Controller/Component/AuthComponentTest.php <ide> public function testLoginRedirectQueryString() <ide> $this->assertEquals($expected, $redirectHeader); <ide> } <ide> <add> /** <add> * @return void <add> */ <add> public function testLoginRedirectQueryStringWithComplexLoginActionUrl() <add> { <add> $this->Auth->session->delete('Auth'); <add> $url = '/posts/view/29'; <add> $this->Auth->request->addParams(Router::parse($url)); <add> $this->Auth->request->url = $this->Auth->request->here = Router::normalize($url); <add> $this->Auth->request->query = [ <add> 'print' => 'true', <add> 'refer' => 'menu' <add> ]; <add> <add> $this->Auth->session->delete('Auth'); <add> $this->Auth->config('loginAction', '/auth_test/login/passed-param?a=b'); <add> $event = new Event('Controller.startup', $this->Controller); <add> $response = $this->Auth->startup($event); <add> <add> $redirectHeader = $response->header()['Location']; <add> $expected = Router::url(['controller' => 'AuthTest', 'action' => 'login', 'passed-param', '?' => ['a' => 'b', 'redirect' => '/posts/view/29?print=true&refer=menu']], true); <add> $this->assertEquals($expected, $redirectHeader); <add> } <add> <ide> /** <ide> * @return void <ide> */ <ide> public function testStatelessAuthNoRedirect() <ide> * @return void <ide> * @triggers Controller.startup $this->Controller <ide> */ <del> public function testStatelessFollowedByStatefulAuth() <add> public function testStatelessAuthRedirectToLogin() <ide> { <ide> $this->Auth->response = $this->getMockBuilder('Cake\Network\Response') <ide> ->setMethods(['stop', 'statusCode', 'send'])
2
Javascript
Javascript
avoid needless sort in chunk compare
fb9b61b72fbf2bb0de8d6535b2496e5794299a13
<ide><path>lib/Chunk.js <ide> class Chunk { <ide> * @returns {-1|0|1} this is a comparitor function like sort and returns -1, 0, or 1 based on sort order <ide> */ <ide> compareTo(otherChunk) { <del> this._modules.sort(); <del> otherChunk._modules.sort(); <ide> if (this._modules.size > otherChunk._modules.size) return -1; <ide> if (this._modules.size < otherChunk._modules.size) return 1; <add> this._modules.sort(); <add> otherChunk._modules.sort(); <ide> const a = this._modules[Symbol.iterator](); <ide> const b = otherChunk._modules[Symbol.iterator](); <ide> // eslint-disable-next-line no-constant-condition <ide> while (true) { <ide> const aItem = a.next(); <del> const bItem = b.next(); <ide> if (aItem.done) return 0; <add> const bItem = b.next(); <ide> const aModuleIdentifier = aItem.value.identifier(); <ide> const bModuleIdentifier = bItem.value.identifier(); <ide> if (aModuleIdentifier < bModuleIdentifier) return -1;
1
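The webpack change above moves the cheap size comparison in front of the two sort calls, so sorting only happens when the sizes are equal, and it also avoids advancing the second iterator once the first one is exhausted. A toy version of the same ordering with plain arrays; compareLists is a made-up helper, not webpack's Chunk.compareTo:

function compareLists(a, b) {
  if (a.length > b.length) return -1;     // cheap check first, no sorting needed
  if (a.length < b.length) return 1;
  const as = [...a].sort();               // only sort once the lengths match
  const bs = [...b].sort();
  for (let i = 0; i < as.length; i++) {
    if (as[i] < bs[i]) return -1;
    if (as[i] > bs[i]) return 1;
  }
  return 0;
}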
Python
Python
enable full convolution with the theano backend.
bc6880fa348eda885dfad305088b395510d74300
<ide><path>keras/backend/theano_backend.py <ide> def _preprocess_border_mode(border_mode): <ide> th_border_mode = 'half' <ide> elif border_mode == 'valid': <ide> th_border_mode = 'valid' <add> elif border_mode == 'full': <add> th_border_mode = 'full' <ide> else: <ide> raise Exception('Border mode not supported: ' + str(border_mode)) <ide> return th_border_mode <ide><path>keras/layers/convolutional.py <ide> class Convolution1D(Layer): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample_length: factor by which to subsample output. <ide> W_regularizer: instance of [WeightRegularizer](../regularizers.md) <ide> (eg. L1 or L2 regularization), applied to the main weights matrix. <ide> def __init__(self, nb_filter, filter_length, <ide> W_constraint=None, b_constraint=None, <ide> bias=True, input_dim=None, input_length=None, **kwargs): <ide> <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for Convolution1D:', border_mode) <ide> self.nb_filter = nb_filter <ide> self.filter_length = filter_length <ide> self.init = initializations.get(init, dim_ordering='th') <ide> self.activation = activations.get(activation) <del> assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}' <ide> self.border_mode = border_mode <ide> self.subsample_length = subsample_length <ide> <ide> class AtrousConvolution1D(Convolution1D): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample_length: factor by which to subsample output. <ide> atrous_rate: Factor for kernel dilation. Also called filter_dilation <ide> elsewhere. <ide> def __init__(self, nb_filter, filter_length, <ide> W_constraint=None, b_constraint=None, <ide> bias=True, **kwargs): <ide> <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for AtrousConv1D:', border_mode) <ide> <ide> self.atrous_rate = int(atrous_rate) <ide> class Convolution2D(Layer): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample: tuple of length 2. Factor by which to subsample output. <ide> Also called strides elsewhere. 
<ide> W_regularizer: instance of [WeightRegularizer](../regularizers.md) <ide> def __init__(self, nb_filter, nb_row, nb_col, <ide> bias=True, **kwargs): <ide> if dim_ordering == 'default': <ide> dim_ordering = K.image_dim_ordering() <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for Convolution2D:', border_mode) <ide> self.nb_filter = nb_filter <ide> self.nb_row = nb_row <ide> self.nb_col = nb_col <ide> self.init = initializations.get(init, dim_ordering=dim_ordering) <ide> self.activation = activations.get(activation) <del> assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}' <ide> self.border_mode = border_mode <ide> self.subsample = tuple(subsample) <ide> assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}' <ide> class Deconvolution2D(Convolution2D): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample: tuple of length 2. Factor by which to oversample output. <ide> Also called strides elsewhere. <ide> W_regularizer: instance of [WeightRegularizer](../regularizers.md) <ide> def __init__(self, nb_filter, nb_row, nb_col, output_shape, <ide> bias=True, **kwargs): <ide> if dim_ordering == 'default': <ide> dim_ordering = K.image_dim_ordering() <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for Deconvolution2D:', border_mode) <ide> <ide> self.output_shape_ = output_shape <ide> class AtrousConvolution2D(Convolution2D): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample: tuple of length 2. Factor by which to subsample output. <ide> Also called strides elsewhere. <ide> atrous_rate: tuple of length 2. Factor for kernel dilation. <ide> def __init__(self, nb_filter, nb_row, nb_col, <ide> if dim_ordering == 'default': <ide> dim_ordering = K.image_dim_ordering() <ide> <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for AtrousConv2D:', border_mode) <ide> <ide> self.atrous_rate = tuple(atrous_rate) <ide> class Convolution3D(Layer): <ide> If you don't specify anything, no activation is applied <ide> (ie. "linear" activation: a(x) = x). <ide> weights: list of Numpy arrays to set as initial weights. <del> border_mode: 'valid' or 'same'. <add> border_mode: 'valid', 'same' or 'full'. ('full' requires the Theano backend.) <ide> subsample: tuple of length 3. Factor by which to subsample output. <ide> Also called strides elsewhere. <ide> Note: 'subsample' is implemented by slicing the output of conv3d with strides=(1,1,1). 
<ide> def __init__(self, nb_filter, kernel_dim1, kernel_dim2, kernel_dim3, <ide> if dim_ordering == 'default': <ide> dim_ordering = K.image_dim_ordering() <ide> <del> if border_mode not in {'valid', 'same'}: <add> if border_mode not in {'valid', 'same', 'full'}: <ide> raise Exception('Invalid border mode for Convolution3D:', border_mode) <ide> self.nb_filter = nb_filter <ide> self.kernel_dim1 = kernel_dim1 <ide> self.kernel_dim2 = kernel_dim2 <ide> self.kernel_dim3 = kernel_dim3 <ide> self.init = initializations.get(init, dim_ordering=dim_ordering) <ide> self.activation = activations.get(activation) <del> assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}' <ide> self.border_mode = border_mode <ide> self.subsample = tuple(subsample) <ide> assert dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}' <ide><path>keras/utils/np_utils.py <ide> def convert_kernel(kernel, dim_ordering='default'): <ide> def conv_output_length(input_length, filter_size, border_mode, stride, dilation=1): <ide> if input_length is None: <ide> return None <del> assert border_mode in {'same', 'valid'} <add> assert border_mode in {'same', 'valid', 'full'} <ide> dilated_filter_size = filter_size + (filter_size - 1) * (dilation - 1) <ide> if border_mode == 'same': <ide> output_length = input_length <ide> elif border_mode == 'valid': <ide> output_length = input_length - dilated_filter_size + 1 <add> elif border_mode == 'full': <add> output_length = input_length + dilated_filter_size - 1 <ide> return (output_length + stride - 1) // stride <ide> <ide> <ide> def conv_input_length(output_length, filter_size, border_mode, stride): <ide> if output_length is None: <ide> return None <del> assert border_mode in {'same', 'valid'} <add> assert border_mode in {'same', 'valid', 'full'} <ide> if border_mode == 'same': <ide> pad = filter_size // 2 <ide> elif border_mode == 'valid': <ide> pad = 0 <add> elif border_mode == 'full': <add> pad = filter_size - 1 <ide> return (output_length - 1) * stride - 2 * pad + filter_size <ide><path>tests/keras/layers/test_convolutional.py <ide> from keras.layers import convolutional, pooling <ide> <ide> <add># TensorFlow does not support full convolution. 
<add>if K._BACKEND == 'theano': <add> _convolution_border_modes = ['valid', 'same', 'full'] <add>else: <add> _convolution_border_modes = ['valid', 'same'] <add> <add> <ide> @keras_test <ide> def test_convolution_1d(): <ide> nb_samples = 2 <ide> def test_convolution_1d(): <ide> filter_length = 3 <ide> nb_filter = 3 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample_length in [1, 2]: <ide> if border_mode == 'same' and subsample_length != 1: <ide> continue <ide> def test_atrous_conv_1d(): <ide> filter_length = 3 <ide> nb_filter = 3 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample_length in [1, 2]: <ide> for atrous_rate in [1, 2]: <ide> if border_mode == 'same' and subsample_length != 1: <ide> def test_convolution_2d(): <ide> nb_row = 10 <ide> nb_col = 6 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample in [(1, 1), (2, 2)]: <ide> if border_mode == 'same' and subsample != (1, 1): <ide> continue <ide> def test_deconvolution_2d(): <ide> nb_row = 10 <ide> nb_col = 6 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample in [(1, 1), (2, 2)]: <ide> if border_mode == 'same' and subsample != (1, 1): <ide> continue <ide> def test_atrous_conv_2d(): <ide> nb_row = 10 <ide> nb_col = 6 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample in [(1, 1), (2, 2)]: <ide> for atrous_rate in [(1, 1), (2, 2)]: <ide> if border_mode == 'same' and subsample != (1, 1): <ide> def test_separable_conv_2d(): <ide> nb_row = 10 <ide> nb_col = 6 <ide> <del> for border_mode in ['valid', 'same']: <add> for border_mode in _convolution_border_modes: <ide> for subsample in [(1, 1), (2, 2)]: <ide> for multiplier in [1, 2]: <ide> if border_mode == 'same' and subsample != (1, 1): <ide> def test_convolution_3d(): <ide> input_len_dim2 = 11 <ide> input_len_dim3 = 12 <ide> <del> for border_mode in ['same', 'valid']: <add> for border_mode in _convolution_border_modes: <ide> for subsample in [(1, 1, 1), (2, 2, 2)]: <ide> if border_mode == 'same' and subsample != (1, 1, 1): <ide> continue
4
Javascript
Javascript
flatten access to primordials
0646eda4fc0affb98e13c30acb522e63b7fd6dde
<ide><path>lib/_http_agent.js <ide> 'use strict'; <ide> <ide> const { <del> Object: { <del> setPrototypeOf: ObjectSetPrototypeOf, <del> keys: ObjectKeys, <del> values: ObjectValues <del> } <add> ObjectKeys, <add> ObjectSetPrototypeOf, <add> ObjectValues, <ide> } = primordials; <ide> <ide> const net = require('net'); <ide><path>lib/_http_client.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add> ObjectKeys, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const net = require('net'); <ide> const url = require('url'); <ide> function ClientRequest(input, options, cb) { <ide> cb = options; <ide> options = input || {}; <ide> } else { <del> options = Object.assign(input || {}, options); <add> options = ObjectAssign(input || {}, options); <ide> } <ide> <ide> let agent = options.agent; <ide> function ClientRequest(input, options, cb) { <ide> const headersArray = Array.isArray(options.headers); <ide> if (!headersArray) { <ide> if (options.headers) { <del> const keys = Object.keys(options.headers); <add> const keys = ObjectKeys(options.headers); <ide> for (let i = 0; i < keys.length; i++) { <ide> const key = keys[i]; <ide> this.setHeader(key, options.headers[key]); <ide> function ClientRequest(input, options, cb) { <ide> <ide> this._deferToConnect(null, null, () => this._flush()); <ide> } <del>Object.setPrototypeOf(ClientRequest.prototype, OutgoingMessage.prototype); <del>Object.setPrototypeOf(ClientRequest, OutgoingMessage); <add>ObjectSetPrototypeOf(ClientRequest.prototype, OutgoingMessage.prototype); <add>ObjectSetPrototypeOf(ClientRequest, OutgoingMessage); <ide> <ide> ClientRequest.prototype._finish = function _finish() { <ide> DTRACE_HTTP_CLIENT_REQUEST(this, this.socket); <ide><path>lib/_http_common.js <ide> <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathMin, <add>} = primordials; <ide> const { setImmediate } = require('timers'); <ide> <ide> const { methods, HTTPParser } = internalBinding('http_parser'); <ide> function parserOnHeadersComplete(versionMajor, versionMinor, headers, method, <ide> <ide> // If parser.maxHeaderPairs <= 0 assume that there's no limit. <ide> if (parser.maxHeaderPairs > 0) <del> n = Math.min(n, parser.maxHeaderPairs); <add> n = MathMin(n, parser.maxHeaderPairs); <ide> <ide> incoming._addHeaderLines(headers, n); <ide> <ide><path>lib/_http_incoming.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const Stream = require('stream'); <ide> <ide> function IncomingMessage(socket) { <ide> // read by the user, so there's no point continuing to handle it. 
<ide> this._dumped = false; <ide> } <del>Object.setPrototypeOf(IncomingMessage.prototype, Stream.Readable.prototype); <del>Object.setPrototypeOf(IncomingMessage, Stream.Readable); <add>ObjectSetPrototypeOf(IncomingMessage.prototype, Stream.Readable.prototype); <add>ObjectSetPrototypeOf(IncomingMessage, Stream.Readable); <ide> <del>Object.defineProperty(IncomingMessage.prototype, 'connection', { <add>ObjectDefineProperty(IncomingMessage.prototype, 'connection', { <ide> get: function() { <ide> return this.socket; <ide> }, <ide><path>lib/_http_outgoing.js <ide> <ide> 'use strict'; <ide> <del>const { Object, ObjectPrototype } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { getDefaultHighWaterMark } = require('internal/streams/state'); <ide> const assert = require('internal/assert'); <ide> function OutgoingMessage() { <ide> <ide> this._onPendingData = noopPendingOutput; <ide> } <del>Object.setPrototypeOf(OutgoingMessage.prototype, Stream.prototype); <del>Object.setPrototypeOf(OutgoingMessage, Stream); <add>ObjectSetPrototypeOf(OutgoingMessage.prototype, Stream.prototype); <add>ObjectSetPrototypeOf(OutgoingMessage, Stream); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableFinished', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableFinished', { <ide> get() { <ide> return ( <ide> this.finished && <ide> Object.defineProperty(OutgoingMessage.prototype, 'writableFinished', { <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableObjectMode', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableObjectMode', { <ide> get() { <ide> return false; <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableLength', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableLength', { <ide> get() { <ide> return this.outputSize + (this.socket ? this.socket.writableLength : 0); <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableHighWaterMark', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableHighWaterMark', { <ide> get() { <ide> return this.socket ? this.socket.writableHighWaterMark : HIGH_WATER_MARK; <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableCorked', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableCorked', { <ide> get() { <ide> const corked = this.socket ? 
this.socket.writableCorked : 0; <ide> return corked + this[kCorked]; <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, '_headers', { <add>ObjectDefineProperty(OutgoingMessage.prototype, '_headers', { <ide> get: internalUtil.deprecate(function() { <ide> return this.getHeaders(); <ide> }, 'OutgoingMessage.prototype._headers is deprecated', 'DEP0066'), <ide> set: internalUtil.deprecate(function(val) { <ide> if (val == null) { <ide> this[kOutHeaders] = null; <ide> } else if (typeof val === 'object') { <del> const headers = this[kOutHeaders] = Object.create(null); <del> const keys = Object.keys(val); <add> const headers = this[kOutHeaders] = ObjectCreate(null); <add> const keys = ObjectKeys(val); <ide> for (var i = 0; i < keys.length; ++i) { <ide> const name = keys[i]; <ide> headers[name.toLowerCase()] = [name, val[name]]; <ide> Object.defineProperty(OutgoingMessage.prototype, '_headers', { <ide> }, 'OutgoingMessage.prototype._headers is deprecated', 'DEP0066') <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'connection', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'connection', { <ide> get: function() { <ide> return this.socket; <ide> }, <ide> Object.defineProperty(OutgoingMessage.prototype, 'connection', { <ide> } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, '_headerNames', { <add>ObjectDefineProperty(OutgoingMessage.prototype, '_headerNames', { <ide> get: internalUtil.deprecate(function() { <ide> const headers = this[kOutHeaders]; <ide> if (headers !== null) { <del> const out = Object.create(null); <del> const keys = Object.keys(headers); <add> const out = ObjectCreate(null); <add> const keys = ObjectKeys(headers); <ide> for (var i = 0; i < keys.length; ++i) { <ide> const key = keys[i]; <ide> const val = headers[key][0]; <ide> Object.defineProperty(OutgoingMessage.prototype, '_headerNames', { <ide> const headers = this[kOutHeaders]; <ide> if (!headers) <ide> return; <del> const keys = Object.keys(val); <add> const keys = ObjectKeys(val); <ide> for (var i = 0; i < keys.length; ++i) { <ide> const header = headers[keys[i]]; <ide> if (header) <ide> OutgoingMessage.prototype._renderHeaders = function _renderHeaders() { <ide> const headers = {}; <ide> <ide> if (headersMap !== null) { <del> const keys = Object.keys(headersMap); <add> const keys = ObjectKeys(headersMap); <ide> for (var i = 0, l = keys.length; i < l; i++) { <ide> const key = keys[i]; <ide> headers[headersMap[key][0]] = headersMap[key][1]; <ide> function _storeHeader(firstLine, headers) { <ide> } <ide> } else { <ide> for (const key in headers) { <del> if (ObjectPrototype.hasOwnProperty(headers, key)) { <add> if (ObjectPrototypeHasOwnProperty(headers, key)) { <ide> processHeader(this, state, key, headers[key], true); <ide> } <ide> } <ide> OutgoingMessage.prototype.setHeader = function setHeader(name, value) { <ide> <ide> let headers = this[kOutHeaders]; <ide> if (headers === null) <del> this[kOutHeaders] = headers = Object.create(null); <add> this[kOutHeaders] = headers = ObjectCreate(null); <ide> <ide> headers[name.toLowerCase()] = [name, value]; <ide> }; <ide> OutgoingMessage.prototype.getHeader = function getHeader(name) { <ide> <ide> // Returns an array of the names of the current outgoing headers. <ide> OutgoingMessage.prototype.getHeaderNames = function getHeaderNames() { <del> return this[kOutHeaders] !== null ? Object.keys(this[kOutHeaders]) : []; <add> return this[kOutHeaders] !== null ? 
ObjectKeys(this[kOutHeaders]) : []; <ide> }; <ide> <ide> <ide> // Returns a shallow copy of the current outgoing headers. <ide> OutgoingMessage.prototype.getHeaders = function getHeaders() { <ide> const headers = this[kOutHeaders]; <del> const ret = Object.create(null); <add> const ret = ObjectCreate(null); <ide> if (headers) { <del> const keys = Object.keys(headers); <add> const keys = ObjectKeys(headers); <ide> for (var i = 0; i < keys.length; ++i) { <ide> const key = keys[i]; <ide> const val = headers[key][1]; <ide> OutgoingMessage.prototype._implicitHeader = function _implicitHeader() { <ide> this.emit('error', new ERR_METHOD_NOT_IMPLEMENTED('_implicitHeader()')); <ide> }; <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'headersSent', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'headersSent', { <ide> configurable: true, <ide> enumerable: true, <ide> get: function() { return !!this._header; } <ide> }); <ide> <del>Object.defineProperty(OutgoingMessage.prototype, 'writableEnded', { <add>ObjectDefineProperty(OutgoingMessage.prototype, 'writableEnded', { <ide> get: function() { return this.finished; } <ide> }); <ide> <ide> function connectionCorkNT(conn) { <ide> <ide> OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { <ide> this._trailer = ''; <del> const keys = Object.keys(headers); <add> const keys = ObjectKeys(headers); <ide> const isArray = Array.isArray(headers); <ide> var field, value; <ide> for (var i = 0, l = keys.length; i < l; i++) { <ide><path>lib/_http_server.js <ide> 'use strict'; <ide> <ide> const { <del> Object: { <del> setPrototypeOf: ObjectSetPrototypeOf, <del> keys: ObjectKeys, <del> } <add> ObjectKeys, <add> ObjectSetPrototypeOf, <ide> } = primordials; <ide> <ide> const net = require('net'); <ide> function writeHead(statusCode, reason, obj) { <ide> let k; <ide> if (obj) { <ide> const keys = ObjectKeys(obj); <del> for (var i = 0; i < keys.length; i++) { <add> for (let i = 0; i < keys.length; i++) { <ide> k = keys[i]; <ide> if (k) this.setHeader(k, obj[k]); <ide> } <ide><path>lib/_stream_duplex.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectKeys, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> module.exports = Duplex; <ide> <ide> const Readable = require('_stream_readable'); <ide> const Writable = require('_stream_writable'); <ide> <del>Object.setPrototypeOf(Duplex.prototype, Readable.prototype); <del>Object.setPrototypeOf(Duplex, Readable); <add>ObjectSetPrototypeOf(Duplex.prototype, Readable.prototype); <add>ObjectSetPrototypeOf(Duplex, Readable); <ide> <ide> { <ide> // Allow the keys array to be GC'ed. 
<del> const keys = Object.keys(Writable.prototype); <add> const keys = ObjectKeys(Writable.prototype); <ide> for (let v = 0; v < keys.length; v++) { <ide> const method = keys[v]; <ide> if (!Duplex.prototype[method]) <ide> function Duplex(options) { <ide> } <ide> } <ide> <del>Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { <add>ObjectDefineProperty(Duplex.prototype, 'writableHighWaterMark', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Duplex.prototype, 'writableBuffer', { <add>ObjectDefineProperty(Duplex.prototype, 'writableBuffer', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Duplex.prototype, 'writableBuffer', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Duplex.prototype, 'writableLength', { <add>ObjectDefineProperty(Duplex.prototype, 'writableLength', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Duplex.prototype, 'writableLength', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Duplex.prototype, 'writableFinished', { <add>ObjectDefineProperty(Duplex.prototype, 'writableFinished', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Duplex.prototype, 'writableFinished', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Duplex.prototype, 'writableCorked', { <add>ObjectDefineProperty(Duplex.prototype, 'writableCorked', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Duplex.prototype, 'writableCorked', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Duplex.prototype, 'writableEnded', { <add>ObjectDefineProperty(Duplex.prototype, 'writableEnded', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> function onEndNT(self) { <ide> self.end(); <ide> } <ide> <del>Object.defineProperty(Duplex.prototype, 'destroyed', { <add>ObjectDefineProperty(Duplex.prototype, 'destroyed', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide><path>lib/_stream_passthrough.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> module.exports = PassThrough; <ide> <ide> const Transform = require('_stream_transform'); <del>Object.setPrototypeOf(PassThrough.prototype, Transform.prototype); <del>Object.setPrototypeOf(PassThrough, Transform); <add>ObjectSetPrototypeOf(PassThrough.prototype, Transform.prototype); <add>ObjectSetPrototypeOf(PassThrough, Transform); <ide> <ide> function PassThrough(options) { <ide> if (!(this instanceof PassThrough)) <ide><path>lib/_stream_readable.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> module.exports = 
Readable; <ide> Readable.ReadableState = ReadableState; <ide> let StringDecoder; <ide> let createReadableStreamAsyncIterator; <ide> let from; <ide> <del>Object.setPrototypeOf(Readable.prototype, Stream.prototype); <del>Object.setPrototypeOf(Readable, Stream); <add>ObjectSetPrototypeOf(Readable.prototype, Stream.prototype); <add>ObjectSetPrototypeOf(Readable, Stream); <ide> <ide> const { errorOrDestroy } = destroyImpl; <ide> const kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; <ide> function ReadableState(options, stream, isDuplex) { <ide> } <ide> <ide> // Legacy getter for `pipesCount` <del>Object.defineProperty(ReadableState.prototype, 'pipesCount', { <add>ObjectDefineProperty(ReadableState.prototype, 'pipesCount', { <ide> get() { <ide> return this.pipes.length; <ide> } <ide> function Readable(options) { <ide> Stream.call(this); <ide> } <ide> <del>Object.defineProperty(Readable.prototype, 'destroyed', { <add>ObjectDefineProperty(Readable.prototype, 'destroyed', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Readable.prototype, 'destroyed', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Readable.prototype, 'readableEnded', { <add>ObjectDefineProperty(Readable.prototype, 'readableEnded', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Readable.prototype[Symbol.asyncIterator] = function() { <ide> return createReadableStreamAsyncIterator(this); <ide> }; <ide> <del>Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { <add>ObjectDefineProperty(Readable.prototype, 'readableHighWaterMark', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Readable.prototype, 'readableBuffer', { <add>ObjectDefineProperty(Readable.prototype, 'readableBuffer', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Readable.prototype, 'readableBuffer', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Readable.prototype, 'readableFlowing', { <add>ObjectDefineProperty(Readable.prototype, 'readableFlowing', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Readable.prototype, 'readableFlowing', { <ide> // Exposed for testing purposes only. <ide> Readable._fromList = fromList; <ide> <del>Object.defineProperty(Readable.prototype, 'readableLength', { <add>ObjectDefineProperty(Readable.prototype, 'readableLength', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Readable.prototype, 'readableLength', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Readable.prototype, 'readableObjectMode', { <add>ObjectDefineProperty(Readable.prototype, 'readableObjectMode', { <ide> enumerable: false, <ide> get() { <ide> return this._readableState ? 
this._readableState.objectMode : false; <ide> } <ide> }); <ide> <del>Object.defineProperty(Readable.prototype, 'readableEncoding', { <add>ObjectDefineProperty(Readable.prototype, 'readableEncoding', { <ide> enumerable: false, <ide> get() { <ide> return this._readableState ? this._readableState.encoding : null; <ide><path>lib/_stream_transform.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> module.exports = Transform; <ide> const { <ide> const { <ide> ERR_TRANSFORM_WITH_LENGTH_0 <ide> } = require('internal/errors').codes; <ide> const Duplex = require('_stream_duplex'); <del>Object.setPrototypeOf(Transform.prototype, Duplex.prototype); <del>Object.setPrototypeOf(Transform, Duplex); <add>ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); <add>ObjectSetPrototypeOf(Transform, Duplex); <ide> <ide> <ide> function afterTransform(er, data) { <ide><path>lib/_stream_writable.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> module.exports = Writable; <ide> Writable.WritableState = WritableState; <ide> const { <ide> <ide> const { errorOrDestroy } = destroyImpl; <ide> <del>Object.setPrototypeOf(Writable.prototype, Stream.prototype); <del>Object.setPrototypeOf(Writable, Stream); <add>ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); <add>ObjectSetPrototypeOf(Writable, Stream); <ide> <ide> function nop() {} <ide> <ide> WritableState.prototype.getBuffer = function getBuffer() { <ide> return out; <ide> }; <ide> <del>Object.defineProperty(WritableState.prototype, 'buffer', { <add>ObjectDefineProperty(WritableState.prototype, 'buffer', { <ide> get: internalUtil.deprecate(function writableStateBufferGetter() { <ide> return this.getBuffer(); <ide> }, '_writableState.buffer is deprecated. 
Use _writableState.getBuffer ' + <ide> Object.defineProperty(WritableState.prototype, 'buffer', { <ide> var realHasInstance; <ide> if (typeof Symbol === 'function' && Symbol.hasInstance) { <ide> realHasInstance = Function.prototype[Symbol.hasInstance]; <del> Object.defineProperty(Writable, Symbol.hasInstance, { <add> ObjectDefineProperty(Writable, Symbol.hasInstance, { <ide> value: function(object) { <ide> if (realHasInstance.call(this, object)) <ide> return true; <ide> Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { <ide> return this; <ide> }; <ide> <del>Object.defineProperty(Writable.prototype, 'writableBuffer', { <add>ObjectDefineProperty(Writable.prototype, 'writableBuffer', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> function decodeChunk(state, chunk, encoding) { <ide> return chunk; <ide> } <ide> <del>Object.defineProperty(Writable.prototype, 'writableEnded', { <add>ObjectDefineProperty(Writable.prototype, 'writableEnded', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Writable.prototype, 'writableEnded', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { <add>ObjectDefineProperty(Writable.prototype, 'writableHighWaterMark', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Writable.prototype, 'writableCorked', { <add>ObjectDefineProperty(Writable.prototype, 'writableCorked', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Writable.prototype.end = function(chunk, encoding, cb) { <ide> return this; <ide> }; <ide> <del>Object.defineProperty(Writable.prototype, 'writableLength', { <add>ObjectDefineProperty(Writable.prototype, 'writableLength', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> function onFinished(stream, state, cb) { <ide> stream.prependListener('error', onerror); <ide> } <ide> <del>Object.defineProperty(Writable.prototype, 'destroyed', { <add>ObjectDefineProperty(Writable.prototype, 'destroyed', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide> Object.defineProperty(Writable.prototype, 'destroyed', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Writable.prototype, 'writableObjectMode', { <add>ObjectDefineProperty(Writable.prototype, 'writableObjectMode', { <ide> enumerable: false, <ide> get() { <ide> return this._writableState ? 
this._writableState.objectMode : false; <ide> } <ide> }); <ide> <del>Object.defineProperty(Writable.prototype, 'writableFinished', { <add>ObjectDefineProperty(Writable.prototype, 'writableFinished', { <ide> // Making it explicit this property is not enumerable <ide> // because otherwise some prototype manipulation in <ide> // userland will fail <ide><path>lib/_tls_common.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add>} = primordials; <ide> <ide> const { parseCertString } = require('internal/tls'); <ide> const { isArrayBufferView } = require('internal/util/types'); <ide> exports.translatePeerCertificate = function translatePeerCertificate(c) { <ide> if (c.subject != null) c.subject = parseCertString(c.subject); <ide> if (c.infoAccess != null) { <ide> const info = c.infoAccess; <del> c.infoAccess = Object.create(null); <add> c.infoAccess = ObjectCreate(null); <ide> <ide> // XXX: More key validation? <ide> info.replace(/([^\n:]*):([^\n]*)(?:\n|$)/g, (all, key, val) => { <ide><path>lib/_tls_wrap.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> assertCrypto, <ide> function TLSSocket(socket, opts) { <ide> // Read on next tick so the caller has a chance to setup listeners <ide> process.nextTick(initRead, this, socket); <ide> } <del>Object.setPrototypeOf(TLSSocket.prototype, net.Socket.prototype); <del>Object.setPrototypeOf(TLSSocket, net.Socket); <add>ObjectSetPrototypeOf(TLSSocket.prototype, net.Socket.prototype); <add>ObjectSetPrototypeOf(TLSSocket, net.Socket); <ide> exports.TLSSocket = TLSSocket; <ide> <ide> const proxiedMethods = [ <ide> TLSSocket.prototype._wrapHandle = function(wrap) { <ide> // This eliminates a cyclic reference to TLSWrap <ide> // Ref: https://github.com/nodejs/node/commit/f7620fb96d339f704932f9bb9a0dceb9952df2d4 <ide> function defineHandleReading(socket, handle) { <del> Object.defineProperty(handle, 'reading', { <add> ObjectDefineProperty(handle, 'reading', { <ide> get: () => { <ide> return socket[kRes].reading; <ide> }, <ide> function Server(options, listener) { <ide> this[kEnableTrace] = options.enableTrace; <ide> } <ide> <del>Object.setPrototypeOf(Server.prototype, net.Server.prototype); <del>Object.setPrototypeOf(Server, net.Server); <add>ObjectSetPrototypeOf(Server.prototype, net.Server.prototype); <add>ObjectSetPrototypeOf(Server, net.Server); <ide> exports.Server = Server; <ide> exports.createServer = function createServer(options, listener) { <ide> return new Server(options, listener); <ide> function normalizeConnectArgs(listArgs) { <ide> // the host/port/path args that it knows about, not the tls options. <ide> // This means that options.host overrides a host arg. <ide> if (listArgs[1] !== null && typeof listArgs[1] === 'object') { <del> Object.assign(options, listArgs[1]); <add> ObjectAssign(options, listArgs[1]); <ide> } else if (listArgs[2] !== null && typeof listArgs[2] === 'object') { <del> Object.assign(options, listArgs[2]); <add> ObjectAssign(options, listArgs[2]); <ide> } <ide> <ide> return cb ? 
[options, cb] : [options]; <ide><path>lib/assert.js <ide> <ide> 'use strict'; <ide> <del>const { Object, ObjectPrototype } = primordials; <add>const { <add> ObjectAssign, <add> ObjectIs, <add> ObjectKeys, <add> ObjectPrototypeIsPrototypeOf, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> assert.strictEqual = function strictEqual(actual, expected, message) { <ide> if (arguments.length < 2) { <ide> throw new ERR_MISSING_ARGS('actual', 'expected'); <ide> } <del> if (!Object.is(actual, expected)) { <add> if (!ObjectIs(actual, expected)) { <ide> innerFail({ <ide> actual, <ide> expected, <ide> assert.notStrictEqual = function notStrictEqual(actual, expected, message) { <ide> if (arguments.length < 2) { <ide> throw new ERR_MISSING_ARGS('actual', 'expected'); <ide> } <del> if (Object.is(actual, expected)) { <add> if (ObjectIs(actual, expected)) { <ide> innerFail({ <ide> actual, <ide> expected, <ide> function expectedException(actual, expected, message, fn) { <ide> throw err; <ide> } else { <ide> // Handle validation objects. <del> const keys = Object.keys(expected); <add> const keys = ObjectKeys(expected); <ide> // Special handle errors to make sure the name and the message are <ide> // compared as well. <ide> if (expected instanceof Error) { <ide> function expectedException(actual, expected, message, fn) { <ide> // Check for matching Error classes. <ide> } else if (expected.prototype !== undefined && actual instanceof expected) { <ide> return; <del> } else if (ObjectPrototype.isPrototypeOf(Error, expected)) { <add> } else if (ObjectPrototypeIsPrototypeOf(Error, expected)) { <ide> if (!message) { <ide> generatedMessage = true; <ide> message = 'The error is expected to be an instance of ' + <ide> assert.ifError = function ifError(err) { <ide> function strict(...args) { <ide> innerOk(strict, args.length, ...args); <ide> } <del>assert.strict = Object.assign(strict, assert, { <add>assert.strict = ObjectAssign(strict, assert, { <ide> equal: assert.strictEqual, <ide> deepEqual: assert.deepStrictEqual, <ide> notEqual: assert.notStrictEqual, <ide><path>lib/async_hooks.js <ide> 'use strict'; <ide> <del>const { Reflect } = primordials; <add>const { <add> ReflectApply, <add>} = primordials; <ide> <ide> const { <ide> ERR_ASYNC_CALLBACK, <ide> class AsyncResource { <ide> try { <ide> if (thisArg === undefined) <ide> return fn(...args); <del> return Reflect.apply(fn, thisArg, args); <add> return ReflectApply(fn, thisArg, args); <ide> } finally { <ide> if (hasAsyncIdStack()) <ide> emitAfter(asyncId); <ide><path>lib/buffer.js <ide> 'use strict'; <ide> <ide> const { <del> Object: { <del> defineProperties: ObjectDefineProperties, <del> defineProperty: ObjectDefineProperty, <del> setPrototypeOf: ObjectSetPrototypeOf, <del> create: ObjectCreate <del> }, <del> Math: { <del> floor: MathFloor, <del> trunc: MathTrunc, <del> min: MathMin <del> } <add> MathFloor, <add> MathMin, <add> MathTrunc, <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <ide> } = primordials; <ide> <ide> const { <ide><path>lib/child_process.js <ide> <ide> 'use strict'; <ide> <del>const { Object, ObjectPrototype } = primordials; <add>const { <add> ObjectAssign, <add> ObjectDefineProperty, <add> ObjectPrototypeHasOwnProperty, <add>} = primordials; <ide> <ide> const { <ide> promisify, <ide> const customPromiseExecFunction = (orig) => { <ide> }; <ide> }; <ide> <del>Object.defineProperty(exec, promisify.custom, { <add>ObjectDefineProperty(exec, 
promisify.custom, { <ide> enumerable: false, <ide> value: customPromiseExecFunction(exec) <ide> }); <ide> function execFile(file /* , args, options, callback */) { <ide> return child; <ide> } <ide> <del>Object.defineProperty(execFile, promisify.custom, { <add>ObjectDefineProperty(execFile, promisify.custom, { <ide> enumerable: false, <ide> value: customPromiseExecFunction(execFile) <ide> }); <ide> function normalizeSpawnArguments(file, args, options) { <ide> // process.env.NODE_V8_COVERAGE always propagates, making it possible to <ide> // collect coverage for programs that spawn with white-listed environment. <ide> if (process.env.NODE_V8_COVERAGE && <del> !ObjectPrototype.hasOwnProperty(options.env || {}, 'NODE_V8_COVERAGE')) { <add> !ObjectPrototypeHasOwnProperty(options.env || {}, 'NODE_V8_COVERAGE')) { <ide> env.NODE_V8_COVERAGE = process.env.NODE_V8_COVERAGE; <ide> } <ide> <ide> function checkExecSyncError(ret, args, cmd) { <ide> err = new Error(msg); <ide> } <ide> if (err) { <del> Object.assign(err, ret); <add> ObjectAssign(err, ret); <ide> } <ide> return err; <ide> } <ide><path>lib/constants.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add> ObjectFreeze, <add>} = primordials; <ide> <ide> // This module is deprecated in documentation only. Users should be directed <ide> // towards using the specific constants exposed by the individual modules on <ide> // which they are most relevant. <ide> // Deprecation Code: DEP0008 <ide> const constants = internalBinding('constants'); <del>Object.assign(exports, <del> constants.os.dlopen, <del> constants.os.errno, <del> constants.os.priority, <del> constants.os.signals, <del> constants.fs, <del> constants.crypto); <del>Object.freeze(exports); <add>ObjectAssign(exports, <add> constants.os.dlopen, <add> constants.os.errno, <add> constants.os.priority, <add> constants.os.signals, <add> constants.fs, <add> constants.crypto); <add>ObjectFreeze(exports); <ide><path>lib/crypto.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperties, <add>} = primordials; <ide> <ide> const { <ide> assertCrypto, <ide> function getFipsForced() { <ide> return 1; <ide> } <ide> <del>Object.defineProperties(module.exports, { <add>ObjectDefineProperties(module.exports, { <ide> createCipher: { <ide> enumerable: false, <ide> value: deprecate(createCipher, <ide><path>lib/dgram.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const errors = require('internal/errors'); <ide> const { <ide> function Socket(type, listener) { <ide> sendBufferSize <ide> }; <ide> } <del>Object.setPrototypeOf(Socket.prototype, EventEmitter.prototype); <del>Object.setPrototypeOf(Socket, EventEmitter); <add>ObjectSetPrototypeOf(Socket.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(Socket, EventEmitter); <ide> <ide> <ide> function createSocket(type, listener) { <ide> Socket.prototype.getSendBufferSize = function() { <ide> <ide> <ide> // Deprecated private APIs. 
<del>Object.defineProperty(Socket.prototype, '_handle', { <add>ObjectDefineProperty(Socket.prototype, '_handle', { <ide> get: deprecate(function() { <ide> return this[kStateSymbol].handle; <ide> }, 'Socket.prototype._handle is deprecated', 'DEP0112'), <ide> Object.defineProperty(Socket.prototype, '_handle', { <ide> }); <ide> <ide> <del>Object.defineProperty(Socket.prototype, '_receiving', { <add>ObjectDefineProperty(Socket.prototype, '_receiving', { <ide> get: deprecate(function() { <ide> return this[kStateSymbol].receiving; <ide> }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), <ide> Object.defineProperty(Socket.prototype, '_receiving', { <ide> }); <ide> <ide> <del>Object.defineProperty(Socket.prototype, '_bindState', { <add>ObjectDefineProperty(Socket.prototype, '_bindState', { <ide> get: deprecate(function() { <ide> return this[kStateSymbol].bindState; <ide> }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), <ide> Object.defineProperty(Socket.prototype, '_bindState', { <ide> }); <ide> <ide> <del>Object.defineProperty(Socket.prototype, '_queue', { <add>ObjectDefineProperty(Socket.prototype, '_queue', { <ide> get: deprecate(function() { <ide> return this[kStateSymbol].queue; <ide> }, 'Socket.prototype._queue is deprecated', 'DEP0112'), <ide> Object.defineProperty(Socket.prototype, '_queue', { <ide> }); <ide> <ide> <del>Object.defineProperty(Socket.prototype, '_reuseAddr', { <add>ObjectDefineProperty(Socket.prototype, '_reuseAddr', { <ide> get: deprecate(function() { <ide> return this[kStateSymbol].reuseAddr; <ide> }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), <ide> Socket.prototype._stopReceiving = deprecate(function() { <ide> <ide> // Legacy alias on the C++ wrapper object. This is not public API, so we may <ide> // want to runtime-deprecate it at some point. There's no hurry, though. 
<del>Object.defineProperty(UDP.prototype, 'owner', { <add>ObjectDefineProperty(UDP.prototype, 'owner', { <ide> get() { return this[owner_symbol]; }, <ide> set(v) { return this[owner_symbol] = v; } <ide> }); <ide><path>lib/dns.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const cares = internalBinding('cares_wrap'); <ide> const { toASCII } = require('internal/idna'); <ide> function lookup(hostname, options, callback) { <ide> return req; <ide> } <ide> <del>Object.defineProperty(lookup, customPromisifyArgs, <del> { value: ['address', 'family'], enumerable: false }); <add>ObjectDefineProperty(lookup, customPromisifyArgs, <add> { value: ['address', 'family'], enumerable: false }); <ide> <ide> <ide> function onlookupservice(err, hostname, service) { <ide> function lookupService(address, port, callback) { <ide> return req; <ide> } <ide> <del>Object.defineProperty(lookupService, customPromisifyArgs, <del> { value: ['hostname', 'service'], enumerable: false }); <add>ObjectDefineProperty(lookupService, customPromisifyArgs, <add> { value: ['hostname', 'service'], enumerable: false }); <ide> <ide> <ide> function onresolve(err, result, ttls) { <ide> function resolver(bindingName) { <ide> if (err) throw dnsException(err, bindingName, name); <ide> return req; <ide> } <del> Object.defineProperty(query, 'name', { value: bindingName }); <add> ObjectDefineProperty(query, 'name', { value: bindingName }); <ide> return query; <ide> } <ide> <del>const resolveMap = Object.create(null); <add>const resolveMap = ObjectCreate(null); <ide> Resolver.prototype.resolveAny = resolveMap.ANY = resolver('queryAny'); <ide> Resolver.prototype.resolve4 = resolveMap.A = resolver('queryA'); <ide> Resolver.prototype.resolve6 = resolveMap.AAAA = resolver('queryAaaa'); <ide> module.exports = { <ide> <ide> bindDefaultResolver(module.exports, getDefaultResolver()); <ide> <del>Object.defineProperties(module.exports, { <add>ObjectDefineProperties(module.exports, { <ide> promises: { <ide> configurable: true, <ide> enumerable: true, <ide><path>lib/domain.js <ide> // No new pull requests targeting this module will be accepted <ide> // unless they address existing, critical bugs. 
<ide> <del>const { Object, Reflect } = primordials; <add>const { <add> ObjectDefineProperty, <add> ReflectApply, <add>} = primordials; <ide> <ide> const EventEmitter = require('events'); <ide> const { <ide> const { WeakReference } = internalBinding('util'); <ide> // Overwrite process.domain with a getter/setter that will allow for more <ide> // effective optimizations <ide> const _domain = [null]; <del>Object.defineProperty(process, 'domain', { <add>ObjectDefineProperty(process, 'domain', { <ide> enumerable: true, <ide> get: function() { <ide> return _domain[0]; <ide> const asyncHook = createHook({ <ide> if (process.domain !== null && process.domain !== undefined) { <ide> // If this operation is created while in a domain, let's mark it <ide> pairing.set(asyncId, process.domain[kWeak]); <del> Object.defineProperty(resource, 'domain', { <add> ObjectDefineProperty(resource, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: process.domain, <ide> function topLevelDomainCallback(cb, ...args) { <ide> <ide> if (domain) <ide> domain.enter(); <del> const ret = Reflect.apply(cb, this, args); <add> const ret = ReflectApply(cb, this, args); <ide> if (domain) <ide> domain.exit(); <ide> <ide> Domain.prototype._errorHandler = function(er) { <ide> let caught = false; <ide> <ide> if ((typeof er === 'object' && er !== null) || typeof er === 'function') { <del> Object.defineProperty(er, 'domain', { <add> ObjectDefineProperty(er, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: this, <ide> Domain.prototype.add = function(ee) { <ide> } <ide> } <ide> <del> Object.defineProperty(ee, 'domain', { <add> ObjectDefineProperty(ee, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: this, <ide> function intercepted(_this, self, cb, fnargs) { <ide> const er = fnargs[0]; <ide> er.domainBound = cb; <ide> er.domainThrown = false; <del> Object.defineProperty(er, 'domain', { <add> ObjectDefineProperty(er, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: self, <ide> Domain.prototype.bind = function(cb) { <ide> return bound(this, self, cb, arguments); <ide> } <ide> <del> Object.defineProperty(runBound, 'domain', { <add> ObjectDefineProperty(runBound, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: this, <ide> EventEmitter.usingDomains = true; <ide> <ide> const eventInit = EventEmitter.init; <ide> EventEmitter.init = function() { <del> Object.defineProperty(this, 'domain', { <add> ObjectDefineProperty(this, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: null, <ide> EventEmitter.prototype.emit = function(...args) { <ide> // handler, there's no active domain or this is process <ide> if (shouldEmitError || domain === null || domain === undefined || <ide> this === process) { <del> return Reflect.apply(eventEmit, this, args); <add> return ReflectApply(eventEmit, this, args); <ide> } <ide> <ide> if (type === 'error') { <ide> EventEmitter.prototype.emit = function(...args) { <ide> <ide> if (typeof er === 'object') { <ide> er.domainEmitter = this; <del> Object.defineProperty(er, 'domain', { <add> ObjectDefineProperty(er, 'domain', { <ide> configurable: true, <ide> enumerable: false, <ide> value: domain, <ide> EventEmitter.prototype.emit = function(...args) { <ide> } <ide> <ide> domain.enter(); <del> const ret = Reflect.apply(eventEmit, this, args); <add> const ret = ReflectApply(eventEmit, this, args); <ide> domain.exit(); <ide> <ide> return ret; <ide><path>lib/events.js <ide> 
'use strict'; <ide> <ide> const { <del> Math: { <del> min: MathMin <del> }, <del> Object: { <del> defineProperty: ObjectDefineProperty, <del> getPrototypeOf: ObjectGetPrototypeOf, <del> create: ObjectCreate, <del> keys: ObjectKeys, <del> }, <del> Reflect: { <del> apply: ReflectApply, <del> ownKeys: ReflectOwnKeys, <del> } <add> MathMin, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ReflectApply, <add> ReflectOwnKeys, <ide> } = primordials; <ide> <ide> var spliceOne; <ide><path>lib/fs.js <ide> <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { fs: constants } = internalBinding('constants'); <ide> const { <ide> function exists(path, callback) { <ide> } <ide> } <ide> <del>Object.defineProperty(exists, internalUtil.promisify.custom, { <add>ObjectDefineProperty(exists, internalUtil.promisify.custom, { <ide> value: (path) => { <ide> return new Promise((resolve) => fs.exists(path, resolve)); <ide> } <ide> function read(fd, buffer, offset, length, position, callback) { <ide> binding.read(fd, buffer, offset, length, position, req); <ide> } <ide> <del>Object.defineProperty(read, internalUtil.customPromisifyArgs, <del> { value: ['bytesRead', 'buffer'], enumerable: false }); <add>ObjectDefineProperty(read, internalUtil.customPromisifyArgs, <add> { value: ['bytesRead', 'buffer'], enumerable: false }); <ide> <ide> function readSync(fd, buffer, offset, length, position) { <ide> validateInt32(fd, 'fd', 0); <ide> function write(fd, buffer, offset, length, position, callback) { <ide> return binding.writeString(fd, buffer, offset, length, req); <ide> } <ide> <del>Object.defineProperty(write, internalUtil.customPromisifyArgs, <del> { value: ['bytesWritten', 'buffer'], enumerable: false }); <add>ObjectDefineProperty(write, internalUtil.customPromisifyArgs, <add> { value: ['bytesWritten', 'buffer'], enumerable: false }); <ide> <ide> // Usage: <ide> // fs.writeSync(fd, buffer[, offset[, length[, position]]]); <ide> function writev(fd, buffers, position, callback) { <ide> return binding.writeBuffers(fd, buffers, position, req); <ide> } <ide> <del>Object.defineProperty(writev, internalUtil.customPromisifyArgs, { <add>ObjectDefineProperty(writev, internalUtil.customPromisifyArgs, { <ide> value: ['bytesWritten', 'buffer'], <ide> enumerable: false <ide> }); <ide> function ftruncate(fd, len = 0, callback) { <ide> } <ide> validateInt32(fd, 'fd', 0); <ide> validateInteger(len, 'len'); <del> len = Math.max(0, len); <add> len = MathMax(0, len); <ide> const req = new FSReqCallback(); <ide> req.oncomplete = makeCallback(callback); <ide> binding.ftruncate(fd, len, req); <ide> function ftruncate(fd, len = 0, callback) { <ide> function ftruncateSync(fd, len = 0) { <ide> validateInt32(fd, 'fd', 0); <ide> validateInteger(len, 'len'); <del> len = Math.max(0, len); <add> len = MathMax(0, len); <ide> const ctx = {}; <ide> binding.ftruncate(fd, len, undefined, ctx); <ide> handleErrorFromBinding(ctx); <ide> if (isWindows) { <ide> nextPart = function nextPart(p, i) { return p.indexOf('/', i); }; <ide> } <ide> <del>const emptyObj = Object.create(null); <add>const emptyObj = ObjectCreate(null); <ide> function realpathSync(p, options) { <ide> if (!options) <ide> options = emptyObj; <ide> function realpathSync(p, options) { <ide> return maybeCachedResult; <ide> } <ide> <del> const seenLinks = Object.create(null); <del> 
const knownHard = Object.create(null); <add> const seenLinks = ObjectCreate(null); <add> const knownHard = ObjectCreate(null); <ide> const original = p; <ide> <ide> // Current character position in p <ide> function realpath(p, options, callback) { <ide> validatePath(p); <ide> p = pathModule.resolve(p); <ide> <del> const seenLinks = Object.create(null); <del> const knownHard = Object.create(null); <add> const seenLinks = ObjectCreate(null); <add> const knownHard = ObjectCreate(null); <ide> <ide> // Current character position in p <ide> let pos; <ide> module.exports = fs = { <ide> _toUnixTimestamp: toUnixTimestamp <ide> }; <ide> <del>Object.defineProperties(fs, { <add>ObjectDefineProperties(fs, { <ide> F_OK: { enumerable: true, value: F_OK || 0 }, <ide> R_OK: { enumerable: true, value: R_OK || 0 }, <ide> W_OK: { enumerable: true, value: W_OK || 0 }, <ide><path>lib/http.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const httpAgent = require('_http_agent'); <ide> const { ClientRequest } = require('_http_client'); <ide> module.exports = { <ide> request <ide> }; <ide> <del>Object.defineProperty(module.exports, 'maxHeaderSize', { <add>ObjectDefineProperty(module.exports, 'maxHeaderSize', { <ide> configurable: true, <ide> enumerable: true, <ide> get() { <ide> Object.defineProperty(module.exports, 'maxHeaderSize', { <ide> } <ide> }); <ide> <del>Object.defineProperty(module.exports, 'globalAgent', { <add>ObjectDefineProperty(module.exports, 'globalAgent', { <ide> configurable: true, <ide> enumerable: true, <ide> get() { <ide><path>lib/https.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> require('internal/util').assertCrypto(); <ide> <ide> function Server(opts, requestListener) { <ide> this.maxHeadersCount = null; <ide> this.headersTimeout = 40 * 1000; // 40 seconds <ide> } <del>Object.setPrototypeOf(Server.prototype, tls.Server.prototype); <del>Object.setPrototypeOf(Server, tls.Server); <add>ObjectSetPrototypeOf(Server.prototype, tls.Server.prototype); <add>ObjectSetPrototypeOf(Server, tls.Server); <ide> <ide> Server.prototype.setTimeout = HttpServer.prototype.setTimeout; <ide> <ide> function Agent(options) { <ide> list: [] <ide> }; <ide> } <del>Object.setPrototypeOf(Agent.prototype, HttpAgent.prototype); <del>Object.setPrototypeOf(Agent, HttpAgent); <add>ObjectSetPrototypeOf(Agent.prototype, HttpAgent.prototype); <add>ObjectSetPrototypeOf(Agent, HttpAgent); <ide> Agent.prototype.createConnection = createConnection; <ide> <ide> Agent.prototype.getName = function getName(options) { <ide> function request(...args) { <ide> } <ide> <ide> if (args[0] && typeof args[0] !== 'function') { <del> Object.assign(options, args.shift()); <add> ObjectAssign(options, args.shift()); <ide> } <ide> <ide> options._defaultAgent = module.exports.globalAgent; <ide><path>lib/inspector.js <ide> 'use strict'; <ide> <del>const { JSON } = primordials; <add>const { <add> JSONParse, <add> JSONStringify, <add>} = primordials; <ide> <ide> const { <ide> ERR_INSPECTOR_ALREADY_CONNECTED, <ide> class Session extends EventEmitter { <ide> } <ide> <ide> [onMessageSymbol](message) { <del> const parsed = JSON.parse(message); <add> const parsed = JSONParse(message); <ide> try { <ide> if (parsed.id) { <ide> const callback = this[messageCallbacksSymbol].get(parsed.id); <ide> class Session extends EventEmitter { <ide> if 
(callback) { <ide> this[messageCallbacksSymbol].set(id, callback); <ide> } <del> this[connectionSymbol].dispatch(JSON.stringify(message)); <add> this[connectionSymbol].dispatch(JSONStringify(message)); <ide> } <ide> <ide> disconnect() { <ide><path>lib/internal/assert/assertion_error.js <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add>} = primordials; <ide> <ide> const { inspect } = require('internal/util/inspect'); <ide> const { codes: { <ide> const kReadableOperator = { <ide> const kMaxShortLength = 12; <ide> <ide> function copyError(source) { <del> const keys = Object.keys(source); <del> const target = Object.create(Object.getPrototypeOf(source)); <add> const keys = ObjectKeys(source); <add> const target = ObjectCreate(ObjectGetPrototypeOf(source)); <ide> for (const key of keys) { <ide> target[key] = source[key]; <ide> } <del> Object.defineProperty(target, 'message', { value: source.message }); <add> ObjectDefineProperty(target, 'message', { value: source.message }); <ide> return target; <ide> } <ide> <ide> function createErrDiff(actual, expected, operator) { <ide> b = expectedLines[expectedLines.length - 1]; <ide> } <ide> <del> const maxLines = Math.max(actualLines.length, expectedLines.length); <add> const maxLines = MathMax(actualLines.length, expectedLines.length); <ide> // Strict equal with identical objects that are not identical by reference. <ide> // E.g., assert.deepStrictEqual({ a: Symbol() }, { a: Symbol() }) <ide> if (maxLines === 0) { <ide> class AssertionError extends Error { <ide> Error.stackTraceLimit = limit; <ide> <ide> this.generatedMessage = !message; <del> Object.defineProperty(this, 'name', { <add> ObjectDefineProperty(this, 'name', { <ide> value: 'AssertionError [ERR_ASSERTION]', <ide> enumerable: false, <ide> writable: true, <ide><path>lib/internal/async_hooks.js <ide> 'use strict'; <ide> <del>const { FunctionPrototype, Object } = primordials; <add>const { <add> FunctionPrototypeBind, <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { <ide> ERR_ASYNC_TYPE, <ide> function emitHook(symbol, asyncId) { <ide> } <ide> <ide> function emitHookFactory(symbol, name) { <del> const fn = FunctionPrototype.bind(emitHook, undefined, symbol); <add> const fn = FunctionPrototypeBind(emitHook, undefined, symbol); <ide> <ide> // Set the name property of the function as it looks good in the stack trace. <del> Object.defineProperty(fn, 'name', { <add> ObjectDefineProperty(fn, 'name', { <ide> value: name <ide> }); <ide> return fn; <ide><path>lib/internal/bootstrap/loaders.js <ide> /* global process, getLinkedBinding, getInternalBinding, primordials */ <ide> <ide> const { <del> Reflect, <del> Object, <del> ObjectPrototype, <del> SafeSet <add> ReflectGet, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> SafeSet, <ide> } = primordials; <ide> <ide> // Set up process.moduleLoadList. <ide> const moduleLoadList = []; <del>Object.defineProperty(process, 'moduleLoadList', { <add>ObjectDefineProperty(process, 'moduleLoadList', { <ide> value: moduleLoadList, <ide> configurable: true, <ide> enumerable: true, <ide> const internalBindingWhitelist = new SafeSet([ <ide> <ide> // Set up process.binding() and process._linkedBinding(). 
<ide> { <del> const bindingObj = Object.create(null); <add> const bindingObj = ObjectCreate(null); <ide> <ide> process.binding = function binding(module) { <ide> module = String(module); <ide> const internalBindingWhitelist = new SafeSet([ <ide> // Set up internalBinding() in the closure. <ide> let internalBinding; <ide> { <del> const bindingObj = Object.create(null); <add> const bindingObj = ObjectCreate(null); <ide> // eslint-disable-next-line no-global-assign <ide> internalBinding = function internalBinding(module) { <ide> let mod = bindingObj[module]; <ide> NativeModule.prototype.compileForPublicLoader = function() { <ide> // When using --expose-internals, we do not want to reflect the named <ide> // exports from core modules as this can trigger unnecessary getters. <ide> const internal = this.id.startsWith('internal/'); <del> this.exportKeys = internal ? [] : Object.keys(this.exports); <add> this.exportKeys = internal ? [] : ObjectKeys(this.exports); <ide> } <ide> this.getESMFacade(); <ide> this.syncExports(); <ide> return this.exports; <ide> }; <ide> <ide> const getOwn = (target, property, receiver) => { <del> return ObjectPrototype.hasOwnProperty(target, property) ? <del> Reflect.get(target, property, receiver) : <add> return ObjectPrototypeHasOwnProperty(target, property) ? <add> ReflectGet(target, property, receiver) : <ide> undefined; <ide> }; <ide> <ide><path>lib/internal/bootstrap/node.js <ide> <ide> setupPrepareStackTrace(); <ide> <del>const { JSON, Object, Symbol } = primordials; <add>const { <add> JSONParse, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectGetPrototypeOf, <add> ObjectSetPrototypeOf, <add> SymbolToStringTag, <add>} = primordials; <ide> const config = internalBinding('config'); <ide> const { deprecate } = require('internal/util'); <ide> <ide> if (ownsProcessState) { <ide> } <ide> <ide> // process.config is serialized config.gypi <del>process.config = JSON.parse(internalBinding('native_module').config); <add>process.config = JSONParse(internalBinding('native_module').config); <ide> <ide> const rawMethods = internalBinding('process_methods'); <ide> // Set up methods and events on the process object for the main thread <ide> const { setTraceCategoryStateUpdateHandler } = internalBinding('trace_events'); <ide> setTraceCategoryStateUpdateHandler(perThreadSetup.toggleTraceCategoryState); <ide> <ide> // process.allowedNodeEnvironmentFlags <del>Object.defineProperty(process, 'allowedNodeEnvironmentFlags', { <add>ObjectDefineProperty(process, 'allowedNodeEnvironmentFlags', { <ide> get() { <ide> const flags = perThreadSetup.buildAllowedFlags(); <ide> process.allowedNodeEnvironmentFlags = flags; <ide> Object.defineProperty(process, 'allowedNodeEnvironmentFlags', { <ide> // If the user tries to set this to another value, override <ide> // this completely to that value. <ide> set(value) { <del> Object.defineProperty(this, 'allowedNodeEnvironmentFlags', { <add> ObjectDefineProperty(this, 'allowedNodeEnvironmentFlags', { <ide> value, <ide> configurable: true, <ide> enumerable: true, <ide> process.assert = deprecate( <ide> // TODO(joyeecheung): this property has not been well-maintained, should we <ide> // deprecate it in favor of a better API? 
<ide> const { isDebugBuild, hasOpenSSL, hasInspector } = config; <del>Object.defineProperty(process, 'features', { <add>ObjectDefineProperty(process, 'features', { <ide> enumerable: true, <ide> writable: false, <ide> configurable: false, <ide> function setupPrepareStackTrace() { <ide> <ide> function setupProcessObject() { <ide> const EventEmitter = require('events'); <del> const origProcProto = Object.getPrototypeOf(process); <del> Object.setPrototypeOf(origProcProto, EventEmitter.prototype); <add> const origProcProto = ObjectGetPrototypeOf(process); <add> ObjectSetPrototypeOf(origProcProto, EventEmitter.prototype); <ide> EventEmitter.call(process); <del> Object.defineProperty(process, Symbol.toStringTag, { <add> ObjectDefineProperty(process, SymbolToStringTag, { <ide> enumerable: false, <ide> writable: true, <ide> configurable: false, <ide> value: 'process' <ide> }); <ide> // Make process globally available to users by putting it on the global proxy <del> Object.defineProperty(global, 'process', { <add> ObjectDefineProperty(global, 'process', { <ide> value: process, <ide> enumerable: false, <ide> writable: true, <ide> function setupProcessObject() { <ide> } <ide> <ide> function setupProcessStdio(getStdout, getStdin, getStderr) { <del> Object.defineProperty(process, 'stdout', { <add> ObjectDefineProperty(process, 'stdout', { <ide> configurable: true, <ide> enumerable: true, <ide> get: getStdout <ide> }); <ide> <del> Object.defineProperty(process, 'stderr', { <add> ObjectDefineProperty(process, 'stderr', { <ide> configurable: true, <ide> enumerable: true, <ide> get: getStderr <ide> }); <ide> <del> Object.defineProperty(process, 'stdin', { <add> ObjectDefineProperty(process, 'stdin', { <ide> configurable: true, <ide> enumerable: true, <ide> get: getStdin <ide> function setupProcessStdio(getStdout, getStdin, getStderr) { <ide> } <ide> <ide> function setupGlobalProxy() { <del> Object.defineProperty(global, Symbol.toStringTag, { <add> ObjectDefineProperty(global, SymbolToStringTag, { <ide> value: 'global', <ide> writable: false, <ide> enumerable: false, <ide> function setupGlobalProxy() { <ide> <ide> function makeSetter(name) { <ide> return deprecate(function(value) { <del> Object.defineProperty(this, name, { <add> ObjectDefineProperty(this, name, { <ide> configurable: true, <ide> writable: true, <ide> enumerable: true, <ide> function setupGlobalProxy() { <ide> }, `'${name}' is deprecated, use 'global'`, 'DEP0016'); <ide> } <ide> <del> Object.defineProperties(global, { <add> ObjectDefineProperties(global, { <ide> GLOBAL: { <ide> configurable: true, <ide> get: makeGetter('GLOBAL'), <ide> function setupBuffer() { <ide> delete bufferBinding.setBufferPrototype; <ide> delete bufferBinding.zeroFill; <ide> <del> Object.defineProperty(global, 'Buffer', { <add> ObjectDefineProperty(global, 'Buffer', { <ide> value: Buffer, <ide> enumerable: false, <ide> writable: true, <ide> function createGlobalConsole(consoleFromVM) { <ide> <ide> // https://heycam.github.io/webidl/#es-namespaces <ide> function exposeNamespace(target, name, namespaceObject) { <del> Object.defineProperty(target, name, { <add> ObjectDefineProperty(target, name, { <ide> writable: true, <ide> enumerable: false, <ide> configurable: true, <ide> function exposeNamespace(target, name, namespaceObject) { <ide> <ide> // https://heycam.github.io/webidl/#es-interfaces <ide> function exposeInterface(target, name, interfaceObject) { <del> Object.defineProperty(target, name, { <add> ObjectDefineProperty(target, name, { <ide> writable: true, <ide> 
enumerable: false, <ide> configurable: true, <ide> function exposeInterface(target, name, interfaceObject) { <ide> <ide> // https://heycam.github.io/webidl/#define-the-operations <ide> function defineOperation(target, name, method) { <del> Object.defineProperty(target, name, { <add> ObjectDefineProperty(target, name, { <ide> writable: true, <ide> enumerable: true, <ide> configurable: true, <ide><path>lib/internal/bootstrap/pre_execution.js <ide> 'use strict'; <ide> <del>const { Object, SafeWeakMap } = primordials; <add>const { <add> ObjectDefineProperty, <add> SafeWeakMap, <add>} = primordials; <ide> <ide> const { getOptionValue } = require('internal/options'); <ide> const { Buffer } = require('buffer'); <ide> function patchProcessObject(expandArgv1) { <ide> <ide> patchProcessObjectNative(process); <ide> <del> Object.defineProperty(process, 'argv0', { <add> ObjectDefineProperty(process, 'argv0', { <ide> enumerable: true, <ide> configurable: false, <ide> value: process.argv[0] <ide> function patchProcessObject(expandArgv1) { <ide> function addReadOnlyProcessAlias(name, option, enumerable = true) { <ide> const value = getOptionValue(option); <ide> if (value) { <del> Object.defineProperty(process, name, { <add> ObjectDefineProperty(process, name, { <ide> writable: false, <ide> configurable: true, <ide> enumerable, <ide> function initializeReport() { <ide> } <ide> const { report } = require('internal/process/report'); <ide> const { emitExperimentalWarning } = require('internal/util'); <del> Object.defineProperty(process, 'report', { <add> ObjectDefineProperty(process, 'report', { <ide> enumerable: false, <ide> configurable: true, <ide> get() { <ide> function initializeDeprecations() { <ide> // process.features. <ide> const { noBrowserGlobals } = internalBinding('config'); <ide> if (noBrowserGlobals) { <del> Object.defineProperty(process, '_noBrowserGlobals', { <add> ObjectDefineProperty(process, '_noBrowserGlobals', { <ide> writable: false, <ide> enumerable: true, <ide> configurable: true, <ide> function initializeDeprecations() { <ide> // deprecation path for these in ES Modules. <ide> // See https://github.com/nodejs/node/pull/26334. 
<ide> let _process = process; <del> Object.defineProperty(global, 'process', { <add> ObjectDefineProperty(global, 'process', { <ide> get() { <ide> return _process; <ide> }, <ide> function initializeDeprecations() { <ide> }); <ide> <ide> let _Buffer = Buffer; <del> Object.defineProperty(global, 'Buffer', { <add> ObjectDefineProperty(global, 'Buffer', { <ide> get() { <ide> return _Buffer; <ide> }, <ide><path>lib/internal/buffer.js <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathFloor, <add>} = primordials; <ide> <ide> const { <ide> ERR_BUFFER_OUT_OF_BOUNDS, <ide> function checkInt(value, min, max, buf, offset, byteLength) { <ide> } <ide> <ide> function boundsError(value, length, type) { <del> if (Math.floor(value) !== value) { <add> if (MathFloor(value) !== value) { <ide> validateNumber(value, type); <ide> throw new ERR_OUT_OF_RANGE(type || 'offset', 'an integer', value); <ide> } <ide> function writeU_Int48LE(buf, value, offset, min, max) { <ide> value = +value; <ide> checkInt(value, min, max, buf, offset, 5); <ide> <del> const newVal = Math.floor(value * 2 ** -32); <add> const newVal = MathFloor(value * 2 ** -32); <ide> buf[offset++] = value; <ide> value = value >>> 8; <ide> buf[offset++] = value; <ide> function writeU_Int40LE(buf, value, offset, min, max) { <ide> buf[offset++] = value; <ide> value = value >>> 8; <ide> buf[offset++] = value; <del> buf[offset++] = Math.floor(newVal * 2 ** -32); <add> buf[offset++] = MathFloor(newVal * 2 ** -32); <ide> return offset; <ide> } <ide> <ide> function writeU_Int48BE(buf, value, offset, min, max) { <ide> value = +value; <ide> checkInt(value, min, max, buf, offset, 5); <ide> <del> const newVal = Math.floor(value * 2 ** -32); <add> const newVal = MathFloor(value * 2 ** -32); <ide> buf[offset++] = (newVal >>> 8); <ide> buf[offset++] = newVal; <ide> buf[offset + 3] = value; <ide> function writeU_Int40BE(buf, value, offset, min, max) { <ide> value = +value; <ide> checkInt(value, min, max, buf, offset, 4); <ide> <del> buf[offset++] = Math.floor(value * 2 ** -32); <add> buf[offset++] = MathFloor(value * 2 ** -32); <ide> buf[offset + 3] = value; <ide> value = value >>> 8; <ide> buf[offset + 2] = value; <ide><path>lib/internal/child_process.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> errnoException, <ide> function ChildProcess() { <ide> maybeClose(this); <ide> }; <ide> } <del>Object.setPrototypeOf(ChildProcess.prototype, EventEmitter.prototype); <del>Object.setPrototypeOf(ChildProcess, EventEmitter); <add>ObjectSetPrototypeOf(ChildProcess.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(ChildProcess, EventEmitter); <ide> <ide> <ide> function flushStdio(subprocess) { <ide> let serialization; <ide> function setupChannel(target, channel, serializationMode) { <ide> target.channel = channel; <ide> <del> Object.defineProperty(target, '_channel', { <add> ObjectDefineProperty(target, '_channel', { <ide> get: deprecate(() => { <ide> return target.channel; <ide> }, channelDeprecationMsg, 'DEP0129'), <ide><path>lib/internal/child_process/serialization.js <ide> 'use strict'; <ide> <del>const { JSON } = primordials; <add>const { <add> JSONParse, <add> JSONStringify, <add>} = primordials; <ide> const { Buffer } = require('buffer'); <ide> const { StringDecoder } = require('string_decoder'); <ide> const v8 = require('v8'); <ide> const json = { <ide> } else { <ide> chunks[0] = 
channel[kJSONBuffer] + chunks[0]; <ide> for (let i = 0; i < numCompleteChunks; i++) <del> yield JSON.parse(chunks[i]); <add> yield JSONParse(chunks[i]); <ide> channel[kJSONBuffer] = incompleteChunk; <ide> } <ide> channel.buffering = channel[kJSONBuffer].length !== 0; <ide> }, <ide> <ide> writeChannelMessage(channel, req, message, handle) { <del> const string = JSON.stringify(message) + '\n'; <add> const string = JSONStringify(message) + '\n'; <ide> return channel.writeUtf8String(req, string, handle); <ide> }, <ide> }; <ide><path>lib/internal/cli_table.js <ide> 'use strict'; <ide> <del>const { Math, ObjectPrototype } = primordials; <add>const { <add> MathCeil, <add> MathMax, <add> ObjectPrototypeHasOwnProperty, <add>} = primordials; <ide> <ide> const { getStringWidth } = require('internal/readline/utils'); <ide> <ide> const renderRow = (row, columnWidths) => { <ide> const needed = (columnWidths[i] - len) / 2; <ide> // round(needed) + ceil(needed) will always add up to the amount <ide> // of spaces we need while also left justifying the output. <del> out += `${' '.repeat(needed)}${cell}${' '.repeat(Math.ceil(needed))}`; <add> out += `${' '.repeat(needed)}${cell}${' '.repeat(MathCeil(needed))}`; <ide> if (i !== row.length - 1) <ide> out += tableChars.middle; <ide> } <ide> const renderRow = (row, columnWidths) => { <ide> const table = (head, columns) => { <ide> const rows = []; <ide> const columnWidths = head.map((h) => getStringWidth(h)); <del> const longestColumn = columns.reduce((n, a) => Math.max(n, a.length), 0); <add> const longestColumn = columns.reduce((n, a) => MathMax(n, a.length), 0); <ide> <ide> for (let i = 0; i < head.length; i++) { <ide> const column = columns[i]; <ide> for (let j = 0; j < longestColumn; j++) { <ide> if (rows[j] === undefined) <ide> rows[j] = []; <ide> const value = rows[j][i] = <del> ObjectPrototype.hasOwnProperty(column, j) ? column[j] : ''; <add> ObjectPrototypeHasOwnProperty(column, j) ? 
column[j] : ''; <ide> const width = columnWidths[i] || 0; <ide> const counted = getStringWidth(value); <del> columnWidths[i] = Math.max(width, counted); <add> columnWidths[i] = MathMax(width, counted); <ide> } <ide> } <ide> <ide><path>lib/internal/cluster/child.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add>} = primordials; <ide> <ide> const assert = require('internal/assert'); <ide> const path = require('path'); <ide> function rr(message, indexesKey, cb) { <ide> <ide> function getsockname(out) { <ide> if (key) <del> Object.assign(out, message.sockname); <add> ObjectAssign(out, message.sockname); <ide> <ide> return 0; <ide> } <ide><path>lib/internal/cluster/master.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectKeys, <add> ObjectValues, <add>} = primordials; <ide> <ide> const assert = require('internal/assert'); <ide> const { fork } = require('child_process'); <ide> cluster.setupMaster = function(options) { <ide> if (message.cmd !== 'NODE_DEBUG_ENABLED') <ide> return; <ide> <del> for (const worker of Object.values(cluster.workers)) { <add> for (const worker of ObjectValues(cluster.workers)) { <ide> if (worker.state === 'online' || worker.state === 'listening') { <ide> process._debugProcess(worker.process.pid); <ide> } else { <ide> function removeWorker(worker) { <ide> assert(worker); <ide> delete cluster.workers[worker.id]; <ide> <del> if (Object.keys(cluster.workers).length === 0) { <add> if (ObjectKeys(cluster.workers).length === 0) { <ide> assert(handles.size === 0, 'Resource leak detected.'); <ide> intercom.emit('disconnect'); <ide> } <ide> function emitForkNT(worker) { <ide> } <ide> <ide> cluster.disconnect = function(cb) { <del> const workers = Object.keys(cluster.workers); <add> const workers = ObjectKeys(cluster.workers); <ide> <ide> if (workers.length === 0) { <ide> process.nextTick(() => intercom.emit('disconnect')); <ide> } else { <del> for (const worker of Object.values(cluster.workers)) { <add> for (const worker of ObjectValues(cluster.workers)) { <ide> if (worker.isConnected()) { <ide> worker.disconnect(); <ide> } <ide><path>lib/internal/cluster/worker.js <ide> 'use strict'; <ide> <ide> const { <del> Object: { <del> setPrototypeOf: ObjectSetPrototypeOf <del> } <add> ObjectSetPrototypeOf, <ide> } = primordials; <ide> <ide> const EventEmitter = require('events'); <ide><path>lib/internal/console/constructor.js <ide> // The Console constructor is not actually used to construct the global <ide> // console. It's exported for backwards compatibility. 
<ide> <del>const { Object, ObjectPrototype, Reflect, Math } = primordials; <add>const { <add> MathFloor, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ObjectValues, <add> ReflectOwnKeys, <add>} = primordials; <ide> <ide> const { trace } = internalBinding('trace_events'); <ide> const { <ide> function Console(options /* or: stdout, stderr, ignoreErrors = true */) { <ide> } <ide> <ide> // Bind the prototype functions to this Console instance <del> const keys = Object.keys(Console.prototype); <add> const keys = ObjectKeys(Console.prototype); <ide> for (var v = 0; v < keys.length; v++) { <ide> var k = keys[v]; <ide> // We have to bind the methods grabbed from the instance instead of from <ide> const consolePropAttributes = { <ide> }; <ide> <ide> // Fixup global.console instanceof global.console.Console <del>Object.defineProperty(Console, Symbol.hasInstance, { <add>ObjectDefineProperty(Console, Symbol.hasInstance, { <ide> value(instance) { <ide> return instance[kIsConsole]; <ide> } <ide> }); <ide> <ide> // Eager version for the Console constructor <ide> Console.prototype[kBindStreamsEager] = function(stdout, stderr) { <del> Object.defineProperties(this, { <add> ObjectDefineProperties(this, { <ide> '_stdout': { ...consolePropAttributes, value: stdout }, <ide> '_stderr': { ...consolePropAttributes, value: stderr } <ide> }); <ide> Console.prototype[kBindStreamsEager] = function(stdout, stderr) { <ide> Console.prototype[kBindStreamsLazy] = function(object) { <ide> let stdout; <ide> let stderr; <del> Object.defineProperties(this, { <add> ObjectDefineProperties(this, { <ide> '_stdout': { <ide> enumerable: false, <ide> configurable: true, <ide> Console.prototype[kBindStreamsLazy] = function(object) { <ide> }; <ide> <ide> Console.prototype[kBindProperties] = function(ignoreErrors, colorMode) { <del> Object.defineProperties(this, { <add> ObjectDefineProperties(this, { <ide> '_stdoutErrorHandler': { <ide> ...consolePropAttributes, <ide> value: createWriteErrorHandler(this, kUseStdout) <ide> const consoleMethods = { <ide> const depth = v !== null && <ide> typeof v === 'object' && <ide> !isArray(v) && <del> Object.keys(v).length > 2 ? -1 : 0; <add> ObjectKeys(v).length > 2 ? 
-1 : 0; <ide> const opt = { <ide> depth, <ide> maxArrayLength: 3, <ide> const consoleMethods = { <ide> const map = {}; <ide> let hasPrimitives = false; <ide> const valuesKeyArray = []; <del> const indexKeyArray = Object.keys(tabularData); <add> const indexKeyArray = ObjectKeys(tabularData); <ide> <ide> for (; i < indexKeyArray.length; i++) { <ide> const item = tabularData[indexKeyArray[i]]; <ide> const consoleMethods = { <ide> hasPrimitives = true; <ide> valuesKeyArray[i] = _inspect(item); <ide> } else { <del> const keys = properties || Object.keys(item); <add> const keys = properties || ObjectKeys(item); <ide> for (const key of keys) { <ide> if (map[key] === undefined) <ide> map[key] = []; <ide> if ((primitive && properties) || <del> !ObjectPrototype.hasOwnProperty(item, key)) <add> !ObjectPrototypeHasOwnProperty(item, key)) <ide> map[key][i] = ''; <ide> else <ide> map[key][i] = _inspect(item[key]); <ide> } <ide> } <ide> } <ide> <del> const keys = Object.keys(map); <del> const values = Object.values(map); <add> const keys = ObjectKeys(map); <add> const values = ObjectValues(map); <ide> if (hasPrimitives) { <ide> keys.push(valuesKey); <ide> values.push(valuesKeyArray); <ide> function formatTime(ms) { <ide> if (ms >= kSecond) { <ide> if (ms >= kMinute) { <ide> if (ms >= kHour) { <del> hours = Math.floor(ms / kHour); <add> hours = MathFloor(ms / kHour); <ide> ms = ms % kHour; <ide> } <del> minutes = Math.floor(ms / kMinute); <add> minutes = MathFloor(ms / kMinute); <ide> ms = ms % kMinute; <ide> } <ide> seconds = ms / kSecond; <ide> const isArray = (v) => ArrayIsArray(v) || isTypedArray(v) || isBuffer(v); <ide> <ide> function noop() {} <ide> <del>for (const method of Reflect.ownKeys(consoleMethods)) <add>for (const method of ReflectOwnKeys(consoleMethods)) <ide> Console.prototype[method] = consoleMethods[method]; <ide> <ide> Console.prototype.debug = Console.prototype.log; <ide><path>lib/internal/console/global.js <ide> // Therefore, the console.Console.prototype is not <ide> // in the global console prototype chain anymore. <ide> <del>const { Object, Reflect } = primordials; <add>const { <add> ObjectCreate, <add> ReflectDefineProperty, <add> ReflectGetOwnPropertyDescriptor, <add> ReflectOwnKeys, <add>} = primordials; <ide> <ide> const { <ide> Console, <ide> kBindStreamsLazy, <ide> kBindProperties <ide> } = require('internal/console/constructor'); <ide> <del>const globalConsole = Object.create({}); <add>const globalConsole = ObjectCreate({}); <ide> <ide> // Since Console is not on the prototype chain of the global console, <ide> // the symbol properties on Console.prototype have to be looked up from <ide> // the global console itself. In addition, we need to make the global <ide> // console a namespace by binding the console methods directly onto <ide> // the global console with the receiver fixed. 
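// Standalone sketch of the receiver-fixing idea described in the comment above
// (hypothetical objects; the real loop copies every own key of Console.prototype):
const impl = { label: 'console-like', log(msg) { return `${this.label}: ${msg}`; } };
const namespace = {};
namespace.log = impl.log.bind(impl);   // `this` stays fixed even when detached
const { log } = namespace;
console.log(log('hello'));             // 'console-like: hello'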
<del>for (const prop of Reflect.ownKeys(Console.prototype)) { <add>for (const prop of ReflectOwnKeys(Console.prototype)) { <ide> if (prop === 'constructor') { continue; } <del> const desc = Reflect.getOwnPropertyDescriptor(Console.prototype, prop); <add> const desc = ReflectGetOwnPropertyDescriptor(Console.prototype, prop); <ide> if (typeof desc.value === 'function') { // fix the receiver <ide> desc.value = desc.value.bind(globalConsole); <ide> } <del> Reflect.defineProperty(globalConsole, prop, desc); <add> ReflectDefineProperty(globalConsole, prop, desc); <ide> } <ide> <ide> globalConsole[kBindStreamsLazy](process); <ide><path>lib/internal/crypto/cipher.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> RSA_PKCS1_OAEP_PADDING, <ide> function Cipher(cipher, password, options) { <ide> createCipher.call(this, cipher, password, options, true); <ide> } <ide> <del>Object.setPrototypeOf(Cipher.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Cipher, LazyTransform); <add>ObjectSetPrototypeOf(Cipher.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Cipher, LazyTransform); <ide> <ide> Cipher.prototype._transform = function _transform(chunk, encoding, callback) { <ide> this.push(this[kHandle].update(chunk, encoding)); <ide> function addCipherPrototypeFunctions(constructor) { <ide> constructor.prototype.setAAD = Cipher.prototype.setAAD; <ide> } <ide> <del>Object.setPrototypeOf(Cipheriv.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Cipheriv, LazyTransform); <add>ObjectSetPrototypeOf(Cipheriv.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Cipheriv, LazyTransform); <ide> addCipherPrototypeFunctions(Cipheriv); <ide> <ide> function Decipher(cipher, password, options) { <ide> function Decipher(cipher, password, options) { <ide> createCipher.call(this, cipher, password, options, false); <ide> } <ide> <del>Object.setPrototypeOf(Decipher.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Decipher, LazyTransform); <add>ObjectSetPrototypeOf(Decipher.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Decipher, LazyTransform); <ide> addCipherPrototypeFunctions(Decipher); <ide> <ide> <ide> function Decipheriv(cipher, key, iv, options) { <ide> createCipherWithIV.call(this, cipher, key, options, false, iv); <ide> } <ide> <del>Object.setPrototypeOf(Decipheriv.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Decipheriv, LazyTransform); <add>ObjectSetPrototypeOf(Decipheriv.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Decipheriv, LazyTransform); <ide> addCipherPrototypeFunctions(Decipheriv); <ide> <ide> module.exports = { <ide><path>lib/internal/crypto/diffiehellman.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> function DiffieHellman(sizeOrKey, keyEncoding, generator, genEncoding) { <ide> generator = toBuf(generator, genEncoding); <ide> <ide> this[kHandle] = new _DiffieHellman(sizeOrKey, generator); <del> Object.defineProperty(this, 'verifyError', { <add> ObjectDefineProperty(this, 'verifyError', { <ide> enumerable: true, <ide> value: this[kHandle].verifyError, <ide> writable: false <ide> function DiffieHellmanGroup(name) { <ide> if (!(this instanceof DiffieHellmanGroup)) <ide> return new DiffieHellmanGroup(name); <ide> this[kHandle] = new 
_DiffieHellmanGroup(name); <del> Object.defineProperty(this, 'verifyError', { <add> ObjectDefineProperty(this, 'verifyError', { <ide> enumerable: true, <ide> value: this[kHandle].verifyError, <ide> writable: false <ide><path>lib/internal/crypto/hash.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> Hash: _Hash, <ide> function Hash(algorithm, options) { <ide> LazyTransform.call(this, options); <ide> } <ide> <del>Object.setPrototypeOf(Hash.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Hash, LazyTransform); <add>ObjectSetPrototypeOf(Hash.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Hash, LazyTransform); <ide> <ide> Hash.prototype.copy = function copy(options) { <ide> const state = this[kState]; <ide> function Hmac(hmac, key, options) { <ide> LazyTransform.call(this, options); <ide> } <ide> <del>Object.setPrototypeOf(Hmac.prototype, LazyTransform.prototype); <del>Object.setPrototypeOf(Hmac, LazyTransform); <add>ObjectSetPrototypeOf(Hmac.prototype, LazyTransform.prototype); <add>ObjectSetPrototypeOf(Hmac, LazyTransform); <ide> <ide> Hmac.prototype.update = Hash.prototype.update; <ide> <ide><path>lib/internal/crypto/keygen.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { AsyncWrap, Providers } = internalBinding('async_wrap'); <ide> const { <ide> function generateKeyPair(type, options, callback) { <ide> handleError(impl(wrap)); <ide> } <ide> <del>Object.defineProperty(generateKeyPair, customPromisifyArgs, { <add>ObjectDefineProperty(generateKeyPair, customPromisifyArgs, { <ide> value: ['publicKey', 'privateKey'], <ide> enumerable: false <ide> }); <ide><path>lib/internal/crypto/keys.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { <ide> KeyObject: KeyObjectHandle, <ide> class KeyObject { <ide> <ide> this[kKeyType] = type; <ide> <del> Object.defineProperty(this, kHandle, { <add> ObjectDefineProperty(this, kHandle, { <ide> value: handle, <ide> enumerable: false, <ide> configurable: false, <ide><path>lib/internal/crypto/random.js <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathMin, <add>} = primordials; <ide> <ide> const { AsyncWrap, Providers } = internalBinding('async_wrap'); <ide> const { Buffer, kMaxLength } = require('buffer'); <ide> const { validateNumber } = require('internal/validators'); <ide> const { isArrayBufferView } = require('internal/util/types'); <ide> <ide> const kMaxUint32 = 2 ** 32 - 1; <del>const kMaxPossibleLength = Math.min(kMaxLength, kMaxUint32); <add>const kMaxPossibleLength = MathMin(kMaxLength, kMaxUint32); <ide> <ide> function assertOffset(offset, elementSize, length) { <ide> validateNumber(offset, 'offset'); <ide> offset *= elementSize; <ide> <del> const maxLength = Math.min(length, kMaxPossibleLength); <add> const maxLength = MathMin(length, kMaxPossibleLength); <ide> if (Number.isNaN(offset) || offset > maxLength || offset < 0) { <ide> throw new ERR_OUT_OF_RANGE('offset', `>= 0 && <= ${maxLength}`, offset); <ide> } <ide><path>lib/internal/crypto/sig.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> ERR_CRYPTO_SIGN_KEY_REQUIRED, <ide> function Sign(algorithm, options) { <ide> Writable.call(this, 
options); <ide> } <ide> <del>Object.setPrototypeOf(Sign.prototype, Writable.prototype); <del>Object.setPrototypeOf(Sign, Writable); <add>ObjectSetPrototypeOf(Sign.prototype, Writable.prototype); <add>ObjectSetPrototypeOf(Sign, Writable); <ide> <ide> Sign.prototype._write = function _write(chunk, encoding, callback) { <ide> this.update(chunk, encoding); <ide> function Verify(algorithm, options) { <ide> Writable.call(this, options); <ide> } <ide> <del>Object.setPrototypeOf(Verify.prototype, Writable.prototype); <del>Object.setPrototypeOf(Verify, Writable); <add>ObjectSetPrototypeOf(Verify.prototype, Writable.prototype); <add>ObjectSetPrototypeOf(Verify, Writable); <ide> <ide> Verify.prototype._write = Sign.prototype._write; <ide> Verify.prototype.update = Sign.prototype.update; <ide><path>lib/internal/dns/promises.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { <ide> bindDefaultResolver, <ide> function resolver(bindingName) { <ide> return createResolverPromise(this, bindingName, name, ttl); <ide> } <ide> <del> Object.defineProperty(query, 'name', { value: bindingName }); <add> ObjectDefineProperty(query, 'name', { value: bindingName }); <ide> return query; <ide> } <ide> <ide> <del>const resolveMap = Object.create(null); <add>const resolveMap = ObjectCreate(null); <ide> <ide> // Resolver instances correspond 1:1 to c-ares channels. <ide> class Resolver { <ide><path>lib/internal/encoding.js <ide> // An implementation of the WHATWG Encoding Standard <ide> // https://encoding.spec.whatwg.org <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectGetOwnPropertyDescriptors, <add>} = primordials; <ide> <ide> const { <ide> ERR_ENCODING_INVALID_ENCODED_DATA, <ide> class TextEncoder { <ide> if (typeof depth === 'number' && depth < 0) <ide> return this; <ide> const ctor = getConstructorOf(this); <del> const obj = Object.create({ <add> const obj = ObjectCreate({ <ide> constructor: ctor === null ? TextEncoder : ctor <ide> }); <ide> obj.encoding = this.encoding; <ide> class TextEncoder { <ide> } <ide> } <ide> <del>Object.defineProperties( <add>ObjectDefineProperties( <ide> TextEncoder.prototype, { <ide> 'encode': { enumerable: true }, <ide> 'encodeInto': { enumerable: true }, <ide> function makeTextDecoderJS() { <ide> <ide> // Mix in some shared properties. <ide> { <del> Object.defineProperties( <add> ObjectDefineProperties( <ide> TextDecoder.prototype, <del> Object.getOwnPropertyDescriptors({ <add> ObjectGetOwnPropertyDescriptors({ <ide> get encoding() { <ide> validateDecoder(this); <ide> return this[kEncoding]; <ide> function makeTextDecoderJS() { <ide> if (typeof depth === 'number' && depth < 0) <ide> return this; <ide> const ctor = getConstructorOf(this); <del> const obj = Object.create({ <add> const obj = ObjectCreate({ <ide> constructor: ctor === null ? 
TextDecoder : ctor <ide> }); <ide> obj.encoding = this.encoding; <ide> function makeTextDecoderJS() { <ide> return require('internal/util/inspect').inspect(obj, opts); <ide> } <ide> })); <del> Object.defineProperties(TextDecoder.prototype, { <add> ObjectDefineProperties(TextDecoder.prototype, { <ide> decode: { enumerable: true }, <ide> [inspect]: { enumerable: false }, <ide> [Symbol.toStringTag]: { <ide><path>lib/internal/error-serdes.js <ide> <ide> const Buffer = require('buffer').Buffer; <ide> const { <del> ArrayPrototype, <del> FunctionPrototype, <del> Object, <del> ObjectPrototype, <add> ArrayPrototypeForEach, <add> FunctionPrototypeCall, <add> ObjectAssign, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetOwnPropertyNames, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ObjectPrototypeToString, <ide> SafeSet, <ide> } = primordials; <ide> <ide> const kInspectedError = 2; <ide> const errors = { <ide> Error, TypeError, RangeError, URIError, SyntaxError, ReferenceError, EvalError <ide> }; <del>const errorConstructorNames = new SafeSet(Object.keys(errors)); <add>const errorConstructorNames = new SafeSet(ObjectKeys(errors)); <ide> <ide> function TryGetAllProperties(object, target = object) { <del> const all = Object.create(null); <add> const all = ObjectCreate(null); <ide> if (object === null) <ide> return all; <del> Object.assign(all, <del> TryGetAllProperties(Object.getPrototypeOf(object), target)); <del> const keys = Object.getOwnPropertyNames(object); <del> ArrayPrototype.forEach(keys, (key) => { <add> ObjectAssign(all, <add> TryGetAllProperties(ObjectGetPrototypeOf(object), target)); <add> const keys = ObjectGetOwnPropertyNames(object); <add> ArrayPrototypeForEach(keys, (key) => { <ide> let descriptor; <ide> try { <del> descriptor = Object.getOwnPropertyDescriptor(object, key); <add> descriptor = ObjectGetOwnPropertyDescriptor(object, key); <ide> } catch { return; } <ide> const getter = descriptor.get; <ide> if (getter && key !== '__proto__') { <ide> try { <del> descriptor.value = FunctionPrototype.call(getter, target); <add> descriptor.value = FunctionPrototypeCall(getter, target); <ide> } catch {} <ide> } <ide> if ('value' in descriptor && typeof descriptor.value !== 'function') { <ide> function GetConstructors(object) { <ide> <ide> for (var current = object; <ide> current !== null; <del> current = Object.getPrototypeOf(current)) { <del> const desc = Object.getOwnPropertyDescriptor(current, 'constructor'); <add> current = ObjectGetPrototypeOf(current)) { <add> const desc = ObjectGetOwnPropertyDescriptor(current, 'constructor'); <ide> if (desc && desc.value) { <del> Object.defineProperty(constructors, constructors.length, { <add> ObjectDefineProperty(constructors, constructors.length, { <ide> value: desc.value, enumerable: true <ide> }); <ide> } <ide> function GetConstructors(object) { <ide> } <ide> <ide> function GetName(object) { <del> const desc = Object.getOwnPropertyDescriptor(object, 'name'); <add> const desc = ObjectGetOwnPropertyDescriptor(object, 'name'); <ide> return desc && desc.value; <ide> } <ide> <ide> function serializeError(error) { <ide> if (!serialize) serialize = require('v8').serialize; <ide> try { <ide> if (typeof error === 'object' && <del> ObjectPrototype.toString(error) === '[object Error]') { <add> ObjectPrototypeToString(error) === '[object Error]') { <ide> const constructors = GetConstructors(error); <ide> for (var i = 0; i < constructors.length; i++) { <ide> const name = GetName(constructors[i]); <ide> 
function deserializeError(error) { <ide> case kSerializedError: <ide> const { constructor, properties } = deserialize(error.subarray(1)); <ide> const ctor = errors[constructor]; <del> return Object.create(ctor.prototype, properties); <add> return ObjectCreate(ctor.prototype, properties); <ide> case kSerializedObject: <ide> return deserialize(error.subarray(1)); <ide> case kInspectedError: <ide><path>lib/internal/errors.js <ide> // value statically and permanently identifies the error. While the error <ide> // message may change, the code should not. <ide> <del>const { Object, Math } = primordials; <add>const { <add> MathAbs, <add> ObjectDefineProperty, <add> ObjectKeys, <add>} = primordials; <ide> <ide> const messages = new Map(); <ide> const codes = {}; <ide> class SystemError extends Error { <ide> if (context.dest !== undefined) <ide> message += ` => ${context.dest}`; <ide> <del> Object.defineProperty(this, 'message', { <add> ObjectDefineProperty(this, 'message', { <ide> value: message, <ide> enumerable: false, <ide> writable: true, <ide> class SystemError extends Error { <ide> <ide> this.code = key; <ide> <del> Object.defineProperty(this, 'info', { <add> ObjectDefineProperty(this, 'info', { <ide> value: context, <ide> enumerable: true, <ide> configurable: true, <ide> writable: false <ide> }); <ide> <del> Object.defineProperty(this, 'errno', { <add> ObjectDefineProperty(this, 'errno', { <ide> get() { <ide> return context.errno; <ide> }, <ide> class SystemError extends Error { <ide> configurable: true <ide> }); <ide> <del> Object.defineProperty(this, 'syscall', { <add> ObjectDefineProperty(this, 'syscall', { <ide> get() { <ide> return context.syscall; <ide> }, <ide> class SystemError extends Error { <ide> // always be of type string. We should probably just remove the <ide> // `.toString()` and `Buffer.from()` operations and set the value on the <ide> // context as the user did. <del> Object.defineProperty(this, 'path', { <add> ObjectDefineProperty(this, 'path', { <ide> get() { <ide> return context.path != null ? <ide> context.path.toString() : context.path; <ide> class SystemError extends Error { <ide> } <ide> <ide> if (context.dest !== undefined) { <del> Object.defineProperty(this, 'dest', { <add> ObjectDefineProperty(this, 'dest', { <ide> get() { <ide> return context.dest != null ? <ide> context.dest.toString() : context.dest; <ide> function makeNodeErrorWithCode(Base, key) { <ide> Error.stackTraceLimit = limit; <ide> } <ide> const message = getMessage(key, args, this); <del> Object.defineProperty(this, 'message', { <add> ObjectDefineProperty(this, 'message', { <ide> value: message, <ide> enumerable: false, <ide> writable: true, <ide> function addCodeToName(err, name, code) { <ide> err.stack; <ide> // Reset the name to the actual name. <ide> if (name === 'SystemError') { <del> Object.defineProperty(err, 'name', { <add> ObjectDefineProperty(err, 'name', { <ide> value: name, <ide> enumerable: false, <ide> writable: true, <ide> function uvException(ctx) { <ide> const err = new Error(message); <ide> Error.stackTraceLimit = tmpLimit; <ide> <del> for (const prop of Object.keys(ctx)) { <add> for (const prop of ObjectKeys(ctx)) { <ide> if (prop === 'message' || prop === 'path' || prop === 'dest') { <ide> continue; <ide> } <ide> E('ERR_OUT_OF_RANGE', <ide> let msg = replaceDefaultBoolean ? 
str : <ide> `The value of "${str}" is out of range.`; <ide> let received; <del> if (Number.isInteger(input) && Math.abs(input) > 2 ** 32) { <add> if (Number.isInteger(input) && MathAbs(input) > 2 ** 32) { <ide> received = addNumericalSeparator(String(input)); <ide> } else if (typeof input === 'bigint') { <ide> received = String(input); <ide><path>lib/internal/freelist.js <ide> 'use strict'; <ide> <del>const { Reflect } = primordials; <add>const { <add> ReflectApply, <add>} = primordials; <ide> <ide> class FreeList { <ide> constructor(name, max, ctor) { <ide> class FreeList { <ide> alloc() { <ide> return this.list.length > 0 ? <ide> this.list.pop() : <del> Reflect.apply(this.ctor, this, arguments); <add> ReflectApply(this.ctor, this, arguments); <ide> } <ide> <ide> free(obj) { <ide><path>lib/internal/fs/dir.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const pathModule = require('path'); <ide> const binding = internalBinding('fs'); <ide> class Dir { <ide> } <ide> } <ide> <del>Object.defineProperty(Dir.prototype, Symbol.asyncIterator, { <add>ObjectDefineProperty(Dir.prototype, Symbol.asyncIterator, { <ide> value: Dir.prototype.entries, <ide> enumerable: false, <ide> writable: true, <ide><path>lib/internal/fs/promises.js <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathMax, <add> MathMin, <add>} = primordials; <ide> <ide> const { <ide> F_OK, <ide> async function writeFileHandle(filehandle, data, options) { <ide> do { <ide> const { bytesWritten } = <ide> await write(filehandle, buffer, 0, <del> Math.min(16384, buffer.length)); <add> MathMin(16384, buffer.length)); <ide> remaining -= bytesWritten; <ide> buffer = buffer.slice(bytesWritten); <ide> } while (remaining > 0); <ide> async function readFileHandle(filehandle, options) { <ide> const chunks = []; <ide> const chunkSize = size === 0 ? 
<ide> kReadFileMaxChunkSize : <del> Math.min(size, kReadFileMaxChunkSize); <add> MathMin(size, kReadFileMaxChunkSize); <ide> let endOfFile = false; <ide> do { <ide> const buf = Buffer.alloc(chunkSize); <ide> async function truncate(path, len = 0) { <ide> async function ftruncate(handle, len = 0) { <ide> validateFileHandle(handle); <ide> validateInteger(len, 'len'); <del> len = Math.max(0, len); <add> len = MathMax(0, len); <ide> return binding.ftruncate(handle.fd, len, kUsePromises); <ide> } <ide> <ide><path>lib/internal/fs/read_file_context.js <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathMin, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> <ide> class ReadFileContext { <ide> } else { <ide> buffer = this.buffer; <ide> offset = this.pos; <del> length = Math.min(kReadFileBufferLength, this.size - this.pos); <add> length = MathMin(kReadFileBufferLength, this.size - this.pos); <ide> } <ide> <ide> const req = new FSReqCallback(); <ide><path>lib/internal/fs/streams.js <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMin, <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> ERR_OUT_OF_RANGE <ide> function ReadStream(path, options) { <ide> } <ide> }); <ide> } <del>Object.setPrototypeOf(ReadStream.prototype, Readable.prototype); <del>Object.setPrototypeOf(ReadStream, Readable); <add>ObjectSetPrototypeOf(ReadStream.prototype, Readable.prototype); <add>ObjectSetPrototypeOf(ReadStream, Readable); <ide> <ide> const openReadFs = internalUtil.deprecate(function() { <ide> _openReadFs(this); <ide> ReadStream.prototype._read = function(n) { <ide> // in the thread pool another read() finishes up the pool, and <ide> // allocates a new one. <ide> const thisPool = pool; <del> let toRead = Math.min(pool.length - pool.used, n); <add> let toRead = MathMin(pool.length - pool.used, n); <ide> const start = pool.used; <ide> <ide> if (this.pos !== undefined) <del> toRead = Math.min(this.end - this.pos + 1, toRead); <add> toRead = MathMin(this.end - this.pos + 1, toRead); <ide> else <del> toRead = Math.min(this.end - this.bytesRead + 1, toRead); <add> toRead = MathMin(this.end - this.bytesRead + 1, toRead); <ide> <ide> // Already read everything we were supposed to read! <ide> // treat as EOF. <ide> ReadStream.prototype.close = function(cb) { <ide> this.destroy(null, cb); <ide> }; <ide> <del>Object.defineProperty(ReadStream.prototype, 'pending', { <add>ObjectDefineProperty(ReadStream.prototype, 'pending', { <ide> get() { return this.fd === null; }, <ide> configurable: true <ide> }); <ide> function WriteStream(path, options) { <ide> if (typeof this.fd !== 'number') <ide> _openWriteFs(this); <ide> } <del>Object.setPrototypeOf(WriteStream.prototype, Writable.prototype); <del>Object.setPrototypeOf(WriteStream, Writable); <add>ObjectSetPrototypeOf(WriteStream.prototype, Writable.prototype); <add>ObjectSetPrototypeOf(WriteStream, Writable); <ide> <ide> WriteStream.prototype._final = function(callback) { <ide> if (typeof this.fd !== 'number') { <ide> WriteStream.prototype.close = function(cb) { <ide> // There is no shutdown() for files. 
<ide> WriteStream.prototype.destroySoon = WriteStream.prototype.end; <ide> <del>Object.defineProperty(WriteStream.prototype, 'pending', { <add>ObjectDefineProperty(WriteStream.prototype, 'pending', { <ide> get() { return this.fd === null; }, <ide> configurable: true <ide> }); <ide><path>lib/internal/fs/sync_write_stream.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { Writable } = require('stream'); <ide> const { closeSync, writeSync } = require('fs'); <ide> function SyncWriteStream(fd, options) { <ide> this.autoClose = options.autoClose === undefined ? true : options.autoClose; <ide> } <ide> <del>Object.setPrototypeOf(SyncWriteStream.prototype, Writable.prototype); <del>Object.setPrototypeOf(SyncWriteStream, Writable); <add>ObjectSetPrototypeOf(SyncWriteStream.prototype, Writable.prototype); <add>ObjectSetPrototypeOf(SyncWriteStream, Writable); <ide> <ide> SyncWriteStream.prototype._write = function(chunk, encoding, cb) { <ide> writeSync(this.fd, chunk, 0, chunk.length); <ide><path>lib/internal/fs/utils.js <ide> 'use strict'; <ide> <del>const { Object, Reflect } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add> ReflectOwnKeys, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> class DirentFromStats extends Dirent { <ide> } <ide> } <ide> <del>for (const name of Reflect.ownKeys(Dirent.prototype)) { <add>for (const name of ReflectOwnKeys(Dirent.prototype)) { <ide> if (name === 'constructor') { <ide> continue; <ide> } <ide> function BigIntStats(dev, mode, nlink, uid, gid, rdev, blksize, <ide> this.birthtime = dateFromMs(this.birthtimeMs); <ide> } <ide> <del>Object.setPrototypeOf(BigIntStats.prototype, StatsBase.prototype); <del>Object.setPrototypeOf(BigIntStats, StatsBase); <add>ObjectSetPrototypeOf(BigIntStats.prototype, StatsBase.prototype); <add>ObjectSetPrototypeOf(BigIntStats, StatsBase); <ide> <ide> BigIntStats.prototype._checkModeProperty = function(property) { <ide> if (isWindows && (property === S_IFIFO || property === S_IFBLK || <ide> function Stats(dev, mode, nlink, uid, gid, rdev, blksize, <ide> this.birthtime = dateFromMs(birthtimeMs); <ide> } <ide> <del>Object.setPrototypeOf(Stats.prototype, StatsBase.prototype); <del>Object.setPrototypeOf(Stats, StatsBase); <add>ObjectSetPrototypeOf(Stats.prototype, StatsBase.prototype); <add>ObjectSetPrototypeOf(Stats, StatsBase); <ide> <ide> // HACK: Workaround for https://github.com/standard-things/esm/issues/821. <ide> // TODO(ronag): Remove this as soon as `esm` publishes a fixed version. 
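// Standalone sketch of the naming scheme behind identifiers such as MathMin and
// ObjectSetPrototypeOf used in the fs files above. Simplified: the real mapping is
// built by copyPropsRenamed()/copyPrototype() in
// lib/internal/per_context/primordials.js further down in this patch, and the
// uncurryThis stand-in here only approximates the internal helper.
const uncurryThis = (fn) => (thisArg, ...args) => Reflect.apply(fn, thisArg, args);
const MathMin = Math.min;                                         // static method: copied as-is
const StringPrototypeSlice = uncurryThis(String.prototype.slice); // prototype method: uncurried
console.log(MathMin(3, 7));                                       // 3
console.log(StringPrototypeSlice('primordials', 0, 3));           // 'pri'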
<ide><path>lib/internal/fs/watchers.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const errors = require('internal/errors'); <ide> const { <ide> function StatWatcher(bigint) { <ide> this[kOldStatus] = -1; <ide> this[kUseBigint] = bigint; <ide> } <del>Object.setPrototypeOf(StatWatcher.prototype, EventEmitter.prototype); <del>Object.setPrototypeOf(StatWatcher, EventEmitter); <add>ObjectSetPrototypeOf(StatWatcher.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(StatWatcher, EventEmitter); <ide> <ide> function onchange(newStatus, stats) { <ide> const self = this[owner_symbol]; <ide> function FSWatcher() { <ide> } <ide> }; <ide> } <del>Object.setPrototypeOf(FSWatcher.prototype, EventEmitter.prototype); <del>Object.setPrototypeOf(FSWatcher, EventEmitter); <add>ObjectSetPrototypeOf(FSWatcher.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(FSWatcher, EventEmitter); <ide> <ide> // At the moment if filename is undefined, we <ide> // 1. Throw an Error if it's the first time Symbol('kFSWatchStart') is called <ide> function emitCloseNT(self) { <ide> <ide> // Legacy alias on the C++ wrapper object. This is not public API, so we may <ide> // want to runtime-deprecate it at some point. There's no hurry, though. <del>Object.defineProperty(FSEvent.prototype, 'owner', { <add>ObjectDefineProperty(FSEvent.prototype, 'owner', { <ide> get() { return this[owner_symbol]; }, <ide> set(v) { return this[owner_symbol] = v; } <ide> }); <ide><path>lib/internal/http2/compat.js <ide> 'use strict'; <ide> <del>const { Object, ObjectPrototype, Reflect } = primordials; <add>const { <add> ObjectAssign, <add> ObjectCreate, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ReflectGetPrototypeOf, <add>} = primordials; <ide> <ide> const assert = require('internal/assert'); <ide> const Stream = require('stream'); <ide> function onStreamData(chunk) { <ide> function onStreamTrailers(trailers, flags, rawTrailers) { <ide> const request = this[kRequest]; <ide> if (request !== undefined) { <del> Object.assign(request[kTrailers], trailers); <add> ObjectAssign(request[kTrailers], trailers); <ide> request[kRawTrailers].push(...rawTrailers); <ide> } <ide> } <ide> const proxySocketHandler = { <ide> }, <ide> getPrototypeOf(stream) { <ide> if (stream.session !== undefined) <del> return Reflect.getPrototypeOf(stream.session[kSocket]); <del> return Reflect.getPrototypeOf(stream); <add> return ReflectGetPrototypeOf(stream.session[kSocket]); <add> return ReflectGetPrototypeOf(stream); <ide> }, <ide> set(stream, prop, value) { <ide> switch (prop) { <ide> class Http2ServerResponse extends Stream { <ide> sendDate: true, <ide> statusCode: HTTP_STATUS_OK, <ide> }; <del> this[kHeaders] = Object.create(null); <del> this[kTrailers] = Object.create(null); <add> this[kHeaders] = ObjectCreate(null); <add> this[kTrailers] = ObjectCreate(null); <ide> this[kStream] = stream; <ide> stream[kProxySocket] = null; <ide> stream[kResponse] = this; <ide> class Http2ServerResponse extends Stream { <ide> } <ide> <ide> addTrailers(headers) { <del> const keys = Object.keys(headers); <add> const keys = ObjectKeys(headers); <ide> let key = ''; <ide> for (var i = 0; i < keys.length; i++) { <ide> key = keys[i]; <ide> class Http2ServerResponse extends Stream { <ide> } <ide> <ide> getHeaderNames() { <del> return Object.keys(this[kHeaders]); <add> return ObjectKeys(this[kHeaders]); <ide> } <ide> <ide> getHeaders() { <ide> 
class Http2ServerResponse extends Stream { <ide> hasHeader(name) { <ide> validateString(name, 'name'); <ide> name = name.trim().toLowerCase(); <del> return ObjectPrototype.hasOwnProperty(this[kHeaders], name); <add> return ObjectPrototypeHasOwnProperty(this[kHeaders], name); <ide> } <ide> <ide> removeHeader(name) { <ide> class Http2ServerResponse extends Stream { <ide> this[kSetHeader](header[0], header[1]); <ide> } <ide> } else if (typeof headers === 'object') { <del> const keys = Object.keys(headers); <add> const keys = ObjectKeys(headers); <ide> let key = ''; <ide> for (i = 0; i < keys.length; i++) { <ide> key = keys[i]; <ide><path>lib/internal/http2/core.js <ide> <ide> /* eslint-disable no-use-before-define */ <ide> <del>const { Math, Object, Reflect } = primordials; <add>const { <add> MathMin, <add> ObjectAssign, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectPrototypeHasOwnProperty, <add> ReflectGetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> assertCrypto, <ide> const { kTimeout } = require('internal/timers'); <ide> const { isArrayBufferView } = require('internal/util/types'); <ide> const { format } = require('internal/util/inspect'); <ide> <del>const hasOwnProperty = Object.prototype.hasOwnProperty; <del> <ide> const { FileHandle } = internalBinding('fs'); <ide> const binding = internalBinding('http2'); <ide> const { <ide> const proxySocketHandler = { <ide> const socket = session[kSocket]; <ide> if (socket === undefined) <ide> throw new ERR_HTTP2_SOCKET_UNBOUND(); <del> return Reflect.getPrototypeOf(socket); <add> return ReflectGetPrototypeOf(socket); <ide> }, <ide> set(session, prop, value) { <ide> switch (prop) { <ide> class ClientHttp2Session extends Http2Session { <ide> assertIsObject(headers, 'headers'); <ide> assertIsObject(options, 'options'); <ide> <del> headers = Object.assign(Object.create(null), headers); <add> headers = ObjectAssign(ObjectCreate(null), headers); <ide> options = { ...options }; <ide> <ide> if (headers[HTTP2_HEADER_METHOD] === undefined) <ide> class Http2Stream extends Duplex { <ide> throw new ERR_HTTP2_TRAILERS_NOT_READY(); <ide> <ide> assertIsObject(headers, 'headers'); <del> headers = Object.assign(Object.create(null), headers); <add> headers = ObjectAssign(ObjectCreate(null), headers); <ide> <ide> debugStreamObj(this, 'sending trailers'); <ide> <ide> class Http2Stream extends Duplex { <ide> <ide> function processHeaders(oldHeaders) { <ide> assertIsObject(oldHeaders, 'headers'); <del> const headers = Object.create(null); <add> const headers = ObjectCreate(null); <ide> <ide> if (oldHeaders !== null && oldHeaders !== undefined) { <del> const hop = hasOwnProperty.bind(oldHeaders); <ide> // This loop is here for performance reason. Do not change. <ide> for (const key in oldHeaders) { <del> if (hop(key)) { <add> if (ObjectPrototypeHasOwnProperty(oldHeaders, key)) { <ide> headers[key] = oldHeaders[key]; <ide> } <ide> } <ide> function doSendFileFD(session, options, fd, headers, streamOptions, err, stat) { <ide> if (stat.isFile()) { <ide> statOptions.length = <ide> statOptions.length < 0 ? 
stat.size - (+statOptions.offset) : <del> Math.min(stat.size - (+statOptions.offset), <del> statOptions.length); <add> MathMin(stat.size - (+statOptions.offset), <add> statOptions.length); <ide> <ide> headers[HTTP2_HEADER_CONTENT_LENGTH] = statOptions.length; <ide> } <ide> class ServerHttp2Stream extends Http2Stream { <ide> options.endStream = !!options.endStream; <ide> <ide> assertIsObject(headers, 'headers'); <del> headers = Object.assign(Object.create(null), headers); <add> headers = ObjectAssign(ObjectCreate(null), headers); <ide> <ide> if (headers[HTTP2_HEADER_METHOD] === undefined) <ide> headers[HTTP2_HEADER_METHOD] = HTTP2_METHOD_GET; <ide> class ServerHttp2Stream extends Http2Stream { <ide> throw new ERR_HTTP2_HEADERS_AFTER_RESPOND(); <ide> <ide> assertIsObject(headers, 'headers'); <del> headers = Object.assign(Object.create(null), headers); <add> headers = ObjectAssign(ObjectCreate(null), headers); <ide> <ide> debugStreamObj(this, 'sending additional headers'); <ide> <ide> const setTimeout = { <ide> writable: true, <ide> value: setStreamTimeout <ide> }; <del>Object.defineProperty(Http2Stream.prototype, 'setTimeout', setTimeout); <del>Object.defineProperty(Http2Session.prototype, 'setTimeout', setTimeout); <add>ObjectDefineProperty(Http2Stream.prototype, 'setTimeout', setTimeout); <add>ObjectDefineProperty(Http2Session.prototype, 'setTimeout', setTimeout); <ide> <ide> <ide> // When the socket emits an error, destroy the associated Http2Session and <ide> function connect(authority, options, listener) { <ide> } <ide> <ide> // Support util.promisify <del>Object.defineProperty(connect, promisify.custom, { <add>ObjectDefineProperty(connect, promisify.custom, { <ide> value: (authority, options) => { <ide> return new Promise((resolve) => { <ide> const server = connect(authority, options, () => resolve(server)); <ide><path>lib/internal/http2/util.js <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> ObjectCreate, <add> ObjectKeys, <add>} = primordials; <ide> <ide> const binding = internalBinding('http2'); <ide> const { <ide> function updateOptionsBuffer(options) { <ide> if (typeof options.maxOutstandingSettings === 'number') { <ide> flags |= (1 << IDX_OPTIONS_MAX_OUTSTANDING_SETTINGS); <ide> optionsBuffer[IDX_OPTIONS_MAX_OUTSTANDING_SETTINGS] = <del> Math.max(1, options.maxOutstandingSettings); <add> MathMax(1, options.maxOutstandingSettings); <ide> } <ide> if (typeof options.maxSessionMemory === 'number') { <ide> flags |= (1 << IDX_OPTIONS_MAX_SESSION_MEMORY); <ide> optionsBuffer[IDX_OPTIONS_MAX_SESSION_MEMORY] = <del> Math.max(1, options.maxSessionMemory); <add> MathMax(1, options.maxSessionMemory); <ide> } <ide> optionsBuffer[IDX_OPTIONS_FLAGS] = flags; <ide> } <ide> <ide> function getDefaultSettings() { <ide> settingsBuffer[IDX_SETTINGS_FLAGS] = 0; <ide> binding.refreshDefaultSettings(); <del> const holder = Object.create(null); <add> const holder = ObjectCreate(null); <ide> <ide> const flags = settingsBuffer[IDX_SETTINGS_FLAGS]; <ide> <ide> function mapToHeaders(map, <ide> assertValuePseudoHeader = assertValidPseudoHeader) { <ide> let ret = ''; <ide> let count = 0; <del> const keys = Object.keys(map); <add> const keys = ObjectKeys(map); <ide> const singles = new Set(); <ide> let i; <ide> let isArray; <ide> const assertWithinRange = hideStackFrames( <ide> ); <ide> <ide> function toHeaderObject(headers) { <del> const obj = Object.create(null); <add> const obj = ObjectCreate(null); <ide> for (var n = 0; n < headers.length; n = n + 2) { 
<ide> const name = headers[n]; <ide> let value = headers[n + 1]; <ide><path>lib/internal/main/worker_thread.js <ide> // In worker threads, execute the script sent through the <ide> // message port. <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { <ide> patchProcessObject, <ide> const port = getEnvMessagePort(); <ide> // related IPC properties as unavailable. <ide> if (process.env.NODE_CHANNEL_FD) { <ide> const workerThreadSetup = require('internal/process/worker_thread_only'); <del> Object.defineProperty(process, 'channel', { <add> ObjectDefineProperty(process, 'channel', { <ide> enumerable: false, <ide> get: workerThreadSetup.unavailable('process.channel') <ide> }); <ide> <del> Object.defineProperty(process, 'connected', { <add> ObjectDefineProperty(process, 'connected', { <ide> enumerable: false, <ide> get: workerThreadSetup.unavailable('process.connected') <ide> }); <ide><path>lib/internal/modules/cjs/helpers.js <ide> 'use strict'; <ide> <del>const { Object, SafeMap } = primordials; <add>const { <add> ObjectDefineProperty, <add> SafeMap, <add>} = primordials; <ide> const { <ide> ERR_MANIFEST_DEPENDENCY_MISSING, <ide> ERR_UNKNOWN_BUILTIN_MODULE <ide> function addBuiltinLibsToObject(object) { <ide> object[name] = val; <ide> }; <ide> <del> Object.defineProperty(object, name, { <add> ObjectDefineProperty(object, name, { <ide> get: () => { <ide> const lib = require(name); <ide> <ide> // Disable the current getter/setter and set up a new <ide> // non-enumerable property. <ide> delete object[name]; <del> Object.defineProperty(object, name, { <add> ObjectDefineProperty(object, name, { <ide> get: () => lib, <ide> set: setReal, <ide> configurable: true, <ide><path>lib/internal/modules/cjs/loader.js <ide> 'use strict'; <ide> <ide> const { <del> JSON, <del> Object, <del> ObjectPrototype, <del> Reflect, <add> JSONParse, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectFreeze, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ObjectSetPrototypeOf, <add> ReflectSet, <ide> SafeMap, <del> StringPrototype, <add> StringPrototypeIndexOf, <add> StringPrototypeMatch, <add> StringPrototypeSlice, <add> StringPrototypeStartsWith, <ide> } = primordials; <ide> <ide> const { NativeModule } = require('internal/bootstrap/loaders'); <ide> const { <ide> <ide> const isWindows = process.platform === 'win32'; <ide> <del>const relativeResolveCache = Object.create(null); <add>const relativeResolveCache = ObjectCreate(null); <ide> <ide> let requireDepth = 0; <ide> let statCache = null; <ide> for (const [id, mod] of NativeModule.map) { <ide> } <ide> } <ide> <del>Object.freeze(builtinModules); <add>ObjectFreeze(builtinModules); <ide> Module.builtinModules = builtinModules; <ide> <del>Module._cache = Object.create(null); <del>Module._pathCache = Object.create(null); <del>Module._extensions = Object.create(null); <add>Module._cache = ObjectCreate(null); <add>Module._pathCache = ObjectCreate(null); <add>Module._extensions = ObjectCreate(null); <ide> let modulePaths = []; <ide> Module.globalPaths = []; <ide> <ide> const wrapper = [ <ide> let wrapperProxy = new Proxy(wrapper, { <ide> set(target, property, value, receiver) { <ide> patched = true; <del> return Reflect.set(target, property, value, receiver); <add> return ReflectSet(target, property, value, receiver); <ide> }, <ide> <ide> defineProperty(target, property, descriptor) { <ide> patched = true; <del> return 
Object.defineProperty(target, property, descriptor); <add> return ObjectDefineProperty(target, property, descriptor); <ide> } <ide> }); <ide> <del>Object.defineProperty(Module, 'wrap', { <add>ObjectDefineProperty(Module, 'wrap', { <ide> get() { <ide> return wrap; <ide> }, <ide> Object.defineProperty(Module, 'wrap', { <ide> } <ide> }); <ide> <del>Object.defineProperty(Module, 'wrapper', { <add>ObjectDefineProperty(Module, 'wrapper', { <ide> get() { <ide> return wrapperProxy; <ide> }, <ide> function readPackage(requestPath) { <ide> } <ide> <ide> try { <del> const parsed = JSON.parse(json); <add> const parsed = JSONParse(json); <ide> const filtered = { <ide> name: parsed.name, <ide> main: parsed.main, <ide> function resolveBasePath(basePath, exts, isMain, trailingSlash, request) { <ide> if (!filename) { <ide> // Try it with each of the extensions <ide> if (exts === undefined) <del> exts = Object.keys(Module._extensions); <add> exts = ObjectKeys(Module._extensions); <ide> filename = tryExtensions(basePath, exts, isMain); <ide> } <ide> } <ide> <ide> if (!filename && rc === 1) { // Directory. <ide> // try it with each of the extensions at "index" <ide> if (exts === undefined) <del> exts = Object.keys(Module._extensions); <add> exts = ObjectKeys(Module._extensions); <ide> filename = tryPackage(basePath, exts, isMain, request); <ide> } <ide> <ide> function trySelf(paths, exts, isMain, trailingSlash, request) { <ide> let expansion; <ide> if (request === pkg.name) { <ide> expansion = ''; <del> } else if (StringPrototype.startsWith(request, `${pkg.name}/`)) { <del> expansion = StringPrototype.slice(request, pkg.name.length); <add> } else if (StringPrototypeStartsWith(request, `${pkg.name}/`)) { <add> expansion = StringPrototypeSlice(request, pkg.name.length); <ide> } else { <ide> return false; <ide> } <ide> <ide> if (exts === undefined) <del> exts = Object.keys(Module._extensions); <add> exts = ObjectKeys(Module._extensions); <ide> <ide> if (expansion) { <ide> // Use exports <ide> function isConditionalDotExportSugar(exports, basePath) { <ide> return false; <ide> let isConditional = false; <ide> let firstCheck = true; <del> for (const key of Object.keys(exports)) { <add> for (const key of ObjectKeys(exports)) { <ide> const curIsConditional = key[0] !== '.'; <ide> if (firstCheck) { <ide> firstCheck = false; <ide> function applyExports(basePath, expansion) { <ide> pkgExports = { '.': pkgExports }; <ide> <ide> if (typeof pkgExports === 'object') { <del> if (ObjectPrototype.hasOwnProperty(pkgExports, mappingKey)) { <add> if (ObjectPrototypeHasOwnProperty(pkgExports, mappingKey)) { <ide> const mapping = pkgExports[mappingKey]; <ide> return resolveExportsTarget(pathToFileURL(basePath + '/'), mapping, '', <ide> basePath, mappingKey); <ide> function applyExports(basePath, expansion) { <ide> return basePath; <ide> <ide> let dirMatch = ''; <del> for (const candidateKey of Object.keys(pkgExports)) { <add> for (const candidateKey of ObjectKeys(pkgExports)) { <ide> if (candidateKey[candidateKey.length - 1] !== '/') continue; <ide> if (candidateKey.length > dirMatch.length && <del> StringPrototype.startsWith(mappingKey, candidateKey)) { <add> StringPrototypeStartsWith(mappingKey, candidateKey)) { <ide> dirMatch = candidateKey; <ide> } <ide> } <ide> <ide> if (dirMatch !== '') { <ide> const mapping = pkgExports[dirMatch]; <del> const subpath = StringPrototype.slice(mappingKey, dirMatch.length); <add> const subpath = StringPrototypeSlice(mappingKey, dirMatch.length); <ide> return 
resolveExportsTarget(pathToFileURL(basePath + '/'), mapping, <ide> subpath, basePath, mappingKey); <ide> } <ide> function resolveExports(nmPath, request, absoluteRequest) { <ide> // The implementation's behavior is meant to mirror resolution in ESM. <ide> if (!absoluteRequest) { <ide> const [, name, expansion = ''] = <del> StringPrototype.match(request, EXPORTS_PATTERN) || []; <add> StringPrototypeMatch(request, EXPORTS_PATTERN) || []; <ide> if (!name) { <ide> return path.resolve(nmPath, request); <ide> } <ide> function resolveExportsTarget(pkgPath, target, subpath, basePath, mappingKey) { <ide> const resolvedTarget = new URL(target, pkgPath); <ide> const pkgPathPath = pkgPath.pathname; <ide> const resolvedTargetPath = resolvedTarget.pathname; <del> if (StringPrototype.startsWith(resolvedTargetPath, pkgPathPath) && <del> StringPrototype.indexOf(resolvedTargetPath, '/node_modules/', <del> pkgPathPath.length - 1) === -1) { <add> if (StringPrototypeStartsWith(resolvedTargetPath, pkgPathPath) && <add> StringPrototypeIndexOf(resolvedTargetPath, '/node_modules/', <add> pkgPathPath.length - 1) === -1) { <ide> const resolved = new URL(subpath, resolvedTarget); <ide> const resolvedPath = resolved.pathname; <del> if (StringPrototype.startsWith(resolvedPath, resolvedTargetPath) && <del> StringPrototype.indexOf(resolvedPath, '/node_modules/', <del> pkgPathPath.length - 1) === -1) { <add> if (StringPrototypeStartsWith(resolvedPath, resolvedTargetPath) && <add> StringPrototypeIndexOf(resolvedPath, '/node_modules/', <add> pkgPathPath.length - 1) === -1) { <ide> return fileURLToPath(resolved); <ide> } <ide> } <ide> function resolveExportsTarget(pkgPath, target, subpath, basePath, mappingKey) { <ide> } <ide> } else if (typeof target === 'object' && target !== null) { <ide> if (experimentalConditionalExports && <del> ObjectPrototype.hasOwnProperty(target, 'require')) { <add> ObjectPrototypeHasOwnProperty(target, 'require')) { <ide> try { <ide> return resolveExportsTarget(pkgPath, target.require, subpath, <ide> basePath, mappingKey); <ide> function resolveExportsTarget(pkgPath, target, subpath, basePath, mappingKey) { <ide> } <ide> } <ide> if (experimentalConditionalExports && <del> ObjectPrototype.hasOwnProperty(target, 'node')) { <add> ObjectPrototypeHasOwnProperty(target, 'node')) { <ide> try { <ide> return resolveExportsTarget(pkgPath, target.node, subpath, <ide> basePath, mappingKey); <ide> } catch (e) { <ide> if (e.code !== 'MODULE_NOT_FOUND') throw e; <ide> } <ide> } <del> if (ObjectPrototype.hasOwnProperty(target, 'default')) { <add> if (ObjectPrototypeHasOwnProperty(target, 'default')) { <ide> try { <ide> return resolveExportsTarget(pkgPath, target.default, subpath, <ide> basePath, mappingKey); <ide> Module._findPath = function(request, paths, isMain) { <ide> if (!filename) { <ide> // Try it with each of the extensions <ide> if (exts === undefined) <del> exts = Object.keys(Module._extensions); <add> exts = ObjectKeys(Module._extensions); <ide> filename = tryExtensions(basePath, exts, isMain); <ide> } <ide> } <ide> <ide> if (!filename && rc === 1) { // Directory. 
<ide> // try it with each of the extensions at "index" <ide> if (exts === undefined) <del> exts = Object.keys(Module._extensions); <add> exts = ObjectKeys(Module._extensions); <ide> filename = tryPackage(basePath, exts, isMain, request); <ide> } <ide> <ide> const CircularRequirePrototypeWarningProxy = new Proxy({}, { <ide> }, <ide> <ide> getOwnPropertyDescriptor(target, prop) { <del> if (ObjectPrototype.hasOwnProperty(target, prop)) <del> return Object.getOwnPropertyDescriptor(target, prop); <add> if (ObjectPrototypeHasOwnProperty(target, prop)) <add> return ObjectGetOwnPropertyDescriptor(target, prop); <ide> emitCircularRequireWarning(prop); <ide> return undefined; <ide> } <ide> const PublicObjectPrototype = global.Object.prototype; <ide> <ide> function getExportsForCircularRequire(module) { <ide> if (module.exports && <del> Object.getPrototypeOf(module.exports) === PublicObjectPrototype && <add> ObjectGetPrototypeOf(module.exports) === PublicObjectPrototype && <ide> // Exclude transpiled ES6 modules / TypeScript code because those may <ide> // employ unusual patterns for accessing 'module.exports'. That should be <ide> // okay because ES6 modules have a different approach to circular <ide> // dependencies anyway. <ide> !module.exports.__esModule) { <ide> // This is later unset once the module is done loading. <del> Object.setPrototypeOf(module.exports, CircularRequirePrototypeWarningProxy); <add> ObjectSetPrototypeOf(module.exports, CircularRequirePrototypeWarningProxy); <ide> } <ide> <ide> return module.exports; <ide> Module._load = function(request, parent, isMain) { <ide> delete relativeResolveCache[relResolveCacheIdentifier]; <ide> } <ide> } else if (module.exports && <del> Object.getPrototypeOf(module.exports) === <add> ObjectGetPrototypeOf(module.exports) === <ide> CircularRequirePrototypeWarningProxy) { <del> Object.setPrototypeOf(module.exports, PublicObjectPrototype); <add> ObjectSetPrototypeOf(module.exports, PublicObjectPrototype); <ide> } <ide> } <ide> <ide> Module._extensions['.json'] = function(module, filename) { <ide> } <ide> <ide> try { <del> module.exports = JSON.parse(stripBOM(content)); <add> module.exports = JSONParse(stripBOM(content)); <ide> } catch (err) { <ide> err.message = filename + ': ' + err.message; <ide> throw err; <ide><path>lib/internal/modules/esm/create_dynamic_module.js <ide> 'use strict'; <ide> <del>const { ArrayPrototype, JSON, Object } = primordials; <add>const { <add> ArrayPrototypeJoin, <add> ArrayPrototypeMap, <add> JSONStringify, <add> ObjectCreate, <add>} = primordials; <ide> <ide> const debug = require('internal/util/debuglog').debuglog('esm'); <ide> <ide> function createImport(impt, index) { <del> const imptPath = JSON.stringify(impt); <add> const imptPath = JSONStringify(impt); <ide> return `import * as $import_${index} from ${imptPath}; <ide> import.meta.imports[${imptPath}] = $import_${index};`; <ide> } <ide> import.meta.exports.${name} = { <ide> const createDynamicModule = (imports, exports, url = '', evaluate) => { <ide> debug('creating ESM facade for %s with exports: %j', url, exports); <ide> const source = ` <del>${ArrayPrototype.join(ArrayPrototype.map(imports, createImport), '\n')} <del>${ArrayPrototype.join(ArrayPrototype.map(exports, createExport), '\n')} <add>${ArrayPrototypeJoin(ArrayPrototypeMap(imports, createImport), '\n')} <add>${ArrayPrototypeJoin(ArrayPrototypeMap(exports, createExport), '\n')} <ide> import.meta.done(); <ide> `; <ide> const { ModuleWrap, callbackMap } = internalBinding('module_wrap'); <ide> const m = new 
ModuleWrap(`${url}`, undefined, source, 0, 0); <ide> <ide> const readyfns = new Set(); <ide> const reflect = { <del> exports: Object.create(null), <add> exports: ObjectCreate(null), <ide> onReady: (cb) => { readyfns.add(cb); }, <ide> }; <ide> <ide> if (imports.length) <del> reflect.imports = Object.create(null); <add> reflect.imports = ObjectCreate(null); <ide> <ide> callbackMap.set(m, { <ide> initializeImportMeta: (meta, wrap) => { <ide><path>lib/internal/modules/esm/default_resolve.js <ide> 'use strict'; <ide> <add>const { <add> SafeMap, <add>} = primordials; <add> <ide> const internalFS = require('internal/fs/utils'); <ide> const { NativeModule } = require('internal/bootstrap/loaders'); <ide> const { extname } = require('path'); <ide> const { URL, pathToFileURL, fileURLToPath } = require('internal/url'); <ide> const { ERR_INPUT_TYPE_NOT_ALLOWED, <ide> ERR_UNKNOWN_FILE_EXTENSION } = require('internal/errors').codes; <ide> <del>const { SafeMap } = primordials; <del> <ide> const realpathCache = new SafeMap(); <ide> <ide> // const TYPE_NONE = 0; <ide><path>lib/internal/modules/esm/loader.js <ide> 'use strict'; <ide> <del>const { FunctionPrototype } = primordials; <add>const { <add> FunctionPrototypeBind, <add> ObjectSetPrototypeOf, <add> SafeMap, <add>} = primordials; <ide> <ide> const { <ide> ERR_INVALID_RETURN_PROPERTY, <ide> const { getOptionValue } = require('internal/options'); <ide> <ide> const debug = require('internal/util/debuglog').debuglog('esm'); <ide> <del>const { <del> Object, <del> SafeMap <del>} = primordials; <del> <ide> /* A Loader instance is used as the main entry point for loading ES modules. <ide> * Currently, this is a singleton -- there is only one used for loading <ide> * the main module and everything in its dependency graph. */ <ide> class Loader { <ide> hook({ resolve, dynamicInstantiate }) { <ide> // Use .bind() to avoid giving access to the Loader instance when called. 
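// The uncurried FunctionPrototypeBind(fn, thisArg, ...args) used just below
// behaves like fn.bind(thisArg, ...args), so binding with a null receiver keeps
// the Loader instance out of the hook's `this`. Rough standalone equivalent
// (hypothetical hook function):
function resolveHook(specifier) { return { url: specifier }; }
const detached = resolveHook.bind(null);    // what FunctionPrototypeBind(resolveHook, null) yields
console.log(detached('file:///demo.mjs'));  // { url: 'file:///demo.mjs' }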
<ide> if (resolve !== undefined) <del> this._resolve = FunctionPrototype.bind(resolve, null); <add> this._resolve = FunctionPrototypeBind(resolve, null); <ide> if (dynamicInstantiate !== undefined) { <ide> this._dynamicInstantiate = <del> FunctionPrototype.bind(dynamicInstantiate, null); <add> FunctionPrototypeBind(dynamicInstantiate, null); <ide> } <ide> } <ide> <ide> class Loader { <ide> } <ide> } <ide> <del>Object.setPrototypeOf(Loader.prototype, null); <add>ObjectSetPrototypeOf(Loader.prototype, null); <ide> <ide> exports.Loader = Loader; <ide><path>lib/internal/modules/esm/module_job.js <ide> 'use strict'; <ide> <ide> const { <del> Object, <add> ObjectSetPrototypeOf, <ide> SafeSet, <del> SafePromise <add> SafePromise, <ide> } = primordials; <ide> <ide> const { ModuleWrap } = internalBinding('module_wrap'); <ide> class ModuleJob { <ide> return { module, result: module.evaluate(timeout, breakOnSigint) }; <ide> } <ide> } <del>Object.setPrototypeOf(ModuleJob.prototype, null); <add>ObjectSetPrototypeOf(ModuleJob.prototype, null); <ide> module.exports = ModuleJob; <ide><path>lib/internal/modules/esm/module_map.js <ide> <ide> const ModuleJob = require('internal/modules/esm/module_job'); <ide> const { <del> SafeMap <add> SafeMap, <ide> } = primordials; <ide> const debug = require('internal/util/debuglog').debuglog('esm'); <ide> const { ERR_INVALID_ARG_TYPE } = require('internal/errors').codes; <ide><path>lib/internal/modules/esm/translators.js <ide> /* global WebAssembly */ <ide> <ide> const { <del> JSON, <del> Object, <add> JSONParse, <add> ObjectKeys, <ide> SafeMap, <del> StringPrototype <add> StringPrototypeReplace, <ide> } = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> ERR_UNKNOWN_BUILTIN_MODULE <ide> } = require('internal/errors').codes; <ide> const readFileAsync = promisify(fs.readFile); <del>const JsonParse = JSON.parse; <ide> const { maybeCacheSourceMap } = require('internal/source_map/source_map_cache'); <ide> const moduleWrap = internalBinding('module_wrap'); <ide> const { ModuleWrap } = moduleWrap; <ide> translators.set('commonjs', function commonjsStrategy(url, isMain) { <ide> return cached; <ide> } <ide> const module = CJSModule._cache[ <del> isWindows ? StringPrototype.replace(pathname, winSepRegEx, '\\') : pathname <add> isWindows ? StringPrototypeReplace(pathname, winSepRegEx, '\\') : pathname <ide> ]; <ide> if (module && module.loaded) { <ide> const exports = module.exports; <ide> translators.set('json', async function jsonStrategy(url) { <ide> let module; <ide> if (pathname) { <ide> modulePath = isWindows ? 
<del> StringPrototype.replace(pathname, winSepRegEx, '\\') : pathname; <add> StringPrototypeReplace(pathname, winSepRegEx, '\\') : pathname; <ide> module = CJSModule._cache[modulePath]; <ide> if (module && module.loaded) { <ide> const exports = module.exports; <ide> translators.set('json', async function jsonStrategy(url) { <ide> } <ide> } <ide> try { <del> const exports = JsonParse(stripBOM(content)); <add> const exports = JSONParse(stripBOM(content)); <ide> module = { <ide> exports, <ide> loaded: true <ide> translators.set('wasm', async function(url) { <ide> <ide> return createDynamicModule(imports, exports, url, (reflect) => { <ide> const { exports } = new WebAssembly.Instance(compiled, reflect.imports); <del> for (const expt of Object.keys(exports)) <add> for (const expt of ObjectKeys(exports)) <ide> reflect.exports[expt].set(exports[expt]); <ide> }).module; <ide> }); <ide><path>lib/internal/per_context/domexception.js <ide> 'use strict'; <ide> <ide> const { <add> ObjectDefineProperties, <add> ObjectDefineProperty, <ide> SafeWeakMap, <ide> SafeMap, <del> Object, <del> Symbol <add> SymbolToStringTag, <ide> } = primordials; <ide> <ide> class ERR_INVALID_THIS extends TypeError { <ide> class DOMException extends Error { <ide> } <ide> } <ide> <del>Object.defineProperties(DOMException.prototype, { <del> [Symbol.toStringTag]: { configurable: true, value: 'DOMException' }, <add>ObjectDefineProperties(DOMException.prototype, { <add> [SymbolToStringTag]: { configurable: true, value: 'DOMException' }, <ide> name: { enumerable: true, configurable: true }, <ide> message: { enumerable: true, configurable: true }, <ide> code: { enumerable: true, configurable: true } <ide> function forEachCode(fn) { <ide> <ide> forEachCode((name, codeName, value) => { <ide> const desc = { enumerable: true, value }; <del> Object.defineProperty(DOMException, codeName, desc); <del> Object.defineProperty(DOMException.prototype, codeName, desc); <add> ObjectDefineProperty(DOMException, codeName, desc); <add> ObjectDefineProperty(DOMException.prototype, codeName, desc); <ide> }); <ide> <ide> exports.DOMException = DOMException; <ide><path>lib/internal/per_context/primordials.js <ide> function copyProps(src, dest) { <ide> } <ide> } <ide> <del>function copyPrototype(src, dest) { <add>function copyPropsRenamed(src, dest, prefix) { <ide> for (const key of Reflect.ownKeys(src)) { <del> if (!Reflect.getOwnPropertyDescriptor(dest, key)) { <add> if (typeof key === 'string') { <add> Reflect.defineProperty( <add> dest, <add> `${prefix}${key[0].toUpperCase()}${key.slice(1)}`, <add> Reflect.getOwnPropertyDescriptor(src, key)); <add> } <add> } <add>} <add> <add>function copyPrototype(src, dest, prefix) { <add> for (const key of Reflect.ownKeys(src)) { <add> if (typeof key === 'string') { <ide> const desc = Reflect.getOwnPropertyDescriptor(src, key); <ide> if (typeof desc.value === 'function') { <ide> desc.value = uncurryThis(desc.value); <ide> } <del> Reflect.defineProperty(dest, key, desc); <add> Reflect.defineProperty( <add> dest, <add> `${prefix}${key[0].toUpperCase()}${key.slice(1)}`, <add> desc); <ide> } <ide> } <ide> } <ide> primordials.SafePromise = makeSafe( <ide> 'Math', <ide> 'Reflect' <ide> ].forEach((name) => { <del> const target = primordials[name] = Object.create(null); <del> copyProps(global[name], target); <add> copyPropsRenamed(global[name], primordials, name); <ide> }); <ide> <ide> // Create copies of intrinsic objects <ide> [ <ide> 'Array', <add> 'ArrayBuffer', <ide> 'BigInt', <ide> 'Boolean', <ide> 'Date', <ide> 
primordials.SafePromise = makeSafe( <ide> 'Set', <ide> 'String', <ide> 'Symbol', <add> 'WeakMap', <add> 'WeakSet', <ide> ].forEach((name) => { <ide> const original = global[name]; <del> const target = primordials[name] = Object.setPrototypeOf({ <add> primordials[name] = Object.setPrototypeOf({ <ide> [name]: function(...args) { <ide> return new.target ? <ide> ReflectConstruct(original, args, new.target) : <ide> ReflectApply(original, this, args); <ide> } <ide> }[name], null); <del> copyProps(original, target); <del> const proto = primordials[name + 'Prototype'] = Object.create(null); <del> copyPrototype(original.prototype, proto); <add> copyPropsRenamed(original, primordials, name); <add> copyPrototype(original.prototype, primordials, `${name}Prototype`); <ide> }); <ide> <ide> Object.setPrototypeOf(primordials, null); <ide><path>lib/internal/policy/manifest.js <ide> <ide> const { <ide> Map, <del> MapPrototype, <del> Object, <del> RegExpPrototype, <add> MapPrototypeSet, <add> ObjectEntries, <add> ObjectFreeze, <add> ObjectSetPrototypeOf, <add> RegExpPrototypeTest, <ide> SafeMap, <del> uncurryThis <add> uncurryThis, <ide> } = primordials; <ide> const { <ide> canBeRequiredByUsers <ide> const HashUpdate = uncurryThis(crypto.Hash.prototype.update); <ide> const HashDigest = uncurryThis(crypto.Hash.prototype.digest); <ide> const BufferEquals = uncurryThis(Buffer.prototype.equals); <ide> const BufferToString = uncurryThis(Buffer.prototype.toString); <del>const { entries } = Object; <ide> const kRelativeURLStringPattern = /^\.{0,2}\//; <ide> const { getOptionValue } = require('internal/options'); <ide> const shouldAbortOnUncaughtException = <ide> class Manifest { <ide> } <ide> <ide> this.#reaction = reaction; <del> const manifestEntries = entries(obj.resources); <add> const manifestEntries = ObjectEntries(obj.resources); <ide> <ide> const parsedURLs = new SafeMap(); <ide> for (let i = 0; i < manifestEntries.length; i++) { <ide> class Manifest { <ide> resourceURL = parsedURLs.get(resourceHREF); <ide> resourceHREF = resourceURL.href; <ide> } else if ( <del> RegExpPrototype.test(kRelativeURLStringPattern, resourceHREF) <add> RegExpPrototypeTest(kRelativeURLStringPattern, resourceHREF) <ide> ) { <ide> resourceURL = new URL(resourceHREF, manifestURL); <ide> resourceHREF = resourceURL.href; <ide> class Manifest { <ide> if (integrity != null) { <ide> debug(`Manifest contains integrity for url ${originalHREF}`); <ide> if (typeof integrity === 'string') { <del> const sri = Object.freeze(SRI.parse(integrity)); <add> const sri = ObjectFreeze(SRI.parse(integrity)); <ide> if (integrities.has(resourceHREF)) { <ide> const old = integrities.get(resourceHREF); <ide> let mismatch = false; <ide> class Manifest { <ide> parsedURLs.set(to, resolvedURL); <ide> parsedURLs.set(href, resolvedURL); <ide> return resolvedURL; <del> } else if (RegExpPrototype.test(kRelativeURLStringPattern, to)) { <add> } else if (RegExpPrototypeTest(kRelativeURLStringPattern, to)) { <ide> const resolvedURL = new URL(to, manifestURL); <ide> const href = resourceURL.href; <ide> parsedURLs.set(to, resolvedURL); <ide> class Manifest { <ide> 'dependencies'); <ide> } <ide> } <del> Object.freeze(this); <add> ObjectFreeze(this); <ide> } <ide> <ide> getRedirector(requester) { <ide> class Manifest { <ide> timingSafeEqual(digest, expected)) { <ide> return true; <ide> } <del> MapPrototype.set( <add> MapPrototypeSet( <ide> realIntegrities, <ide> algorithm, <ide> BufferToString(digest, 'base64') <ide> class Manifest { <ide> } <ide> <ide> // Lock everything 
down to avoid problems even if reference is leaked somehow <del>Object.setPrototypeOf(Manifest, null); <del>Object.setPrototypeOf(Manifest.prototype, null); <del>Object.freeze(Manifest); <del>Object.freeze(Manifest.prototype); <del>module.exports = Object.freeze({ Manifest }); <add>ObjectSetPrototypeOf(Manifest, null); <add>ObjectSetPrototypeOf(Manifest.prototype, null); <add>ObjectFreeze(Manifest); <add>ObjectFreeze(Manifest.prototype); <add>module.exports = ObjectFreeze({ Manifest }); <ide><path>lib/internal/policy/sri.js <ide> // Value of https://w3c.github.io/webappsec-subresource-integrity/#the-integrity-attribute <ide> <ide> const { <del> Object, <del> RegExpPrototype, <del> StringPrototype <add> ObjectDefineProperty, <add> ObjectFreeze, <add> ObjectSeal, <add> RegExpPrototypeExec, <add> RegExpPrototypeTest, <add> StringPrototypeSlice, <ide> } = primordials; <ide> <ide> // Returns [{algorithm, value (in base64 string), options,}] <ide> const kHASH_EXPRESSION = `(${kHASH_ALGO})-(${kHASH_VALUE})`; <ide> const kOPTION_EXPRESSION = `(${kVCHAR}*)`; <ide> const kHASH_WITH_OPTIONS = `${kHASH_EXPRESSION}(?:[?](${kOPTION_EXPRESSION}))?`; <ide> const kSRIPattern = RegExp(`(${kWSP}*)(?:${kHASH_WITH_OPTIONS})`, 'g'); <del>const { freeze } = Object; <del>Object.seal(kSRIPattern); <add>ObjectSeal(kSRIPattern); <ide> const kAllWSP = RegExp(`^${kWSP}*$`); <del>Object.seal(kAllWSP); <add>ObjectSeal(kAllWSP); <ide> <ide> const BufferFrom = require('buffer').Buffer.from; <ide> <ide> const parse = (str) => { <ide> let prevIndex = 0; <ide> let match; <ide> const entries = []; <del> while (match = RegExpPrototype.exec(kSRIPattern, str)) { <add> while (match = RegExpPrototypeExec(kSRIPattern, str)) { <ide> if (match.index !== prevIndex) { <ide> throw new ERR_SRI_PARSE(str, str.charAt(prevIndex), prevIndex); <ide> } <ide> const parse = (str) => { <ide> } <ide> <ide> // Avoid setters being fired <del> Object.defineProperty(entries, entries.length, { <add> ObjectDefineProperty(entries, entries.length, { <ide> enumerable: true, <ide> configurable: true, <del> value: freeze({ <add> value: ObjectFreeze({ <ide> __proto__: null, <ide> algorithm: match[2], <ide> value: BufferFrom(match[3], 'base64'), <ide> const parse = (str) => { <ide> } <ide> <ide> if (prevIndex !== str.length) { <del> if (!RegExpPrototype.test(kAllWSP, StringPrototype.slice(str, prevIndex))) { <add> if (!RegExpPrototypeTest(kAllWSP, StringPrototypeSlice(str, prevIndex))) { <ide> throw new ERR_SRI_PARSE(str, str.charAt(prevIndex), prevIndex); <ide> } <ide> } <ide><path>lib/internal/process/execution.js <ide> 'use strict'; <ide> <del>const { JSON } = primordials; <add>const { <add> JSONStringify, <add>} = primordials; <ide> <ide> const path = require('path'); <ide> <ide> function evalScript(name, body, breakFirstLine, print) { <ide> module.paths = CJSModule._nodeModulePaths(cwd); <ide> global.kVmBreakFirstLineSymbol = kVmBreakFirstLineSymbol; <ide> const script = ` <del> global.__filename = ${JSON.stringify(name)}; <add> global.__filename = ${JSONStringify(name)}; <ide> global.exports = exports; <ide> global.module = module; <ide> global.__dirname = __dirname; <ide> global.require = require; <ide> const { kVmBreakFirstLineSymbol } = global; <ide> delete global.kVmBreakFirstLineSymbol; <ide> return require("vm").runInThisContext( <del> ${JSON.stringify(body)}, { <del> filename: ${JSON.stringify(name)}, <add> ${JSONStringify(body)}, { <add> filename: ${JSONStringify(name)}, <ide> displayErrors: true, <ide> [kVmBreakFirstLineSymbol]: 
${!!breakFirstLine} <ide> });\n`; <ide><path>lib/internal/process/per_thread.js <ide> // thread and the worker threads. <ide> <ide> const { <del> Object, <del> RegExpPrototype, <del> SetPrototype, <del> StringPrototype <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectFreeze, <add> ObjectGetOwnPropertyDescriptors, <add> RegExpPrototypeTest, <add> SetPrototypeHas, <add> StringPrototypeReplace, <ide> } = primordials; <ide> <ide> const { <ide> function buildAllowedFlags() { <ide> } <ide> <ide> const trimLeadingDashes = <del> (flag) => StringPrototype.replace(flag, leadingDashesRegex, ''); <add> (flag) => StringPrototypeReplace(flag, leadingDashesRegex, ''); <ide> <ide> // Save these for comparison against flags provided to <ide> // process.allowedNodeEnvironmentFlags.has() which lack leading dashes. <ide> // Avoid interference w/ user code by flattening `Set.prototype` into <ide> // each object. <del> const nodeFlags = Object.defineProperties( <add> const nodeFlags = ObjectDefineProperties( <ide> new Set(allowedNodeEnvironmentFlags.map(trimLeadingDashes)), <del> Object.getOwnPropertyDescriptors(Set.prototype) <add> ObjectGetOwnPropertyDescriptors(Set.prototype) <ide> ); <ide> <ide> class NodeEnvironmentFlagsSet extends Set { <ide> function buildAllowedFlags() { <ide> <ide> // The super constructor consumes `add`, but <ide> // disallow any future adds. <del> Object.defineProperty(this, 'add', { <add> ObjectDefineProperty(this, 'add', { <ide> value: () => this <ide> }); <ide> } <ide> function buildAllowedFlags() { <ide> // on a dummy option set and see whether it rejects the argument or <ide> // not. <ide> if (typeof key === 'string') { <del> key = StringPrototype.replace(key, replaceUnderscoresRegex, '-'); <del> if (RegExpPrototype.test(leadingDashesRegex, key)) { <del> key = StringPrototype.replace(key, trailingValuesRegex, ''); <del> return SetPrototype.has(this, key); <add> key = StringPrototypeReplace(key, replaceUnderscoresRegex, '-'); <add> if (RegExpPrototypeTest(leadingDashesRegex, key)) { <add> key = StringPrototypeReplace(key, trailingValuesRegex, ''); <add> return SetPrototypeHas(this, key); <ide> } <del> return SetPrototype.has(nodeFlags, key); <add> return SetPrototypeHas(nodeFlags, key); <ide> } <ide> return false; <ide> } <ide> } <ide> <del> Object.freeze(NodeEnvironmentFlagsSet.prototype.constructor); <del> Object.freeze(NodeEnvironmentFlagsSet.prototype); <add> ObjectFreeze(NodeEnvironmentFlagsSet.prototype.constructor); <add> ObjectFreeze(NodeEnvironmentFlagsSet.prototype); <ide> <del> return Object.freeze(new NodeEnvironmentFlagsSet( <add> return ObjectFreeze(new NodeEnvironmentFlagsSet( <ide> allowedNodeEnvironmentFlags <ide> )); <ide> } <ide><path>lib/internal/process/policy.js <ide> 'use strict'; <ide> <del>const { JSON, Object, Reflect } = primordials; <add>const { <add> JSONParse, <add> ObjectFreeze, <add> ReflectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> ERR_MANIFEST_TDZ, <ide> let manifest; <ide> let manifestSrc; <ide> let manifestURL; <ide> <del>module.exports = Object.freeze({ <add>module.exports = ObjectFreeze({ <ide> __proto__: null, <ide> setup(src, url) { <ide> manifestSrc = src; <ide> module.exports = Object.freeze({ <ide> return; <ide> } <ide> <del> const json = JSON.parse(src, (_, o) => { <add> const json = JSONParse(src, (_, o) => { <ide> if (o && typeof o === 'object') { <del> Reflect.setPrototypeOf(o, null); <del> Object.freeze(o); <add> ReflectSetPrototypeOf(o, null); <add> ObjectFreeze(o); <ide> } <ide> return o; 
<ide> }); <ide><path>lib/internal/process/promises.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add>} = primordials; <ide> <ide> const { <ide> tickInfo, <ide> function getErrorWithoutStack(name, message) { <ide> // eslint-disable-next-line no-restricted-syntax <ide> const err = new Error(message); <ide> Error.stackTraceLimit = tmp; <del> Object.defineProperty(err, 'name', { <add> ObjectDefineProperty(err, 'name', { <ide> value: name, <ide> enumerable: false, <ide> writable: true, <ide><path>lib/internal/process/report.js <ide> const { <ide> } = require('internal/errors').codes; <ide> const { validateSignalName, validateString } = require('internal/validators'); <ide> const nr = internalBinding('report'); <del>const { JSON } = primordials; <add>const { <add> JSONParse, <add>} = primordials; <ide> const report = { <ide> writeReport(file, err) { <ide> if (typeof file === 'object' && file !== null) { <ide> const report = { <ide> else if (err === null || typeof err !== 'object') <ide> throw new ERR_INVALID_ARG_TYPE('err', 'Object', err); <ide> <del> return JSON.parse(nr.getReport(err.stack)); <add> return JSONParse(nr.getReport(err.stack)); <ide> }, <ide> get directory() { <ide> return nr.getDirectory(); <ide><path>lib/internal/process/task_queues.js <ide> 'use strict'; <ide> <del>const { FunctionPrototype } = primordials; <add>const { <add> FunctionPrototypeBind, <add>} = primordials; <ide> <ide> const { <ide> // For easy access to the nextTick state in the C++ land, <ide> function queueMicrotask(callback) { <ide> const asyncResource = createMicrotaskResource(); <ide> asyncResource.callback = callback; <ide> <del> enqueueMicrotask(FunctionPrototype.bind(runMicrotask, asyncResource)); <add> enqueueMicrotask(FunctionPrototypeBind(runMicrotask, asyncResource)); <ide> } <ide> <ide> module.exports = { <ide><path>lib/internal/repl.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add>} = primordials; <ide> <ide> const REPL = require('repl'); <ide> const { kStandaloneREPL } = require('internal/repl/utils'); <ide> <del>module.exports = Object.create(REPL); <add>module.exports = ObjectCreate(REPL); <ide> module.exports.createInternalRepl = createRepl; <ide> <ide> function createRepl(env, opts, cb) { <ide><path>lib/internal/repl/await.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectKeys, <add>} = primordials; <ide> <ide> const acorn = require('internal/deps/acorn/acorn/dist/acorn'); <ide> const walk = require('internal/deps/acorn/acorn-walk/dist/walk'); <ide> const visitorsWithoutAncestors = { <ide> }; <ide> <ide> const visitors = {}; <del>for (const nodeType of Object.keys(walk.base)) { <add>for (const nodeType of ObjectKeys(walk.base)) { <ide> const callback = visitorsWithoutAncestors[nodeType] || walk.base[nodeType]; <ide> visitors[nodeType] = (node, state, c) => { <ide> const isNew = node !== state.ancestors[state.ancestors.length - 1]; <ide><path>lib/internal/source_map/source_map_cache.js <ide> 'use strict'; <ide> <ide> const { <del> JSON, <del> Object: { <del> create: ObjectCreate, <del> keys: ObjectKeys, <del> getOwnPropertyDescriptor: ObjectGetOwnPropertyDescriptor, <del> }, <del> ObjectPrototype: { <del> hasOwnProperty: ObjectHasOwnProperty <del> }, <del> MapPrototype: { <del> entries: MapEntries <del> }, uncurryThis <add> JSONParse, <add> ObjectCreate, <add> ObjectKeys, <add> ObjectGetOwnPropertyDescriptor, <add> 
ObjectPrototypeHasOwnProperty, <add> MapPrototypeEntries, <add> WeakMapPrototypeGet, <add> uncurryThis, <ide> } = primordials; <ide> <del>const MapIteratorNext = uncurryThis(MapEntries(new Map()).next); <del>const WeakMapGet = uncurryThis(WeakMap.prototype.get); <add>const MapIteratorNext = uncurryThis(MapPrototypeEntries(new Map()).next); <ide> <ide> function ObjectGetValueSafe(obj, key) { <ide> const desc = ObjectGetOwnPropertyDescriptor(obj, key); <del> return ObjectHasOwnProperty(desc, 'value') ? desc.value : undefined; <add> return ObjectPrototypeHasOwnProperty(desc, 'value') ? desc.value : undefined; <ide> } <ide> <ide> // See https://sourcemaps.info/spec.html for SourceMap V3 specification. <ide> function lineLengths(content) { <ide> function sourceMapFromFile(sourceMapFile) { <ide> try { <ide> const content = fs.readFileSync(sourceMapFile, 'utf8'); <del> const data = JSON.parse(content); <add> const data = JSONParse(content); <ide> return sourcesToAbsolute(dirname(sourceMapFile), data); <ide> } catch (err) { <ide> debug(err.stack); <ide> function sourceMapFromDataUrl(basePath, url) { <ide> const decodedData = base64 ? <ide> Buffer.from(data, 'base64').toString('utf8') : data; <ide> try { <del> const parsedData = JSON.parse(decodedData); <add> const parsedData = JSONParse(decodedData); <ide> return sourcesToAbsolute(basePath, parsedData); <ide> } catch (err) { <ide> debug(err.stack); <ide> function rekeySourceMap(cjsModuleInstance, newInstance) { <ide> function sourceMapCacheToObject() { <ide> const obj = ObjectCreate(null); <ide> <del> const it = MapEntries(esmSourceMapCache); <add> const it = MapPrototypeEntries(esmSourceMapCache); <ide> let entry; <ide> while (!(entry = MapIteratorNext(it)).done) { <ide> const k = entry.value[0]; <ide> function appendCJSCache(obj) { <ide> for (let i = 0; i < cjsModules.length; i++) { <ide> const key = cjsModules[i]; <ide> const module = ObjectGetValueSafe(cjsModuleCache, key); <del> const value = WeakMapGet(cjsSourceMapCache, module); <add> const value = WeakMapPrototypeGet(cjsSourceMapCache, module); <ide> if (value) { <ide> // This is okay because `obj` has a null prototype. 
<ide> obj[`file://${key}`] = { <ide><path>lib/internal/streams/async_iterator.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add> ObjectGetPrototypeOf, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const finished = require('internal/streams/end-of-stream'); <ide> <ide> function wrapForNext(lastPromise, iter) { <ide> }; <ide> } <ide> <del>const AsyncIteratorPrototype = Object.getPrototypeOf( <del> Object.getPrototypeOf(async function* () {}).prototype); <add>const AsyncIteratorPrototype = ObjectGetPrototypeOf( <add> ObjectGetPrototypeOf(async function* () {}).prototype); <ide> <del>const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ <add>const ReadableStreamAsyncIteratorPrototype = ObjectSetPrototypeOf({ <ide> get stream() { <ide> return this[kStream]; <ide> }, <ide> const ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf({ <ide> }, AsyncIteratorPrototype); <ide> <ide> const createReadableStreamAsyncIterator = (stream) => { <del> const iterator = Object.create(ReadableStreamAsyncIteratorPrototype, { <add> const iterator = ObjectCreate(ReadableStreamAsyncIteratorPrototype, { <ide> [kStream]: { value: stream, writable: true }, <ide> [kLastResolve]: { value: null, writable: true }, <ide> [kLastReject]: { value: null, writable: true }, <ide><path>lib/internal/streams/lazy_transform.js <ide> // for the stream, one conventional and one non-conventional. <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const stream = require('stream'); <ide> <ide> function LazyTransform(options) { <ide> this.writable = true; <ide> this.readable = true; <ide> } <del>Object.setPrototypeOf(LazyTransform.prototype, stream.Transform.prototype); <del>Object.setPrototypeOf(LazyTransform, stream.Transform); <add>ObjectSetPrototypeOf(LazyTransform.prototype, stream.Transform.prototype); <add>ObjectSetPrototypeOf(LazyTransform, stream.Transform); <ide> <ide> function makeGetter(name) { <ide> return function() { <ide> function makeGetter(name) { <ide> <ide> function makeSetter(name) { <ide> return function(val) { <del> Object.defineProperty(this, name, { <add> ObjectDefineProperty(this, name, { <ide> value: val, <ide> enumerable: true, <ide> configurable: true, <ide> function makeSetter(name) { <ide> }; <ide> } <ide> <del>Object.defineProperties(LazyTransform.prototype, { <add>ObjectDefineProperties(LazyTransform.prototype, { <ide> _readableState: { <ide> get: makeGetter('_readableState'), <ide> set: makeSetter('_readableState'), <ide><path>lib/internal/streams/legacy.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const EE = require('events'); <ide> <ide> function Stream() { <ide> EE.call(this); <ide> } <del>Object.setPrototypeOf(Stream.prototype, EE.prototype); <del>Object.setPrototypeOf(Stream, EE); <add>ObjectSetPrototypeOf(Stream.prototype, EE.prototype); <add>ObjectSetPrototypeOf(Stream, EE); <ide> <ide> Stream.prototype.pipe = function(dest, options) { <ide> const source = this; <ide><path>lib/internal/streams/state.js <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathFloor, <add>} = primordials; <ide> <ide> const { ERR_INVALID_OPT_VALUE } = require('internal/errors').codes; <ide> <ide> function getHighWaterMark(state, options, duplexKey, isDuplex) { 
<ide> const name = isDuplex ? duplexKey : 'highWaterMark'; <ide> throw new ERR_INVALID_OPT_VALUE(name, hwm); <ide> } <del> return Math.floor(hwm); <add> return MathFloor(hwm); <ide> } <ide> <ide> // Default value <ide><path>lib/internal/timers.js <ide> // timers within (or creation of a new list). However, these operations combined <ide> // have shown to be trivial in comparison to other timers architectures. <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> MathTrunc, <add> ObjectCreate, <add>} = primordials; <ide> <ide> const { <ide> scheduleTimer, <ide> const timerListQueue = new PriorityQueue(compareTimersLists, setPosition); <ide> // <ide> // - key = time in milliseconds <ide> // - value = linked list <del>const timerListMap = Object.create(null); <add>const timerListMap = ObjectCreate(null); <ide> <ide> function initAsyncResource(resource, type) { <ide> const asyncId = resource[async_id_symbol] = newAsyncId(); <ide> function insert(item, refed, start) { <ide> return; <ide> <ide> // Truncate so that accuracy of sub-millisecond timers is not assumed. <del> msecs = Math.trunc(msecs); <add> msecs = MathTrunc(msecs); <ide> <ide> item._idleStart = start; <ide> <ide> function getTimerCallbacks(runNextTicks) { <ide> // Check if this loop iteration is too early for the next timer. <ide> // This happens if there are more timers scheduled for later in the list. <ide> if (diff < msecs) { <del> list.expiry = Math.max(timer._idleStart + msecs, now + 1); <add> list.expiry = MathMax(timer._idleStart + msecs, now + 1); <ide> list.id = timerListId++; <ide> timerListQueue.percolateDown(1); <ide> debug('%d list wait because diff is %d', msecs, diff); <ide><path>lib/internal/tls.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add>} = primordials; <ide> <ide> // Example: <ide> // C=US\nST=CA\nL=SF\nO=Joyent\nOU=Node.js\nCN=ca1\[email protected] <ide> function parseCertString(s) { <del> const out = Object.create(null); <add> const out = ObjectCreate(null); <ide> const parts = s.split('\n'); <ide> for (let i = 0, len = parts.length; i < len; i++) { <ide> const sepIndex = parts[i].indexOf('='); <ide><path>lib/internal/trace_events_async_hooks.js <ide> 'use strict'; <ide> <del>const { Object, SafeMap, SafeSet } = primordials; <add>const { <add> ObjectKeys, <add> SafeMap, <add> SafeSet, <add>} = primordials; <ide> <ide> const { trace } = internalBinding('trace_events'); <ide> const async_wrap = internalBinding('async_wrap'); <ide> const kEnabled = Symbol('enabled'); <ide> // Embedder C++ API can't be emitted from async_wrap.cc. Thus they are <ide> // emitted using the JavaScript API. To prevent emitting the same event <ide> // twice the async_wrap.Providers list is used to filter the events. 
<del>const nativeProviders = new SafeSet(Object.keys(async_wrap.Providers)); <add>const nativeProviders = new SafeSet(ObjectKeys(async_wrap.Providers)); <ide> const typeMemory = new SafeMap(); <ide> <ide> function createHook() { <ide><path>lib/internal/url.js <ide> 'use strict'; <ide> <del>const { Object, Reflect } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertySymbols, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ReflectGetOwnPropertyDescriptor, <add> ReflectOwnKeys, <add>} = primordials; <ide> <ide> const { inspect } = require('internal/util/inspect'); <ide> const { <ide> const searchParams = Symbol('query'); <ide> const kFormat = Symbol('format'); <ide> <ide> // https://tc39.github.io/ecma262/#sec-%iteratorprototype%-object <del>const IteratorPrototype = Object.getPrototypeOf( <del> Object.getPrototypeOf([][Symbol.iterator]()) <add>const IteratorPrototype = ObjectGetPrototypeOf( <add> ObjectGetPrototypeOf([][Symbol.iterator]()) <ide> ); <ide> <ide> const unpairedSurrogateRe = <ide> class URLSearchParams { <ide> // Record<USVString, USVString> <ide> // Need to use reflection APIs for full spec compliance. <ide> this[searchParams] = []; <del> const keys = Reflect.ownKeys(init); <add> const keys = ReflectOwnKeys(init); <ide> for (let i = 0; i < keys.length; i++) { <ide> const key = keys[i]; <del> const desc = Reflect.getOwnPropertyDescriptor(init, key); <add> const desc = ReflectGetOwnPropertyDescriptor(init, key); <ide> if (desc !== undefined && desc.enumerable) { <ide> const typedKey = toUSVString(key); <ide> const typedValue = toUSVString(init[key]); <ide> class URL { <ide> <ide> [inspect.custom](depth, opts) { <ide> if (this == null || <del> Object.getPrototypeOf(this[context]) !== URLContext.prototype) { <add> ObjectGetPrototypeOf(this[context]) !== URLContext.prototype) { <ide> throw new ERR_INVALID_THIS('URL'); <ide> } <ide> <ide> class URL { <ide> <ide> const ctor = getConstructorOf(this); <ide> <del> const obj = Object.create({ <add> const obj = ObjectCreate({ <ide> constructor: ctor === null ? 
URL : ctor <ide> }); <ide> <ide> class URL { <ide> } <ide> } <ide> <del>Object.defineProperties(URL.prototype, { <add>ObjectDefineProperties(URL.prototype, { <ide> [kFormat]: { <ide> enumerable: false, <ide> configurable: false, <ide> function serializeParams(array) { <ide> // Mainly to mitigate func-name-matching ESLint rule <ide> function defineIDLClass(proto, classStr, obj) { <ide> // https://heycam.github.io/webidl/#dfn-class-string <del> Object.defineProperty(proto, Symbol.toStringTag, { <add> ObjectDefineProperty(proto, Symbol.toStringTag, { <ide> writable: false, <ide> enumerable: false, <ide> configurable: true, <ide> value: classStr <ide> }); <ide> <ide> // https://heycam.github.io/webidl/#es-operations <del> for (const key of Object.keys(obj)) { <del> Object.defineProperty(proto, key, { <add> for (const key of ObjectKeys(obj)) { <add> ObjectDefineProperty(proto, key, { <ide> writable: true, <ide> enumerable: true, <ide> configurable: true, <ide> value: obj[key] <ide> }); <ide> } <del> for (const key of Object.getOwnPropertySymbols(obj)) { <del> Object.defineProperty(proto, key, { <add> for (const key of ObjectGetOwnPropertySymbols(obj)) { <add> ObjectDefineProperty(proto, key, { <ide> writable: true, <ide> enumerable: false, <ide> configurable: true, <ide> defineIDLClass(URLSearchParams.prototype, 'URLSearchParams', { <ide> }); <ide> <ide> // https://heycam.github.io/webidl/#es-iterable-entries <del>Object.defineProperty(URLSearchParams.prototype, Symbol.iterator, { <add>ObjectDefineProperty(URLSearchParams.prototype, Symbol.iterator, { <ide> writable: true, <ide> configurable: true, <ide> value: URLSearchParams.prototype.entries <ide> }); <ide> <ide> // https://heycam.github.io/webidl/#dfn-default-iterator-object <ide> function createSearchParamsIterator(target, kind) { <del> const iterator = Object.create(URLSearchParamsIteratorPrototype); <add> const iterator = ObjectCreate(URLSearchParamsIteratorPrototype); <ide> iterator[context] = { <ide> target, <ide> kind, <ide> function createSearchParamsIterator(target, kind) { <ide> } <ide> <ide> // https://heycam.github.io/webidl/#dfn-iterator-prototype-object <del>const URLSearchParamsIteratorPrototype = Object.create(IteratorPrototype); <add>const URLSearchParamsIteratorPrototype = ObjectCreate(IteratorPrototype); <ide> <ide> defineIDLClass(URLSearchParamsIteratorPrototype, 'URLSearchParams Iterator', { <ide> next() { <ide> if (!this || <del> Object.getPrototypeOf(this) !== URLSearchParamsIteratorPrototype) { <add> ObjectGetPrototypeOf(this) !== URLSearchParamsIteratorPrototype) { <ide> throw new ERR_INVALID_THIS('URLSearchParamsIterator'); <ide> } <ide> <ide> function constructUrl(flags, protocol, username, password, <ide> ctx.fragment = fragment; <ide> ctx.host = host; <ide> <del> const url = Object.create(URL.prototype); <add> const url = ObjectCreate(URL.prototype); <ide> url[context] = ctx; <ide> const params = new URLSearchParams(); <ide> url[searchParams] = params; <ide><path>lib/internal/util.js <ide> 'use strict'; <ide> <del>const { Object, Reflect } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetOwnPropertyDescriptors, <add> ObjectGetPrototypeOf, <add> ObjectSetPrototypeOf, <add> ReflectConstruct, <add>} = primordials; <ide> const { <ide> codes: { <ide> ERR_INVALID_ARG_TYPE, <ide> function deprecate(fn, msg, code) { <ide> } <ide> } <ide> if (new.target) { <del> return Reflect.construct(fn, args, new.target); 
<add> return ReflectConstruct(fn, args, new.target); <ide> } <ide> return fn.apply(this, args); <ide> } <ide> <ide> // The wrapper will keep the same prototype as fn to maintain prototype chain <del> Object.setPrototypeOf(deprecated, fn); <add> ObjectSetPrototypeOf(deprecated, fn); <ide> if (fn.prototype) { <ide> // Setting this (rather than using Object.setPrototype, as above) ensures <ide> // that calling the unwrapped constructor gives an instanceof the wrapped <ide> function cachedResult(fn) { <ide> // B() instanceof B // true <ide> function createClassWrapper(type) { <ide> function fn(...args) { <del> return Reflect.construct(type, args, new.target || type); <add> return ReflectConstruct(type, args, new.target || type); <ide> } <ide> // Mask the wrapper function name and length values <del> Object.defineProperties(fn, { <add> ObjectDefineProperties(fn, { <ide> name: { value: type.name }, <ide> length: { value: type.length } <ide> }); <del> Object.setPrototypeOf(fn, type); <add> ObjectSetPrototypeOf(fn, type); <ide> fn.prototype = type.prototype; <ide> return fn; <ide> } <ide> function getSignalsToNamesMapping() { <ide> if (signalsToNamesMapping !== undefined) <ide> return signalsToNamesMapping; <ide> <del> signalsToNamesMapping = Object.create(null); <add> signalsToNamesMapping = ObjectCreate(null); <ide> for (const key in signals) { <ide> signalsToNamesMapping[signals[key]] = key; <ide> } <ide> function convertToValidSignal(signal) { <ide> <ide> function getConstructorOf(obj) { <ide> while (obj) { <del> const descriptor = Object.getOwnPropertyDescriptor(obj, 'constructor'); <add> const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); <ide> if (descriptor !== undefined && <ide> typeof descriptor.value === 'function' && <ide> descriptor.value.name !== '') { <ide> return descriptor.value; <ide> } <ide> <del> obj = Object.getPrototypeOf(obj); <add> obj = ObjectGetPrototypeOf(obj); <ide> } <ide> <ide> return null; <ide> function promisify(original) { <ide> if (typeof fn !== 'function') { <ide> throw new ERR_INVALID_ARG_TYPE('util.promisify.custom', 'Function', fn); <ide> } <del> return Object.defineProperty(fn, kCustomPromisifiedSymbol, { <add> return ObjectDefineProperty(fn, kCustomPromisifiedSymbol, { <ide> value: fn, enumerable: false, writable: false, configurable: true <ide> }); <ide> } <ide> function promisify(original) { <ide> }); <ide> } <ide> <del> Object.setPrototypeOf(fn, Object.getPrototypeOf(original)); <add> ObjectSetPrototypeOf(fn, ObjectGetPrototypeOf(original)); <ide> <del> Object.defineProperty(fn, kCustomPromisifiedSymbol, { <add> ObjectDefineProperty(fn, kCustomPromisifiedSymbol, { <ide> value: fn, enumerable: false, writable: false, configurable: true <ide> }); <del> return Object.defineProperties( <add> return ObjectDefineProperties( <ide> fn, <del> Object.getOwnPropertyDescriptors(original) <add> ObjectGetOwnPropertyDescriptors(original) <ide> ); <ide> } <ide> <ide><path>lib/internal/util/comparisons.js <ide> 'use strict'; <ide> <ide> const { <del> BigIntPrototype, <del> BooleanPrototype, <del> DatePrototype, <del> Number, <del> NumberPrototype, <del> Object, <del> ObjectPrototype: { <del> hasOwnProperty, <del> propertyIsEnumerable, <del> toString: objectToString <del> }, <del> StringPrototype, <del> SymbolPrototype <add> BigIntPrototypeValueOf, <add> BooleanPrototypeValueOf, <add> DatePrototypeGetTime, <add> NumberIsNaN, <add> NumberPrototypeValueOf, <add> ObjectGetOwnPropertySymbols, <add> ObjectGetPrototypeOf, <add> ObjectIs, <add> ObjectKeys, 
<add> ObjectPrototypeHasOwnProperty, <add> ObjectPrototypePropertyIsEnumerable, <add> ObjectPrototypeToString, <add> StringPrototypeValueOf, <add> SymbolPrototypeValueOf, <ide> } = primordials; <ide> <ide> const { compare } = internalBinding('buffer'); <ide> function areEqualArrayBuffers(buf1, buf2) { <ide> function isEqualBoxedPrimitive(val1, val2) { <ide> if (isNumberObject(val1)) { <ide> return isNumberObject(val2) && <del> Object.is(NumberPrototype.valueOf(val1), <del> NumberPrototype.valueOf(val2)); <add> ObjectIs(NumberPrototypeValueOf(val1), <add> NumberPrototypeValueOf(val2)); <ide> } <ide> if (isStringObject(val1)) { <ide> return isStringObject(val2) && <del> StringPrototype.valueOf(val1) === StringPrototype.valueOf(val2); <add> StringPrototypeValueOf(val1) === StringPrototypeValueOf(val2); <ide> } <ide> if (isBooleanObject(val1)) { <ide> return isBooleanObject(val2) && <del> BooleanPrototype.valueOf(val1) === BooleanPrototype.valueOf(val2); <add> BooleanPrototypeValueOf(val1) === BooleanPrototypeValueOf(val2); <ide> } <ide> if (isBigIntObject(val1)) { <ide> return isBigIntObject(val2) && <del> BigIntPrototype.valueOf(val1) === BigIntPrototype.valueOf(val2); <add> BigIntPrototypeValueOf(val1) === BigIntPrototypeValueOf(val2); <ide> } <ide> if (isSymbolObject(val1)) { <ide> return isSymbolObject(val2) && <del> SymbolPrototype.valueOf(val1) === SymbolPrototype.valueOf(val2); <add> SymbolPrototypeValueOf(val1) === SymbolPrototypeValueOf(val2); <ide> } <ide> return false; <ide> } <ide> function innerDeepEqual(val1, val2, strict, memos) { <ide> if (val1 === val2) { <ide> if (val1 !== 0) <ide> return true; <del> return strict ? Object.is(val1, val2) : true; <add> return strict ? ObjectIs(val1, val2) : true; <ide> } <ide> <ide> // Check more closely if val1 and val2 are equal. 
<ide> if (strict) { <ide> if (typeof val1 !== 'object') { <del> return typeof val1 === 'number' && Number.isNaN(val1) && <del> Number.isNaN(val2); <add> return typeof val1 === 'number' && NumberIsNaN(val1) && <add> NumberIsNaN(val2); <ide> } <ide> if (typeof val2 !== 'object' || val1 === null || val2 === null) { <ide> return false; <ide> } <del> if (Object.getPrototypeOf(val1) !== Object.getPrototypeOf(val2)) { <add> if (ObjectGetPrototypeOf(val1) !== ObjectGetPrototypeOf(val2)) { <ide> return false; <ide> } <ide> } else { <ide> function innerDeepEqual(val1, val2, strict, memos) { <ide> return false; <ide> } <ide> } <del> const val1Tag = objectToString(val1); <del> const val2Tag = objectToString(val2); <add> const val1Tag = ObjectPrototypeToString(val1); <add> const val2Tag = ObjectPrototypeToString(val2); <ide> <ide> if (val1Tag !== val2Tag) { <ide> return false; <ide> function innerDeepEqual(val1, val2, strict, memos) { <ide> return keyCheck(val1, val2, strict, memos, kNoIterator); <ide> } <ide> if (isDate(val1)) { <del> if (DatePrototype.getTime(val1) !== DatePrototype.getTime(val2)) { <add> if (DatePrototypeGetTime(val1) !== DatePrototypeGetTime(val2)) { <ide> return false; <ide> } <ide> } else if (isRegExp(val1)) { <ide> function innerDeepEqual(val1, val2, strict, memos) { <ide> } <ide> <ide> function getEnumerables(val, keys) { <del> return keys.filter((k) => propertyIsEnumerable(val, k)); <add> return keys.filter((k) => ObjectPrototypePropertyIsEnumerable(val, k)); <ide> } <ide> <ide> function keyCheck(val1, val2, strict, memos, iterationType, aKeys) { <ide> function keyCheck(val1, val2, strict, memos, iterationType, aKeys) { <ide> // d) For Sets and Maps, equal contents <ide> // Note: this accounts for both named and indexed properties on Arrays. <ide> if (arguments.length === 5) { <del> aKeys = Object.keys(val1); <del> const bKeys = Object.keys(val2); <add> aKeys = ObjectKeys(val1); <add> const bKeys = ObjectKeys(val2); <ide> <ide> // The pair must have the same number of owned properties. 
<ide> if (aKeys.length !== bKeys.length) { <ide> function keyCheck(val1, val2, strict, memos, iterationType, aKeys) { <ide> // Cheap key test <ide> let i = 0; <ide> for (; i < aKeys.length; i++) { <del> if (!hasOwnProperty(val2, aKeys[i])) { <add> if (!ObjectPrototypeHasOwnProperty(val2, aKeys[i])) { <ide> return false; <ide> } <ide> } <ide> <ide> if (strict && arguments.length === 5) { <del> const symbolKeysA = Object.getOwnPropertySymbols(val1); <add> const symbolKeysA = ObjectGetOwnPropertySymbols(val1); <ide> if (symbolKeysA.length !== 0) { <ide> let count = 0; <ide> for (i = 0; i < symbolKeysA.length; i++) { <ide> const key = symbolKeysA[i]; <del> if (propertyIsEnumerable(val1, key)) { <del> if (!propertyIsEnumerable(val2, key)) { <add> if (ObjectPrototypePropertyIsEnumerable(val1, key)) { <add> if (!ObjectPrototypePropertyIsEnumerable(val2, key)) { <ide> return false; <ide> } <ide> aKeys.push(key); <ide> count++; <del> } else if (propertyIsEnumerable(val2, key)) { <add> } else if (ObjectPrototypePropertyIsEnumerable(val2, key)) { <ide> return false; <ide> } <ide> } <del> const symbolKeysB = Object.getOwnPropertySymbols(val2); <add> const symbolKeysB = ObjectGetOwnPropertySymbols(val2); <ide> if (symbolKeysA.length !== symbolKeysB.length && <ide> getEnumerables(val2, symbolKeysB).length !== count) { <ide> return false; <ide> } <ide> } else { <del> const symbolKeysB = Object.getOwnPropertySymbols(val2); <add> const symbolKeysB = ObjectGetOwnPropertySymbols(val2); <ide> if (symbolKeysB.length !== 0 && <ide> getEnumerables(val2, symbolKeysB).length !== 0) { <ide> return false; <ide> function findLooseMatchingPrimitives(prim) { <ide> // a regular number and not NaN. <ide> // Fall through <ide> case 'number': <del> if (Number.isNaN(prim)) { <add> if (NumberIsNaN(prim)) { <ide> return false; <ide> } <ide> } <ide> function objEquiv(a, b, strict, keys, memos, iterationType) { <ide> } <ide> } else if (iterationType === kIsArray) { <ide> for (; i < a.length; i++) { <del> if (hasOwnProperty(a, i)) { <del> if (!hasOwnProperty(b, i) || <add> if (ObjectPrototypeHasOwnProperty(a, i)) { <add> if (!ObjectPrototypeHasOwnProperty(b, i) || <ide> !innerDeepEqual(a[i], b[i], strict, memos)) { <ide> return false; <ide> } <del> } else if (hasOwnProperty(b, i)) { <add> } else if (ObjectPrototypeHasOwnProperty(b, i)) { <ide> return false; <ide> } else { <ide> // Array is sparse. 
<del> const keysA = Object.keys(a); <add> const keysA = ObjectKeys(a); <ide> for (; i < keysA.length; i++) { <ide> const key = keysA[i]; <del> if (!hasOwnProperty(b, key) || <add> if (!ObjectPrototypeHasOwnProperty(b, key) || <ide> !innerDeepEqual(a[key], b[key], strict, memos)) { <ide> return false; <ide> } <ide> } <del> if (keysA.length !== Object.keys(b).length) { <add> if (keysA.length !== ObjectKeys(b).length) { <ide> return false; <ide> } <ide> return true; <ide><path>lib/internal/util/inspect.js <ide> 'use strict'; <ide> <ide> const { <del> BigIntPrototype, <del> BooleanPrototype, <del> DatePrototype, <del> ErrorPrototype, <del> JSON, <del> MapPrototype, <del> Math, <del> NumberPrototype, <del> Object, <del> ObjectPrototype: { <del> hasOwnProperty, <del> propertyIsEnumerable <del> }, <del> RegExpPrototype, <del> SetPrototype, <del> StringPrototype, <del> SymbolPrototype, <add> ArrayIsArray, <add> BigIntPrototypeValueOf, <add> BooleanPrototypeValueOf, <add> DatePrototypeGetTime, <add> DatePrototypeToISOString, <add> DatePrototypeToString, <add> ErrorPrototypeToString, <add> JSONStringify, <add> MapPrototypeEntries, <add> MathFloor, <add> MathMax, <add> MathMin, <add> MathRound, <add> MathSqrt, <add> NumberPrototypeValueOf, <add> ObjectAssign, <add> ObjectCreate, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetOwnPropertyDescriptors, <add> ObjectGetOwnPropertyNames, <add> ObjectGetOwnPropertySymbols, <add> ObjectGetPrototypeOf, <add> ObjectIs, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ObjectPrototypePropertyIsEnumerable, <add> ObjectSeal, <add> RegExpPrototypeToString, <add> SetPrototypeValues, <add> StringPrototypeValueOf, <add> SymbolPrototypeToString, <add> SymbolPrototypeValueOf, <add> SymbolIterator, <add> SymbolToStringTag, <ide> uncurryThis <ide> } = primordials; <ide> <ide> const { NativeModule } = require('internal/bootstrap/loaders'); <ide> let hexSlice; <ide> <ide> const builtInObjects = new Set( <del> Object.getOwnPropertyNames(global).filter((e) => /^([A-Z][a-z]+)+$/.test(e)) <add> ObjectGetOwnPropertyNames(global).filter((e) => /^([A-Z][a-z]+)+$/.test(e)) <ide> ); <ide> <del>const inspectDefaultOptions = Object.seal({ <add>const inspectDefaultOptions = ObjectSeal({ <ide> showHidden: false, <ide> depth: 2, <ide> colors: false, <ide> const meta = [ <ide> <ide> function getUserOptions(ctx) { <ide> const obj = { stylize: ctx.stylize }; <del> for (const key of Object.keys(inspectDefaultOptions)) { <add> for (const key of ObjectKeys(inspectDefaultOptions)) { <ide> obj[key] = ctx[key]; <ide> } <ide> if (ctx.userOptions === undefined) <ide> function inspect(value, opts) { <ide> if (typeof opts === 'boolean') { <ide> ctx.showHidden = opts; <ide> } else if (opts) { <del> const optKeys = Object.keys(opts); <add> const optKeys = ObjectKeys(opts); <ide> for (const key of optKeys) { <ide> // TODO(BridgeAR): Find a solution what to do about stylize. Either make <ide> // this function public or add a new API with a similar or better <ide> // functionality. <del> if (hasOwnProperty(inspectDefaultOptions, key) || key === 'stylize') { <add> if ( <add> ObjectPrototypeHasOwnProperty(inspectDefaultOptions, key) || <add> key === 'stylize') { <ide> ctx[key] = opts[key]; <ide> } else if (ctx.userOptions === undefined) { <ide> // This is required to pass through the actual user input. 
<ide> function inspect(value, opts) { <ide> } <ide> inspect.custom = customInspectSymbol; <ide> <del>Object.defineProperty(inspect, 'defaultOptions', { <add>ObjectDefineProperty(inspect, 'defaultOptions', { <ide> get() { <ide> return inspectDefaultOptions; <ide> }, <ide> set(options) { <ide> if (options === null || typeof options !== 'object') { <ide> throw new ERR_INVALID_ARG_TYPE('options', 'Object', options); <ide> } <del> return Object.assign(inspectDefaultOptions, options); <add> return ObjectAssign(inspectDefaultOptions, options); <ide> } <ide> }); <ide> <ide> // http://en.wikipedia.org/wiki/ANSI_escape_code#graphics <del>inspect.colors = Object.assign(Object.create(null), { <add>inspect.colors = ObjectAssign(ObjectCreate(null), { <ide> bold: [1, 22], <ide> italic: [3, 23], <ide> underline: [4, 24], <ide> inspect.colors = Object.assign(Object.create(null), { <ide> }); <ide> <ide> // Don't use 'blue' not visible on cmd.exe <del>inspect.styles = Object.assign(Object.create(null), { <add>inspect.styles = ObjectAssign(ObjectCreate(null), { <ide> special: 'cyan', <ide> number: 'yellow', <ide> bigint: 'yellow', <ide> function getConstructorName(obj, ctx, recurseTimes) { <ide> let firstProto; <ide> const tmp = obj; <ide> while (obj) { <del> const descriptor = Object.getOwnPropertyDescriptor(obj, 'constructor'); <add> const descriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); <ide> if (descriptor !== undefined && <ide> typeof descriptor.value === 'function' && <ide> descriptor.value.name !== '') { <ide> return descriptor.value.name; <ide> } <ide> <del> obj = Object.getPrototypeOf(obj); <add> obj = ObjectGetPrototypeOf(obj); <ide> if (firstProto === undefined) { <ide> firstProto = obj; <ide> } <ide> function getPrefix(constructor, tag, fallback) { <ide> // Look up the keys of the object. <ide> function getKeys(value, showHidden) { <ide> let keys; <del> const symbols = Object.getOwnPropertySymbols(value); <add> const symbols = ObjectGetOwnPropertySymbols(value); <ide> if (showHidden) { <del> keys = Object.getOwnPropertyNames(value); <add> keys = ObjectGetOwnPropertyNames(value); <ide> if (symbols.length !== 0) <ide> keys.push(...symbols); <ide> } else { <ide> function getKeys(value, showHidden) { <ide> // TODO(devsnek): track https://github.com/tc39/ecma262/issues/1209 <ide> // and modify this logic as needed. 
<ide> try { <del> keys = Object.keys(value); <add> keys = ObjectKeys(value); <ide> } catch (err) { <ide> assert(isNativeError(err) && err.name === 'ReferenceError' && <ide> isModuleNamespaceObject(value)); <del> keys = Object.getOwnPropertyNames(value); <add> keys = ObjectGetOwnPropertyNames(value); <ide> } <ide> if (symbols.length !== 0) { <del> keys.push(...symbols.filter((key) => propertyIsEnumerable(value, key))); <add> const filter = (key) => ObjectPrototypePropertyIsEnumerable(value, key); <add> keys.push(...symbols.filter(filter)); <ide> } <ide> } <ide> return keys; <ide> function clazzWithNullPrototype(clazz, name) { <ide> } <ide> } <ide> class NullPrototype extends clazz { <del> get [Symbol.toStringTag]() { <add> get [SymbolToStringTag]() { <ide> return ''; <ide> } <ide> } <del> Object.defineProperty(NullPrototype.prototype.constructor, 'name', <del> { value: `[${name}: null prototype]` }); <add> ObjectDefineProperty(NullPrototype.prototype.constructor, 'name', <add> { value: `[${name}: null prototype]` }); <ide> lazyNullPrototypeCache.set(clazz, NullPrototype); <ide> return NullPrototype; <ide> } <ide> function noPrototypeIterator(ctx, value, recurseTimes) { <ide> let newVal; <ide> if (isSet(value)) { <ide> const clazz = clazzWithNullPrototype(Set, 'Set'); <del> newVal = new clazz(SetPrototype.values(value)); <add> newVal = new clazz(SetPrototypeValues(value)); <ide> } else if (isMap(value)) { <ide> const clazz = clazzWithNullPrototype(Map, 'Map'); <del> newVal = new clazz(MapPrototype.entries(value)); <del> } else if (Array.isArray(value)) { <add> newVal = new clazz(MapPrototypeEntries(value)); <add> } else if (ArrayIsArray(value)) { <ide> const clazz = clazzWithNullPrototype(Array, 'Array'); <ide> newVal = new clazz(value.length); <ide> } else if (isTypedArray(value)) { <ide> function noPrototypeIterator(ctx, value, recurseTimes) { <ide> newVal = new clazz(value); <ide> } <ide> if (newVal !== undefined) { <del> Object.defineProperties(newVal, Object.getOwnPropertyDescriptors(value)); <add> ObjectDefineProperties(newVal, ObjectGetOwnPropertyDescriptors(value)); <ide> return formatRaw(ctx, newVal, recurseTimes); <ide> } <ide> } <ide> function formatRaw(ctx, value, recurseTimes, typedArray) { <ide> let keys; <ide> <ide> const constructor = getConstructorName(value, ctx, recurseTimes); <del> let tag = value[Symbol.toStringTag]; <add> let tag = value[SymbolToStringTag]; <ide> // Only list the tag in case it's non-enumerable / not an own property. <ide> // Otherwise we'd print this twice. <ide> if (typeof tag !== 'string' || <ide> (tag !== '' && <del> (ctx.showHidden ? hasOwnProperty : propertyIsEnumerable)( <del> value, Symbol.toStringTag <add> (ctx.showHidden ? <add> ObjectPrototypeHasOwnProperty : <add> ObjectPrototypePropertyIsEnumerable)( <add> value, SymbolToStringTag <ide> ))) { <ide> tag = ''; <ide> } <ide> function formatRaw(ctx, value, recurseTimes, typedArray) { <ide> let extrasType = kObjectType; <ide> <ide> // Iterators and the rest are split to reduce checks. <del> if (value[Symbol.iterator]) { <add> if (value[SymbolIterator]) { <ide> noIterator = false; <del> if (Array.isArray(value)) { <add> if (ArrayIsArray(value)) { <ide> keys = getOwnNonIndexProperties(value, filter); <ide> // Only set the constructor for non ordinary ("Array [...]") arrays. 
<ide> const prefix = getPrefix(constructor, tag, 'Array'); <ide> function formatRaw(ctx, value, recurseTimes, typedArray) { <ide> return ctx.stylize(base, 'special'); <ide> } else if (isRegExp(value)) { <ide> // Make RegExps say that they are RegExps <del> base = RegExpPrototype.toString( <add> base = RegExpPrototypeToString( <ide> constructor !== null ? value : new RegExp(value) <ide> ); <ide> const prefix = getPrefix(constructor, tag, 'RegExp'); <ide> function formatRaw(ctx, value, recurseTimes, typedArray) { <ide> return ctx.stylize(base, 'regexp'); <ide> } else if (isDate(value)) { <ide> // Make dates with properties first say the date <del> base = Number.isNaN(DatePrototype.getTime(value)) ? <del> DatePrototype.toString(value) : <del> DatePrototype.toISOString(value); <add> base = Number.isNaN(DatePrototypeGetTime(value)) ? <add> DatePrototypeToString(value) : <add> DatePrototypeToISOString(value); <ide> const prefix = getPrefix(constructor, tag, 'Date'); <ide> if (prefix !== 'Date ') <ide> base = `${prefix}${base}`; <ide> function getBoxedBase(value, ctx, keys, constructor, tag) { <ide> let fn; <ide> let type; <ide> if (isNumberObject(value)) { <del> fn = NumberPrototype; <add> fn = NumberPrototypeValueOf; <ide> type = 'Number'; <ide> } else if (isStringObject(value)) { <del> fn = StringPrototype; <add> fn = StringPrototypeValueOf; <ide> type = 'String'; <ide> // For boxed Strings, we have to remove the 0-n indexed entries, <ide> // since they just noisy up the output and are redundant <ide> // Make boxed primitive Strings look like such <ide> keys.splice(0, value.length); <ide> } else if (isBooleanObject(value)) { <del> fn = BooleanPrototype; <add> fn = BooleanPrototypeValueOf; <ide> type = 'Boolean'; <ide> } else if (isBigIntObject(value)) { <del> fn = BigIntPrototype; <add> fn = BigIntPrototypeValueOf; <ide> type = 'BigInt'; <ide> } else { <del> fn = SymbolPrototype; <add> fn = SymbolPrototypeValueOf; <ide> type = 'Symbol'; <ide> } <ide> let base = `[${type}`; <ide> function getBoxedBase(value, ctx, keys, constructor, tag) { <ide> base += ` (${constructor})`; <ide> } <ide> } <del> base += `: ${formatPrimitive(stylizeNoColor, fn.valueOf(value), ctx)}]`; <add> base += `: ${formatPrimitive(stylizeNoColor, fn(value), ctx)}]`; <ide> if (tag !== '' && tag !== constructor) { <ide> base += ` [${tag}]`; <ide> } <ide> function getFunctionBase(value, constructor, tag) { <ide> } <ide> <ide> function formatError(err, constructor, tag, ctx) { <del> let stack = err.stack || ErrorPrototype.toString(err); <add> let stack = err.stack || ErrorPrototypeToString(err); <ide> <ide> // A stack trace may contain arbitrary data. Only manipulate the output <ide> // for "regular errors" (errors that "look normal") for now. <ide> function groupArrayElements(ctx, output, value) { <ide> (totalLength / actualMax > 5 || maxLength <= 6)) { <ide> <ide> const approxCharHeights = 2.5; <del> const averageBias = Math.sqrt(actualMax - totalLength / output.length); <del> const biasedMax = Math.max(actualMax - 3 - averageBias, 1); <add> const averageBias = MathSqrt(actualMax - totalLength / output.length); <add> const biasedMax = MathMax(actualMax - 3 - averageBias, 1); <ide> // Dynamically check how many columns seem possible. <del> const columns = Math.min( <add> const columns = MathMin( <ide> // Ideally a square should be drawn. We expect a character to be about 2.5 <ide> // times as high as wide. 
This is the area formula to calculate a square <ide> // which contains n rectangles of size `actualMax * approxCharHeights`. <ide> // Divide that by `actualMax` to receive the correct number of columns. <ide> // The added bias increases the columns for short entries. <del> Math.round( <del> Math.sqrt( <add> MathRound( <add> MathSqrt( <ide> approxCharHeights * biasedMax * outputLength <ide> ) / biasedMax <ide> ), <ide> // Do not exceed the breakLength. <del> Math.floor((ctx.breakLength - ctx.indentationLvl) / actualMax), <add> MathFloor((ctx.breakLength - ctx.indentationLvl) / actualMax), <ide> // Limit array grouping for small `compact` modes as the user requested <ide> // minimal grouping. <ide> ctx.compact * 4, <ide> function groupArrayElements(ctx, output, value) { <ide> // Each iteration creates a single line of grouped entries. <ide> for (let i = 0; i < outputLength; i += columns) { <ide> // The last lines may contain less entries than columns. <del> const max = Math.min(i + columns, outputLength); <add> const max = MathMin(i + columns, outputLength); <ide> let str = ''; <ide> let j = i; <ide> for (; j < max - 1; j++) { <ide> function handleMaxCallStackSize(ctx, err, constructorName, indentationLvl) { <ide> <ide> function formatNumber(fn, value) { <ide> // Format -0 as '-0'. Checking `value === -0` won't distinguish 0 from -0. <del> return fn(Object.is(value, -0) ? '-0' : `${value}`, 'number'); <add> return fn(ObjectIs(value, -0) ? '-0' : `${value}`, 'number'); <ide> } <ide> <ide> function formatBigInt(fn, value) { <ide> function formatPrimitive(fn, value, ctx) { <ide> if (typeof value === 'undefined') <ide> return fn('undefined', 'undefined'); <ide> // es6 symbol primitive <del> return fn(SymbolPrototype.toString(value), 'symbol'); <add> return fn(SymbolPrototypeToString(value), 'symbol'); <ide> } <ide> <ide> function formatNamespaceObject(ctx, value, recurseTimes, keys) { <ide> function formatNamespaceObject(ctx, value, recurseTimes, keys) { <ide> <ide> // The array is sparse and/or has extra keys <ide> function formatSpecialArray(ctx, value, recurseTimes, maxLength, output, i) { <del> const keys = Object.keys(value); <add> const keys = ObjectKeys(value); <ide> let index = i; <ide> for (; i < keys.length && output.length < maxLength; i++) { <ide> const key = keys[i]; <ide> function formatArrayBuffer(ctx, value) { <ide> } <ide> if (hexSlice === undefined) <ide> hexSlice = uncurryThis(require('buffer').Buffer.prototype.hexSlice); <del> let str = hexSlice(buffer, 0, Math.min(ctx.maxArrayLength, buffer.length)) <add> let str = hexSlice(buffer, 0, MathMin(ctx.maxArrayLength, buffer.length)) <ide> .replace(/(.{2})/g, '$1 ').trim(); <ide> const remaining = buffer.length - ctx.maxArrayLength; <ide> if (remaining > 0) <ide> function formatArrayBuffer(ctx, value) { <ide> <ide> function formatArray(ctx, value, recurseTimes) { <ide> const valLen = value.length; <del> const len = Math.min(Math.max(0, ctx.maxArrayLength), valLen); <add> const len = MathMin(MathMax(0, ctx.maxArrayLength), valLen); <ide> <ide> const remaining = valLen - len; <ide> const output = []; <ide> for (var i = 0; i < len; i++) { <ide> // Special handle sparse arrays. 
<del> if (!hasOwnProperty(value, i)) { <add> if (!ObjectPrototypeHasOwnProperty(value, i)) { <ide> return formatSpecialArray(ctx, value, recurseTimes, len, output, i); <ide> } <ide> output.push(formatProperty(ctx, value, recurseTimes, i, kArrayType)); <ide> function formatArray(ctx, value, recurseTimes) { <ide> } <ide> <ide> function formatTypedArray(ctx, value, recurseTimes) { <del> const maxLength = Math.min(Math.max(0, ctx.maxArrayLength), value.length); <add> const maxLength = MathMin(MathMax(0, ctx.maxArrayLength), value.length); <ide> const remaining = value.length - maxLength; <ide> const output = new Array(maxLength); <ide> const elementFormatter = value.length > 0 && typeof value[0] === 'number' ? <ide> function formatSet(ctx, value, recurseTimes) { <ide> ctx.indentationLvl -= 2; <ide> // With `showHidden`, `length` will display as a hidden property for <ide> // arrays. For consistency's sake, do the same for `size`, even though this <del> // property isn't selected by Object.getOwnPropertyNames(). <add> // property isn't selected by ObjectGetOwnPropertyNames(). <ide> if (ctx.showHidden) <ide> output.push(`[size]: ${ctx.stylize(`${value.size}`, 'number')}`); <ide> return output; <ide> function formatMap(ctx, value, recurseTimes) { <ide> } <ide> <ide> function formatSetIterInner(ctx, recurseTimes, entries, state) { <del> const maxArrayLength = Math.max(ctx.maxArrayLength, 0); <del> const maxLength = Math.min(maxArrayLength, entries.length); <add> const maxArrayLength = MathMax(ctx.maxArrayLength, 0); <add> const maxLength = MathMin(maxArrayLength, entries.length); <ide> let output = new Array(maxLength); <ide> ctx.indentationLvl += 2; <ide> for (var i = 0; i < maxLength; i++) { <ide> function formatSetIterInner(ctx, recurseTimes, entries, state) { <ide> } <ide> <ide> function formatMapIterInner(ctx, recurseTimes, entries, state) { <del> const maxArrayLength = Math.max(ctx.maxArrayLength, 0); <add> const maxArrayLength = MathMax(ctx.maxArrayLength, 0); <ide> // Entries exist as [key1, val1, key2, val2, ...] <ide> const len = entries.length / 2; <ide> const remaining = len - maxArrayLength; <del> const maxLength = Math.min(maxArrayLength, len); <add> const maxLength = MathMin(maxArrayLength, len); <ide> let output = new Array(maxLength); <ide> let i = 0; <ide> ctx.indentationLvl += 2; <ide> function formatPromise(ctx, value, recurseTimes) { <ide> function formatProperty(ctx, value, recurseTimes, key, type) { <ide> let name, str; <ide> let extra = ' '; <del> const desc = Object.getOwnPropertyDescriptor(value, key) || <add> const desc = ObjectGetOwnPropertyDescriptor(value, key) || <ide> { value: value[key], enumerable: true }; <ide> if (desc.value !== undefined) { <ide> const diff = (type !== kObjectType || ctx.compact !== true) ? 2 : 3; <ide> const firstErrorLine = (error) => error.message.split('\n')[0]; <ide> let CIRCULAR_ERROR_MESSAGE; <ide> function tryStringify(arg) { <ide> try { <del> return JSON.stringify(arg); <add> return JSONStringify(arg); <ide> } catch (err) { <ide> // Populate the circular error message lazily <ide> if (!CIRCULAR_ERROR_MESSAGE) { <ide> try { <del> const a = {}; a.a = a; JSON.stringify(a); <add> const a = {}; a.a = a; JSONStringify(a); <ide> } catch (err) { <ide> CIRCULAR_ERROR_MESSAGE = firstErrorLine(err); <ide> } <ide> function formatWithOptionsInternal(inspectOptions, ...args) { <ide> tempArg === null || <ide> (typeof tempArg.toString === 'function' && <ide> // A direct own property. 
<del> (hasOwnProperty(tempArg, 'toString') || <add> (ObjectPrototypeHasOwnProperty(tempArg, 'toString') || <ide> // A direct own property on the constructor prototype in <ide> // case the constructor is not an built-in object. <ide> ((constr = tempArg.constructor) && <ide> !builtInObjects.has(constr.name) && <ide> constr.prototype && <del> hasOwnProperty(constr.prototype, 'toString'))))) { <add> ObjectPrototypeHasOwnProperty(constr.prototype, <add> 'toString'))))) { <ide> tempStr = String(tempArg); <ide> } else { <ide> tempStr = inspect(tempArg, { <ide><path>lib/internal/util/inspector.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectKeys, <add>} = primordials; <ide> <ide> let session; <ide> function sendInspectorCommand(cb, onError) { <ide> function installConsoleExtensions(commandLineApi) { <ide> // Wrap a console implemented by Node.js with features from the VM inspector <ide> function wrapConsole(consoleFromNode, consoleFromVM) { <ide> const { consoleCall } = internalBinding('inspector'); <del> for (const key of Object.keys(consoleFromVM)) { <add> for (const key of ObjectKeys(consoleFromVM)) { <ide> // If global console has the same method as inspector console, <ide> // then wrap these two methods into one. Native wrapper will preserve <ide> // the original stack. <ide><path>lib/internal/util/types.js <ide> 'use strict'; <ide> <del>const { Object, uncurryThis } = primordials; <add>const { <add> ArrayBufferIsView, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetPrototypeOf, <add> SymbolToStringTag, <add> uncurryThis, <add>} = primordials; <ide> <del>const TypedArrayPrototype = Object.getPrototypeOf(Uint8Array.prototype); <add>const TypedArrayPrototype = ObjectGetPrototypeOf(Uint8Array.prototype); <ide> <ide> const TypedArrayProto_toStringTag = <ide> uncurryThis( <del> Object.getOwnPropertyDescriptor(TypedArrayPrototype, <del> Symbol.toStringTag).get); <del> <del>// Cached to make sure no userland code can tamper with it. 
<del>const isArrayBufferView = ArrayBuffer.isView; <add> ObjectGetOwnPropertyDescriptor(TypedArrayPrototype, <add> SymbolToStringTag).get); <ide> <ide> function isTypedArray(value) { <ide> return TypedArrayProto_toStringTag(value) !== undefined; <ide> function isBigUint64Array(value) { <ide> <ide> module.exports = { <ide> ...internalBinding('types'), <del> isArrayBufferView, <add> isArrayBufferView: ArrayBufferIsView, <ide> isTypedArray, <ide> isUint8Array, <ide> isUint8ClampedArray, <ide><path>lib/internal/v8_prof_processor.js <ide> 'use strict'; <ide> <del>const { JSON } = primordials; <add>const { <add> JSONStringify, <add>} = primordials; <ide> <ide> const vm = require('vm'); <ide> <ide> if (process.platform === 'darwin') { <ide> } <ide> tickArguments.push.apply(tickArguments, process.argv.slice(1)); <ide> script = `(function(module, require) { <del> arguments = ${JSON.stringify(tickArguments)}; <add> arguments = ${JSONStringify(tickArguments)}; <ide> function write (s) { process.stdout.write(s) } <ide> function printErr(err) { console.error(err); } <ide> ${script} <ide><path>lib/internal/vm/module.js <ide> 'use strict'; <ide> <del>const { Object, Symbol, SafePromise } = primordials; <add>const { <add> ObjectCreate, <add> ObjectDefineProperty, <add> Symbol, <add> SafePromise, <add>} = primordials; <ide> <ide> const { isContext } = internalBinding('contextify'); <ide> const { isModuleNamespaceObject } = require('internal/util/types'); <ide> class Module { <ide> context[kPerContextModuleId] += 1; <ide> } else { <ide> identifier = `${defaultModuleName}(0)`; <del> Object.defineProperty(context, kPerContextModuleId, { <add> ObjectDefineProperty(context, kPerContextModuleId, { <ide> value: 1, <ide> writable: true, <ide> enumerable: false, <ide> class Module { <ide> if (typeof depth === 'number' && depth < 0) <ide> return options.stylize(`[${ctor.name}]`, 'special'); <ide> <del> const o = Object.create({ constructor: ctor }); <add> const o = ObjectCreate({ constructor: ctor }); <ide> o.status = this.status; <ide> o.identifier = this.identifier; <ide> o.context = this.context; <ide><path>lib/internal/worker.js <ide> <ide> /* global SharedArrayBuffer */ <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> ObjectCreate, <add> ObjectEntries, <add>} = primordials; <ide> <ide> const EventEmitter = require('events'); <ide> const assert = require('internal/assert'); <ide> class Worker extends EventEmitter { <ide> <ide> let env; <ide> if (typeof options.env === 'object' && options.env !== null) { <del> env = Object.create(null); <del> for (const [ key, value ] of Object.entries(options.env)) <add> env = ObjectCreate(null); <add> for (const [ key, value ] of ObjectEntries(options.env)) <ide> env[key] = `${value}`; <ide> } else if (options.env == null) { <ide> env = process.env; <ide> function parseResourceLimits(obj) { <ide> if (typeof obj !== 'object' || obj === null) return ret; <ide> <ide> if (typeof obj.maxOldGenerationSizeMb === 'number') <del> ret[kMaxOldGenerationSizeMb] = Math.max(obj.maxOldGenerationSizeMb, 2); <add> ret[kMaxOldGenerationSizeMb] = MathMax(obj.maxOldGenerationSizeMb, 2); <ide> if (typeof obj.maxYoungGenerationSizeMb === 'number') <ide> ret[kMaxYoungGenerationSizeMb] = obj.maxYoungGenerationSizeMb; <ide> if (typeof obj.codeRangeSizeMb === 'number') <ide><path>lib/internal/worker/io.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectAssign, <add> ObjectCreate, <add> ObjectDefineProperty, <add> 
ObjectGetOwnPropertyDescriptors, <add> ObjectGetPrototypeOf, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> handle_onclose: handleOnCloseSymbol, <ide> const messageTypes = { <ide> // not provide methods that are not present in the Browser and not documented <ide> // on our side (e.g. hasRef). <ide> // Save a copy of the original set of methods as a shallow clone. <del>const MessagePortPrototype = Object.create( <del> Object.getPrototypeOf(MessagePort.prototype), <del> Object.getOwnPropertyDescriptors(MessagePort.prototype)); <add>const MessagePortPrototype = ObjectCreate( <add> ObjectGetPrototypeOf(MessagePort.prototype), <add> ObjectGetOwnPropertyDescriptors(MessagePort.prototype)); <ide> // Set up the new inheritance chain. <del>Object.setPrototypeOf(MessagePort, EventEmitter); <del>Object.setPrototypeOf(MessagePort.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(MessagePort, EventEmitter); <add>ObjectSetPrototypeOf(MessagePort.prototype, EventEmitter.prototype); <ide> // Copy methods that are inherited from HandleWrap, because <ide> // changing the prototype of MessagePort.prototype implicitly removed them. <ide> MessagePort.prototype.ref = MessagePortPrototype.ref; <ide> MessagePort.prototype[kOnMessageListener] = function onmessage(event) { <ide> // This is for compatibility with the Web's MessagePort API. It makes sense to <ide> // provide it as an `EventEmitter` in Node.js, but if somebody overrides <ide> // `onmessage`, we'll switch over to the Web API model. <del>Object.defineProperty(MessagePort.prototype, 'onmessage', { <add>ObjectDefineProperty(MessagePort.prototype, 'onmessage', { <ide> enumerable: true, <ide> configurable: true, <ide> get() { <ide> function oninit() { <ide> setupPortReferencing(this, this, 'message'); <ide> } <ide> <del>Object.defineProperty(MessagePort.prototype, onInitSymbol, { <add>ObjectDefineProperty(MessagePort.prototype, onInitSymbol, { <ide> enumerable: true, <ide> writable: false, <ide> value: oninit <ide> function onclose() { <ide> this.emit('close'); <ide> } <ide> <del>Object.defineProperty(MessagePort.prototype, handleOnCloseSymbol, { <add>ObjectDefineProperty(MessagePort.prototype, handleOnCloseSymbol, { <ide> enumerable: false, <ide> writable: false, <ide> value: onclose <ide> MessagePort.prototype.close = function(cb) { <ide> MessagePortPrototype.close.call(this); <ide> }; <ide> <del>Object.defineProperty(MessagePort.prototype, inspect.custom, { <add>ObjectDefineProperty(MessagePort.prototype, inspect.custom, { <ide> enumerable: false, <ide> writable: false, <ide> value: function inspect() { // eslint-disable-line func-name-matching <ide> Object.defineProperty(MessagePort.prototype, inspect.custom, { <ide> // e.g. when accessing the prototype directly. <ide> ref = MessagePortPrototype.hasRef.call(this); <ide> } catch { return this; } <del> return Object.assign(Object.create(MessagePort.prototype), <del> ref === undefined ? { <del> active: false, <del> } : { <del> active: true, <del> refed: ref <del> }, <del> this); <add> return ObjectAssign(ObjectCreate(MessagePort.prototype), <add> ref === undefined ? 
{ <add> active: false, <add> } : { <add> active: true, <add> refed: ref <add> }, <add> this); <ide> } <ide> }); <ide> <ide><path>lib/net.js <ide> 'use strict'; <ide> <ide> const { <del> Object: { <del> defineProperty: ObjectDefineProperty, <del> setPrototypeOf: ObjectSetPrototypeOf <del> } <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <ide> } = primordials; <ide> <ide> const EventEmitter = require('events'); <ide><path>lib/os.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperties, <add>} = primordials; <ide> <ide> const { safeGetenv } = internalBinding('credentials'); <ide> const constants = internalBinding('constants').os; <ide> module.exports = { <ide> tmpDir: deprecate(tmpdir, tmpDirDeprecationMsg, 'DEP0022') <ide> }; <ide> <del>Object.defineProperties(module.exports, { <add>ObjectDefineProperties(module.exports, { <ide> constants: { <ide> configurable: false, <ide> enumerable: true, <ide><path>lib/perf_hooks.js <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectKeys, <add>} = primordials; <ide> <ide> const { <ide> ELDHistogram: _ELDHistogram, <ide> const nodeTiming = new PerformanceNodeTiming(); <ide> // Maintains a list of entries as a linked list stored in insertion order. <ide> class PerformanceObserverEntryList { <ide> constructor() { <del> Object.defineProperties(this, { <add> ObjectDefineProperties(this, { <ide> [kEntries]: { <ide> writable: true, <ide> enumerable: false, <ide> class PerformanceObserver extends AsyncResource { <ide> throw new ERR_INVALID_CALLBACK(callback); <ide> } <ide> super('PerformanceObserver'); <del> Object.defineProperties(this, { <add> ObjectDefineProperties(this, { <ide> [kTypes]: { <ide> enumerable: false, <ide> writable: true, <ide> class PerformanceObserver extends AsyncResource { <ide> disconnect() { <ide> const observerCountsGC = observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_GC]; <ide> const types = this[kTypes]; <del> const keys = Object.keys(types); <add> const keys = ObjectKeys(types); <ide> for (var n = 0; n < keys.length; n++) { <ide> const item = types[keys[n]]; <ide> if (item) { <ide> class Performance { <ide> if (fn[kTimerified]) <ide> return fn[kTimerified]; <ide> const ret = timerify(fn, fn.length); <del> Object.defineProperty(fn, kTimerified, { <add> ObjectDefineProperty(fn, kTimerified, { <ide> enumerable: false, <ide> configurable: true, <ide> writable: false, <ide> value: ret <ide> }); <del> Object.defineProperties(ret, { <add> ObjectDefineProperties(ret, { <ide> [kTimerified]: { <ide> enumerable: false, <ide> configurable: true, <ide> module.exports = { <ide> monitorEventLoopDelay <ide> }; <ide> <del>Object.defineProperty(module.exports, 'constants', { <add>ObjectDefineProperty(module.exports, 'constants', { <ide> configurable: false, <ide> enumerable: true, <ide> value: constants <ide><path>lib/querystring.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectCreate, <add> ObjectKeys, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> function stringify(obj, sep, eq, options) { <ide> } <ide> <ide> if (obj !== null && typeof obj === 'object') { <del> const keys = Object.keys(obj); <add> const keys = ObjectKeys(obj); <ide> const len = keys.length; <ide> const flast = len - 1; <ide> let fields = ''; <ide> function addKeyVal(obj, key, value, keyEncoded, valEncoded, decode) { <ide> <ide> // Parse a 
key/val string. <ide> function parse(qs, sep, eq, options) { <del> const obj = Object.create(null); <add> const obj = ObjectCreate(null); <ide> <ide> if (typeof qs !== 'string' || qs.length === 0) { <ide> return obj; <ide><path>lib/readline.js <ide> <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathCeil, <add> MathFloor, <add> MathMax, <add> ObjectDefineProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> ERR_INVALID_CALLBACK, <ide> function Interface(input, output, completer, terminal) { <ide> this.historySize = historySize; <ide> this.removeHistoryDuplicates = !!removeHistoryDuplicates; <ide> this.crlfDelay = crlfDelay ? <del> Math.max(kMincrlfDelay, crlfDelay) : kMincrlfDelay; <add> MathMax(kMincrlfDelay, crlfDelay) : kMincrlfDelay; <ide> // Check arity, 2 - for async, 1 for sync <ide> if (typeof completer === 'function') { <ide> this.completer = completer.length === 2 ? <ide> function Interface(input, output, completer, terminal) { <ide> input.resume(); <ide> } <ide> <del>Object.setPrototypeOf(Interface.prototype, EventEmitter.prototype); <del>Object.setPrototypeOf(Interface, EventEmitter); <add>ObjectSetPrototypeOf(Interface.prototype, EventEmitter.prototype); <add>ObjectSetPrototypeOf(Interface, EventEmitter); <ide> <del>Object.defineProperty(Interface.prototype, 'columns', { <add>ObjectDefineProperty(Interface.prototype, 'columns', { <ide> configurable: true, <ide> enumerable: true, <ide> get: function() { <ide> Interface.prototype._tabComplete = function(lastKeypressWasTab) { <ide> const width = completions.reduce(function completionReducer(a, b) { <ide> return a.length > b.length ? a : b; <ide> }).length + 2; // 2 space padding <del> let maxColumns = Math.floor(self.columns / width); <add> let maxColumns = MathFloor(self.columns / width); <ide> if (!maxColumns || maxColumns === Infinity) { <ide> maxColumns = 1; <ide> } <ide> function handleGroup(self, group, width, maxColumns) { <ide> if (group.length === 0) { <ide> return; <ide> } <del> const minRows = Math.ceil(group.length / maxColumns); <add> const minRows = MathCeil(group.length / maxColumns); <ide> for (let row = 0; row < minRows; row++) { <ide> for (let col = 0; col < maxColumns; col++) { <ide> const idx = row * maxColumns + col; <ide> Interface.prototype._getDisplayPos = function(str) { <ide> } <ide> if (code === 0x0a) { // new line \n <ide> // row must be incremented by 1 even if offset = 0 or col = +Infinity <del> row += Math.ceil(offset / col) || 1; <add> row += MathCeil(offset / col) || 1; <ide> offset = 0; <ide> continue; <ide> } <ide><path>lib/repl.js <ide> <ide> 'use strict'; <ide> <del>const { Math, Object, ObjectPrototype } = primordials; <add>const { <add> MathMax, <add> ObjectAssign, <add> ObjectCreate, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertyDescriptor, <add> ObjectGetOwnPropertyNames, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ObjectPrototypeHasOwnProperty, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> builtinLibs, <ide> function REPLServer(prompt, <ide> domainSet.add(this._domain); <ide> <ide> let rli = this; <del> Object.defineProperty(this, 'rli', { <add> ObjectDefineProperty(this, 'rli', { <ide> get: deprecate(() => rli, <ide> 'REPLServer.rli is deprecated', 'DEP0124'), <ide> set: deprecate((val) => rli = val, <ide> function REPLServer(prompt, <ide> self.lines.level = []; <ide> <ide> self.clearBufferedCommand(); <del> Object.defineProperty(this, 'bufferedCommand', { <add> 
ObjectDefineProperty(this, 'bufferedCommand', { <ide> get: deprecate(() => self[kBufferedCommandSymbol], <ide> 'REPLServer.bufferedCommand is deprecated', <ide> 'DEP0074'), <ide> function REPLServer(prompt, <ide> prompt <ide> }); <ide> <del> this.commands = Object.create(null); <add> this.commands = ObjectCreate(null); <ide> defineDefaultCommands(this); <ide> <ide> // Figure out which "writer" function to use <ide> function REPLServer(prompt, <ide> writer.options.colors = self.useColors; <ide> <ide> if (options[kStandaloneREPL]) { <del> Object.defineProperty(inspect, 'replDefaults', { <add> ObjectDefineProperty(inspect, 'replDefaults', { <ide> get() { <ide> return writer.options; <ide> }, <ide> set(options) { <ide> if (options === null || typeof options !== 'object') { <ide> throw new ERR_INVALID_ARG_TYPE('options', 'Object', options); <ide> } <del> return Object.assign(writer.options, options); <add> return ObjectAssign(writer.options, options); <ide> }, <ide> enumerable: true, <ide> configurable: true <ide> function REPLServer(prompt, <ide> <ide> self.displayPrompt(); <ide> } <del>Object.setPrototypeOf(REPLServer.prototype, Interface.prototype); <del>Object.setPrototypeOf(REPLServer, Interface); <add>ObjectSetPrototypeOf(REPLServer.prototype, Interface.prototype); <add>ObjectSetPrototypeOf(REPLServer, Interface); <ide> <ide> exports.REPLServer = REPLServer; <ide> <ide> REPLServer.prototype.createContext = function() { <ide> }, () => { <ide> context = vm.createContext(); <ide> }); <del> for (const name of Object.getOwnPropertyNames(global)) { <add> for (const name of ObjectGetOwnPropertyNames(global)) { <ide> // Only set properties on the context that do not exist as primordial. <ide> if (!(name in primordials)) { <del> Object.defineProperty(context, name, <del> Object.getOwnPropertyDescriptor(global, name)); <add> ObjectDefineProperty(context, name, <add> ObjectGetOwnPropertyDescriptor(global, name)); <ide> } <ide> } <ide> context.global = context; <ide> const _console = new Console(this.outputStream); <del> Object.defineProperty(context, 'console', { <add> ObjectDefineProperty(context, 'console', { <ide> configurable: true, <ide> writable: true, <ide> value: _console <ide> REPLServer.prototype.createContext = function() { <ide> const module = new CJSModule('<repl>'); <ide> module.paths = CJSModule._resolveLookupPaths('<repl>', parentModule) || []; <ide> <del> Object.defineProperty(context, 'module', { <add> ObjectDefineProperty(context, 'module', { <ide> configurable: true, <ide> writable: true, <ide> value: module <ide> }); <del> Object.defineProperty(context, 'require', { <add> ObjectDefineProperty(context, 'require', { <ide> configurable: true, <ide> writable: true, <ide> value: makeRequireFunction(module) <ide> REPLServer.prototype.resetContext = function() { <ide> this.lines = []; <ide> this.lines.level = []; <ide> <del> Object.defineProperty(this.context, '_', { <add> ObjectDefineProperty(this.context, '_', { <ide> configurable: true, <ide> get: () => this.last, <ide> set: (value) => { <ide> REPLServer.prototype.resetContext = function() { <ide> } <ide> }); <ide> <del> Object.defineProperty(this.context, '_error', { <add> ObjectDefineProperty(this.context, '_error', { <ide> configurable: true, <ide> get: () => this.lastError, <ide> set: (value) => { <ide> function ArrayStream() { <ide> this.emit('data', `${data[n]}\n`); <ide> }; <ide> } <del>Object.setPrototypeOf(ArrayStream.prototype, Stream.prototype); <del>Object.setPrototypeOf(ArrayStream, Stream); 
<add>ObjectSetPrototypeOf(ArrayStream.prototype, Stream.prototype); <add>ObjectSetPrototypeOf(ArrayStream, Stream); <ide> ArrayStream.prototype.readable = true; <ide> ArrayStream.prototype.writable = true; <ide> ArrayStream.prototype.resume = function() {}; <ide> function complete(line, callback) { <ide> let filter; <ide> let match = line.match(/^\s*\.(\w*)$/); <ide> if (match) { <del> completionGroups.push(Object.keys(this.commands)); <add> completionGroups.push(ObjectKeys(this.commands)); <ide> completeOn = match[1]; <ide> if (match[1].length) { <ide> filter = match[1]; <ide> function complete(line, callback) { <ide> completionGroupsLoaded(); <ide> } else if (match = line.match(requireRE)) { <ide> // require('...<Tab>') <del> const exts = Object.keys(this.context.require.extensions); <add> const exts = ObjectKeys(this.context.require.extensions); <ide> const indexRe = new RegExp('^index(?:' + exts.map(regexpEscape).join('|') + <ide> ')$'); <ide> const versionedFileNamesRe = /-\d+\.\d+/; <ide> function complete(line, callback) { <ide> if (this.useGlobal || vm.isContext(this.context)) { <ide> completionGroups.push(getGlobalLexicalScopeNames(this[kContextId])); <ide> let contextProto = this.context; <del> while (contextProto = Object.getPrototypeOf(contextProto)) { <add> while (contextProto = ObjectGetPrototypeOf(contextProto)) { <ide> completionGroups.push( <ide> filteredOwnPropertyNames.call(this, contextProto)); <ide> } <ide> function complete(line, callback) { <ide> let sentinel = 5; <ide> let p; <ide> if (typeof obj === 'object' || typeof obj === 'function') { <del> p = Object.getPrototypeOf(obj); <add> p = ObjectGetPrototypeOf(obj); <ide> } else { <ide> p = obj.constructor ? obj.constructor.prototype : null; <ide> } <ide> while (p !== null) { <ide> memberGroups.push(filteredOwnPropertyNames.call(this, p)); <del> p = Object.getPrototypeOf(p); <add> p = ObjectGetPrototypeOf(p); <ide> // Circular refs possible? Let's guard against that. 
<ide> sentinel--; <ide> if (sentinel <= 0) { <ide> function complete(line, callback) { <ide> group.sort(); <ide> for (let j = group.length - 1; j >= 0; j--) { <ide> const c = group[j]; <del> if (!ObjectPrototype.hasOwnProperty(uniq, c)) { <add> if (!ObjectPrototypeHasOwnProperty(uniq, c)) { <ide> completions.unshift(c); <ide> uniq[c] = true; <ide> } <ide> function defineDefaultCommands(repl) { <ide> repl.defineCommand('help', { <ide> help: 'Print this help message', <ide> action: function() { <del> const names = Object.keys(this.commands).sort(); <add> const names = ObjectKeys(this.commands).sort(); <ide> const longestNameLength = names.reduce( <del> (max, name) => Math.max(max, name.length), <add> (max, name) => MathMax(max, name.length), <ide> 0 <ide> ); <ide> for (let n = 0; n < names.length; n++) { <ide> function regexpEscape(s) { <ide> function Recoverable(err) { <ide> this.err = err; <ide> } <del>Object.setPrototypeOf(Recoverable.prototype, SyntaxError.prototype); <del>Object.setPrototypeOf(Recoverable, SyntaxError); <add>ObjectSetPrototypeOf(Recoverable.prototype, SyntaxError.prototype); <add>ObjectSetPrototypeOf(Recoverable, SyntaxError); <ide> exports.Recoverable = Recoverable; <ide><path>lib/string_decoder.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperties, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { <ide> StringDecoder.prototype.text = function text(buf, offset) { <ide> return this.write(buf.slice(offset)); <ide> }; <ide> <del>Object.defineProperties(StringDecoder.prototype, { <add>ObjectDefineProperties(StringDecoder.prototype, { <ide> lastChar: { <ide> configurable: true, <ide> enumerable: true, <ide><path>lib/timers.js <ide> <ide> 'use strict'; <ide> <del>const { Math } = primordials; <add>const { <add> MathTrunc, <add>} = primordials; <ide> <ide> const { <ide> immediateInfo, <ide> function unenroll(item) { <ide> // That function could then be used by http and other similar modules. <ide> if (item[kRefed]) { <ide> // Compliment truncation during insert(). 
<del> const msecs = Math.trunc(item._idleTimeout); <add> const msecs = MathTrunc(item._idleTimeout); <ide> const list = timerListMap[msecs]; <ide> if (list !== undefined && L.isEmpty(list)) { <ide> debug('unenroll: list empty'); <ide><path>lib/tls.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectDefineProperty, <add> ObjectFreeze, <add>} = primordials; <ide> <ide> const { <ide> ERR_TLS_CERT_ALTNAME_INVALID, <ide> exports.getCiphers = internalUtil.cachedResult( <ide> let rootCertificates; <ide> <ide> function cacheRootCertificates() { <del> rootCertificates = Object.freeze(getRootCertificates()); <add> rootCertificates = ObjectFreeze(getRootCertificates()); <ide> } <ide> <del>Object.defineProperty(exports, 'rootCertificates', { <add>ObjectDefineProperty(exports, 'rootCertificates', { <ide> configurable: false, <ide> enumerable: true, <ide> get: () => { <ide><path>lib/tty.js <ide> <ide> 'use strict'; <ide> <del>const { Object } = primordials; <add>const { <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const net = require('net'); <ide> const { TTY, isTTY } = internalBinding('tty_wrap'); <ide> function ReadStream(fd, options) { <ide> this.isTTY = true; <ide> } <ide> <del>Object.setPrototypeOf(ReadStream.prototype, net.Socket.prototype); <del>Object.setPrototypeOf(ReadStream, net.Socket); <add>ObjectSetPrototypeOf(ReadStream.prototype, net.Socket.prototype); <add>ObjectSetPrototypeOf(ReadStream, net.Socket); <ide> <ide> ReadStream.prototype.setRawMode = function(flag) { <ide> flag = !!flag; <ide> function WriteStream(fd) { <ide> } <ide> } <ide> <del>Object.setPrototypeOf(WriteStream.prototype, net.Socket.prototype); <del>Object.setPrototypeOf(WriteStream, net.Socket); <add>ObjectSetPrototypeOf(WriteStream.prototype, net.Socket.prototype); <add>ObjectSetPrototypeOf(WriteStream, net.Socket); <ide> <ide> WriteStream.prototype.isTTY = true; <ide> <ide><path>lib/url.js <ide> <ide> 'use strict'; <ide> <del>const { Object, SafeSet } = primordials; <add>const { <add> ObjectCreate, <add> ObjectKeys, <add> SafeSet, <add>} = primordials; <ide> <ide> const { toASCII } = require('internal/idna'); <ide> const { encodeStr, hexTable } = require('internal/querystring'); <ide> Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { <ide> } <ide> } else if (parseQueryString) { <ide> this.search = null; <del> this.query = Object.create(null); <add> this.query = ObjectCreate(null); <ide> } <ide> return this; <ide> } <ide> Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { <ide> } else if (parseQueryString) { <ide> // No query string, but parseQueryString still requested <ide> this.search = null; <del> this.query = Object.create(null); <add> this.query = ObjectCreate(null); <ide> } <ide> <ide> const useQuestionIdx = <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> } <ide> <ide> const result = new Url(); <del> const tkeys = Object.keys(this); <add> const tkeys = ObjectKeys(this); <ide> for (let tk = 0; tk < tkeys.length; tk++) { <ide> const tkey = tkeys[tk]; <ide> result[tkey] = this[tkey]; <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> // Hrefs like //foo/bar always cut to the protocol. 
<ide> if (relative.slashes && !relative.protocol) { <ide> // Take everything except the protocol from relative <del> const rkeys = Object.keys(relative); <add> const rkeys = ObjectKeys(relative); <ide> for (let rk = 0; rk < rkeys.length; rk++) { <ide> const rkey = rkeys[rk]; <ide> if (rkey !== 'protocol') <ide> Url.prototype.resolveObject = function resolveObject(relative) { <ide> // because that's known to be hostless. <ide> // anything else is assumed to be absolute. <ide> if (!slashedProtocol.has(relative.protocol)) { <del> const keys = Object.keys(relative); <add> const keys = ObjectKeys(relative); <ide> for (let v = 0; v < keys.length; v++) { <ide> const k = keys[v]; <ide> result[k] = relative[k]; <ide><path>lib/util.js <ide> <ide> 'use strict'; <ide> <del>const { Object, ObjectPrototype, Reflect } = primordials; <add>const { <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectGetOwnPropertyDescriptors, <add> ObjectKeys, <add> ObjectPrototypeToString, <add> ObjectSetPrototypeOf, <add> ReflectApply, <add>} = primordials; <ide> <ide> const { <ide> codes: { <ide> function isObject(arg) { <ide> } <ide> <ide> function isError(e) { <del> return ObjectPrototype.toString(e) === '[object Error]' || e instanceof Error; <add> return ObjectPrototypeToString(e) === '[object Error]' || e instanceof Error; <ide> } <ide> <ide> function isFunction(arg) { <ide> function inherits(ctor, superCtor) { <ide> throw new ERR_INVALID_ARG_TYPE('superCtor.prototype', <ide> 'Object', superCtor.prototype); <ide> } <del> Object.defineProperty(ctor, 'super_', { <add> ObjectDefineProperty(ctor, 'super_', { <ide> value: superCtor, <ide> writable: true, <ide> configurable: true <ide> }); <del> Object.setPrototypeOf(ctor.prototype, superCtor.prototype); <add> ObjectSetPrototypeOf(ctor.prototype, superCtor.prototype); <ide> } <ide> <ide> function _extend(target, source) { <ide> // Don't do anything if source isn't an object <ide> if (source === null || typeof source !== 'object') return target; <ide> <del> const keys = Object.keys(source); <add> const keys = ObjectKeys(source); <ide> let i = keys.length; <ide> while (i--) { <ide> target[keys[i]] = source[keys[i]]; <ide> function callbackify(original) { <ide> if (typeof maybeCb !== 'function') { <ide> throw new ERR_INVALID_ARG_TYPE('last argument', 'Function', maybeCb); <ide> } <del> const cb = (...args) => { Reflect.apply(maybeCb, this, args); }; <add> const cb = (...args) => { ReflectApply(maybeCb, this, args); }; <ide> // In true node style we process the callback on `nextTick` with all the <ide> // implications (stack, `uncaughtException`, `async_hooks`) <del> Reflect.apply(original, this, args) <add> ReflectApply(original, this, args) <ide> .then((ret) => process.nextTick(cb, null, ret), <ide> (rej) => process.nextTick(callbackifyOnRejected, rej, cb)); <ide> } <ide> <del> const descriptors = Object.getOwnPropertyDescriptors(original); <add> const descriptors = ObjectGetOwnPropertyDescriptors(original); <ide> // It is possible to manipulate a functions `length` or `name` property. This <ide> // guards against the manipulation. 
<ide> if (typeof descriptors.length.value === 'number') { <ide> function callbackify(original) { <ide> if (typeof descriptors.name.value === 'string') { <ide> descriptors.name.value += 'Callbackified'; <ide> } <del> Object.defineProperties(callbackified, descriptors); <add> ObjectDefineProperties(callbackified, descriptors); <ide> return callbackified; <ide> } <ide> <ide><path>lib/v8.js <ide> <ide> 'use strict'; <ide> <del>const { ObjectPrototype } = primordials; <add>const { <add> ObjectPrototypeToString, <add>} = primordials; <ide> <ide> const { Buffer } = require('buffer'); <ide> const { validateString } = require('internal/validators'); <ide> const arrayBufferViewTypeToIndex = new Map(); <ide> { <ide> const dummy = new ArrayBuffer(); <ide> for (const [i, ctor] of arrayBufferViewTypes.entries()) { <del> const tag = ObjectPrototype.toString(new ctor(dummy)); <add> const tag = ObjectPrototypeToString(new ctor(dummy)); <ide> arrayBufferViewTypeToIndex.set(tag, i); <ide> } <ide> } <ide> class DefaultSerializer extends Serializer { <ide> if (abView.constructor === Buffer) { <ide> i = bufferConstructorIndex; <ide> } else { <del> const tag = ObjectPrototype.toString(abView); <add> const tag = ObjectPrototypeToString(abView); <ide> i = arrayBufferViewTypeToIndex.get(tag); <ide> <ide> if (i === undefined) { <ide><path>lib/vm.js <ide> <ide> 'use strict'; <ide> <del>const { Array, ArrayPrototype } = primordials; <add>const { <add> ArrayIsArray, <add> ArrayPrototypeForEach, <add>} = primordials; <ide> <ide> const { <ide> ContextifyScript, <ide> function runInThisContext(code, options) { <ide> function compileFunction(code, params, options = {}) { <ide> validateString(code, 'code'); <ide> if (params !== undefined) { <del> if (!Array.isArray(params)) { <add> if (!ArrayIsArray(params)) { <ide> throw new ERR_INVALID_ARG_TYPE('params', 'Array', params); <ide> } <del> ArrayPrototype.forEach(params, <del> (param, i) => validateString(param, `params[${i}]`)); <add> ArrayPrototypeForEach(params, <add> (param, i) => validateString(param, `params[${i}]`)); <ide> } <ide> <ide> const { <ide> function compileFunction(code, params, options = {}) { <ide> ); <ide> } <ide> } <del> if (!Array.isArray(contextExtensions)) { <add> if (!ArrayIsArray(contextExtensions)) { <ide> throw new ERR_INVALID_ARG_TYPE( <ide> 'options.contextExtensions', <ide> 'Array', <ide> contextExtensions <ide> ); <ide> } <del> ArrayPrototype.forEach(contextExtensions, (extension, i) => { <add> ArrayPrototypeForEach(contextExtensions, (extension, i) => { <ide> if (typeof extension !== 'object') { <ide> throw new ERR_INVALID_ARG_TYPE( <ide> `options.contextExtensions[${i}]`, <ide><path>lib/zlib.js <ide> <ide> 'use strict'; <ide> <del>const { Math, Object } = primordials; <add>const { <add> MathMax, <add> ObjectDefineProperties, <add> ObjectDefineProperty, <add> ObjectFreeze, <add> ObjectGetPrototypeOf, <add> ObjectKeys, <add> ObjectSetPrototypeOf, <add>} = primordials; <ide> <ide> const { <ide> codes: { <ide> const codes = { <ide> Z_VERSION_ERROR: constants.Z_VERSION_ERROR <ide> }; <ide> <del>const ckeys = Object.keys(codes); <add>const ckeys = ObjectKeys(codes); <ide> for (var ck = 0; ck < ckeys.length; ck++) { <ide> var ckey = ckeys[ck]; <ide> codes[codes[ckey]] = ckey; <ide> function zlibBuffer(engine, buffer, callback) { <ide> // Streams do not support non-Buffer ArrayBufferViews yet. Convert it to a <ide> // Buffer without copying. 
<ide> if (isArrayBufferView(buffer) && <del> Object.getPrototypeOf(buffer) !== Buffer.prototype) { <add> ObjectGetPrototypeOf(buffer) !== Buffer.prototype) { <ide> buffer = Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); <ide> } else if (isAnyArrayBuffer(buffer)) { <ide> buffer = Buffer.from(buffer); <ide> function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { <ide> this.once('end', this.close); <ide> this._info = opts && opts.info; <ide> } <del>Object.setPrototypeOf(ZlibBase.prototype, Transform.prototype); <del>Object.setPrototypeOf(ZlibBase, Transform); <add>ObjectSetPrototypeOf(ZlibBase.prototype, Transform.prototype); <add>ObjectSetPrototypeOf(ZlibBase, Transform); <ide> <del>Object.defineProperty(ZlibBase.prototype, '_closed', { <add>ObjectDefineProperty(ZlibBase.prototype, '_closed', { <ide> configurable: true, <ide> enumerable: true, <ide> get() { <ide> Object.defineProperty(ZlibBase.prototype, '_closed', { <ide> // perspective, but it is inconsistent with all other streams exposed by Node.js <ide> // that have this concept, where it stands for the number of bytes read <ide> // *from* the stream (that is, net.Socket/tls.Socket & file system streams). <del>Object.defineProperty(ZlibBase.prototype, 'bytesRead', { <add>ObjectDefineProperty(ZlibBase.prototype, 'bytesRead', { <ide> configurable: true, <ide> enumerable: true, <ide> get: deprecate(function() { <ide> function Zlib(opts, mode) { <ide> this._level = level; <ide> this._strategy = strategy; <ide> } <del>Object.setPrototypeOf(Zlib.prototype, ZlibBase.prototype); <del>Object.setPrototypeOf(Zlib, ZlibBase); <add>ObjectSetPrototypeOf(Zlib.prototype, ZlibBase.prototype); <add>ObjectSetPrototypeOf(Zlib, ZlibBase); <ide> <ide> // This callback is used by `.params()` to wait until a full flush happened <ide> // before adjusting the parameters. 
In particular, the call to the native <ide> function Deflate(opts) { <ide> return new Deflate(opts); <ide> Zlib.call(this, opts, DEFLATE); <ide> } <del>Object.setPrototypeOf(Deflate.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Deflate, Zlib); <add>ObjectSetPrototypeOf(Deflate.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Deflate, Zlib); <ide> <ide> function Inflate(opts) { <ide> if (!(this instanceof Inflate)) <ide> return new Inflate(opts); <ide> Zlib.call(this, opts, INFLATE); <ide> } <del>Object.setPrototypeOf(Inflate.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Inflate, Zlib); <add>ObjectSetPrototypeOf(Inflate.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Inflate, Zlib); <ide> <ide> function Gzip(opts) { <ide> if (!(this instanceof Gzip)) <ide> return new Gzip(opts); <ide> Zlib.call(this, opts, GZIP); <ide> } <del>Object.setPrototypeOf(Gzip.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Gzip, Zlib); <add>ObjectSetPrototypeOf(Gzip.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Gzip, Zlib); <ide> <ide> function Gunzip(opts) { <ide> if (!(this instanceof Gunzip)) <ide> return new Gunzip(opts); <ide> Zlib.call(this, opts, GUNZIP); <ide> } <del>Object.setPrototypeOf(Gunzip.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Gunzip, Zlib); <add>ObjectSetPrototypeOf(Gunzip.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Gunzip, Zlib); <ide> <ide> function DeflateRaw(opts) { <ide> if (opts && opts.windowBits === 8) opts.windowBits = 9; <ide> if (!(this instanceof DeflateRaw)) <ide> return new DeflateRaw(opts); <ide> Zlib.call(this, opts, DEFLATERAW); <ide> } <del>Object.setPrototypeOf(DeflateRaw.prototype, Zlib.prototype); <del>Object.setPrototypeOf(DeflateRaw, Zlib); <add>ObjectSetPrototypeOf(DeflateRaw.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(DeflateRaw, Zlib); <ide> <ide> function InflateRaw(opts) { <ide> if (!(this instanceof InflateRaw)) <ide> return new InflateRaw(opts); <ide> Zlib.call(this, opts, INFLATERAW); <ide> } <del>Object.setPrototypeOf(InflateRaw.prototype, Zlib.prototype); <del>Object.setPrototypeOf(InflateRaw, Zlib); <add>ObjectSetPrototypeOf(InflateRaw.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(InflateRaw, Zlib); <ide> <ide> function Unzip(opts) { <ide> if (!(this instanceof Unzip)) <ide> return new Unzip(opts); <ide> Zlib.call(this, opts, UNZIP); <ide> } <del>Object.setPrototypeOf(Unzip.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Unzip, Zlib); <add>ObjectSetPrototypeOf(Unzip.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Unzip, Zlib); <ide> <ide> function createConvenienceMethod(ctor, sync) { <ide> if (sync) { <ide> function createConvenienceMethod(ctor, sync) { <ide> } <ide> } <ide> <del>const kMaxBrotliParam = Math.max(...Object.keys(constants).map((key) => { <add>const kMaxBrotliParam = MathMax(...ObjectKeys(constants).map((key) => { <ide> return key.startsWith('BROTLI_PARAM_') ? 
constants[key] : 0; <ide> })); <ide> <ide> function Brotli(opts, mode) { <ide> <ide> brotliInitParamsArray.fill(-1); <ide> if (opts && opts.params) { <del> for (const origKey of Object.keys(opts.params)) { <add> for (const origKey of ObjectKeys(opts.params)) { <ide> const key = +origKey; <ide> if (Number.isNaN(key) || key < 0 || key > kMaxBrotliParam || <ide> (brotliInitParamsArray[key] | 0) !== -1) { <ide> function Brotli(opts, mode) { <ide> <ide> ZlibBase.call(this, opts, mode, handle, brotliDefaultOpts); <ide> } <del>Object.setPrototypeOf(Brotli.prototype, Zlib.prototype); <del>Object.setPrototypeOf(Brotli, Zlib); <add>ObjectSetPrototypeOf(Brotli.prototype, Zlib.prototype); <add>ObjectSetPrototypeOf(Brotli, Zlib); <ide> <ide> function BrotliCompress(opts) { <ide> if (!(this instanceof BrotliCompress)) <ide> return new BrotliCompress(opts); <ide> Brotli.call(this, opts, BROTLI_ENCODE); <ide> } <del>Object.setPrototypeOf(BrotliCompress.prototype, Brotli.prototype); <del>Object.setPrototypeOf(BrotliCompress, Brotli); <add>ObjectSetPrototypeOf(BrotliCompress.prototype, Brotli.prototype); <add>ObjectSetPrototypeOf(BrotliCompress, Brotli); <ide> <ide> function BrotliDecompress(opts) { <ide> if (!(this instanceof BrotliDecompress)) <ide> return new BrotliDecompress(opts); <ide> Brotli.call(this, opts, BROTLI_DECODE); <ide> } <del>Object.setPrototypeOf(BrotliDecompress.prototype, Brotli.prototype); <del>Object.setPrototypeOf(BrotliDecompress, Brotli); <add>ObjectSetPrototypeOf(BrotliDecompress.prototype, Brotli.prototype); <add>ObjectSetPrototypeOf(BrotliDecompress, Brotli); <ide> <ide> <ide> function createProperty(ctor) { <ide> function createProperty(ctor) { <ide> <ide> // Legacy alias on the C++ wrapper object. This is not public API, so we may <ide> // want to runtime-deprecate it at some point. There's no hurry, though. <del>Object.defineProperty(binding.Zlib.prototype, 'jsref', { <add>ObjectDefineProperty(binding.Zlib.prototype, 'jsref', { <ide> get() { return this[owner_symbol]; }, <ide> set(v) { return this[owner_symbol] = v; } <ide> }); <ide> module.exports = { <ide> brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true), <ide> }; <ide> <del>Object.defineProperties(module.exports, { <add>ObjectDefineProperties(module.exports, { <ide> createDeflate: createProperty(Deflate), <ide> createInflate: createProperty(Inflate), <ide> createDeflateRaw: createProperty(DeflateRaw), <ide> Object.defineProperties(module.exports, { <ide> codes: { <ide> enumerable: true, <ide> writable: false, <del> value: Object.freeze(codes) <add> value: ObjectFreeze(codes) <ide> } <ide> }); <ide> <ide> // These should be considered deprecated <ide> // expose all the zlib constants <del>const bkeys = Object.keys(constants); <add>const bkeys = ObjectKeys(constants); <ide> for (var bk = 0; bk < bkeys.length; bk++) { <ide> var bkey = bkeys[bk]; <ide> if (bkey.startsWith('BROTLI')) continue; <del> Object.defineProperty(module.exports, bkey, { <add> ObjectDefineProperty(module.exports, bkey, { <ide> enumerable: false, value: constants[bkey], writable: false <ide> }); <ide> }
117
Java
Java
fix links for single class
6b07923f2c51d76c45e3879f9c7b9d9ed9ccfb93
<ide><path>src/main/java/io/reactivex/Single.java <ide> * <p> <ide> * <img width="640" height="301" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.legend.png" alt=""> <ide> * <p> <del> * For more information see the <a href="http://reactivex.io/documentation/observable.html">ReactiveX <add> * For more information see the <a href="http://reactivex.io/documentation/single.html">ReactiveX <ide> * documentation</a>. <ide> * <ide> * @param <T> <ide> public final <R> R to(Function<? super Single<T>, R> convert) { <ide> * <ide> * @return a {@link Completable} that calls {@code onComplete} on it's subscriber when the source {@link Single} <ide> * calls {@code onSuccess}. <del> * @see <a href="http://reactivex.io/documentation/completable.html">ReactiveX documentation: Completable</a> <ide> * @since 2.0 <ide> * @deprecated see {@link #ignoreElement()} instead, will be removed in 3.0 <ide> */ <ide> public final Completable toCompletable() { <ide> * <ide> * @return a {@link Completable} that calls {@code onComplete} on it's observer when the source {@link Single} <ide> * calls {@code onSuccess}. <del> * @see <a href="http://reactivex.io/documentation/completable.html">ReactiveX documentation: Completable</a> <ide> * @since 2.1.13 <ide> */ <ide> @CheckReturnValue
1
Text
Text
fix typos in essentials tutorial
d0ef57cd65ca392f7de9377446aa8e16494a96ab
<ide><path>docs/tutorials/essentials/part-5-async-logic.md <ide> export const fetchPosts = createAsyncThunk('posts/fetchPosts', async () => { <ide> <ide> The payload creator will usually make an AJAX call of some kind, and can either return the `Promise` from the AJAX call directly, or extract some data from the API response and return that. We typically write this using the JS `async/await` syntax, which lets us write functions that use `Promise`s while using standard `try/catch` logic instead of `somePromise.then()` chains. <ide> <del>In this case, we pass in `'posts/fetchPosts'` as the action type prefix. Our payload creation callback waits for the API call to return a response. The response object looks like `{posts: []}`, and we want our dispatched Redux action to have a payload that is _just_ the array of posts. So, we extract `response.data`, and return that from the callback. <add>In this case, we pass in `'posts/fetchPosts'` as the action type prefix. Our payload creation callback waits for the API call to return a response. The response object looks like `{data: []}`, and we want our dispatched Redux action to have a payload that is _just_ the array of posts. So, we extract `response.data`, and return that from the callback. <ide> <ide> If we try calling `dispatch(fetchPosts())`, the `fetchPosts` thunk will first dispatch an action type of `'posts/fetchPosts/pending'`: <ide> <ide><path>docs/tutorials/essentials/part-8-rtk-query-advanced.md <ide> export const apiSlice = createApi({ <ide> // highlight-start <ide> editPost: builder.mutation({ <ide> query: post => ({ <del> url: `posts/${post.id}`, <add> url: `/posts/${post.id}`, <ide> method: 'PATCH', <ide> body: post <ide> }) <ide> This dispatch happens automatically inside the query hooks, but we can start it <ide> <ide> :::caution <ide> <del>Manually dispatching an RTKQ request thunk will create a subscription entry, but it's then up to you to [unsubscribe from that data later](https://redux-toolkit.js.org/rtk-query/usage/usage-without-react-hooks#removing-a-subscription) - otherwise the data stays in the cache permanently. In this case, we always need user data, so we can skip unsuscribing. <add>Manually dispatching an RTKQ request thunk will create a subscription entry, but it's then up to you to [unsubscribe from that data later](https://redux-toolkit.js.org/rtk-query/usage/usage-without-react-hooks#removing-a-subscription) - otherwise the data stays in the cache permanently. In this case, we always need user data, so we can skip unsubscribing. <ide> <ide> ::: <ide>
2
PHP
PHP
apply fixes from styleci
df0ede93019c1193efd8c3e933b856bfea830475
<ide><path>src/Illuminate/Console/OutputStyle.php <ide> public function isDebug() <ide> { <ide> return $this->output->isDebug(); <ide> } <del> <add> <ide> /** <ide> * Get the underlying Symfony output implementation. <ide> *
1
Go
Go
use unlocked version of changes for getimage
82bdd88e9c9db40ee8072f7c4c2832dfb3f73823
<ide><path>daemon/container.go <ide> func (container *Container) Mount() error { <ide> return container.daemon.Mount(container) <ide> } <ide> <add>func (container *Container) changes() ([]archive.Change, error) { <add> return container.daemon.Changes(container) <add>} <add> <ide> func (container *Container) Changes() ([]archive.Change, error) { <ide> container.Lock() <ide> defer container.Unlock() <del> return container.daemon.Changes(container) <add> return container.changes() <ide> } <ide> <ide> func (container *Container) GetImage() (*image.Image, error) { <ide> func (container *Container) GetSize() (int64, int64) { <ide> sizeRw = -1 <ide> } <ide> } else { <del> changes, _ := container.Changes() <add> changes, _ := container.changes() <ide> if changes != nil { <ide> sizeRw = archive.ChangesSize(container.basefs, changes) <ide> } else {
1
Text
Text
update changelog for 1.8.0-beta.2 and 1.8.0-beta.3
f4ea6733ee3e0d8bbc9e481a660693381f0f7f34
<ide><path>CHANGELOG.md <ide> # Ember Changelog <ide> <add>### Ember 1.8.0-beta.3 (September, 27, 2014) <add> <add>* [BUGFIX] Use contextualElements to properly handle omitted optional start tags. <add>* [BUGFIX] Ensure that `Route.prototype.activate` is not retriggered when the model for the current route changes. <add>* [PERF] Fix optimization bailouts for `{{view}}` helper. <add>* [BUGFIX] Add `attributeBindings` for `lang` and `dir` (for bidirectional language support) in `Ember.TextField` and `Ember.TextAra`. <add>* [BUGFIX] Fix finishChains for all chains that reference an obj not just the ones rooted at that object. <add>* [BUGFIX] Refactor ES3 `Ember.keys` implementation. <add>* Rewrite Ember.Map to be faster and closer to ES6 implementation: <add> * [PERF + ES6] No longer clone array before enumeration (dramatically reduce allocations) <add> * [PERF] Don’t Rebind the callback of forEach if not needed <add> * [PERF + ES6] No longer allow Map#length to be bindable <add> * [PERF] Don’t double guid keys, as they are passed from map to ordered set (add/remove) <add> * [ES6] Deprecate Map#remove in-favor of the es6 Map#delete <add> * [ES6] Error if callback is not a function <add> * [ES6] Map#set should return the map. This enables chaining map.`map.set(‘foo’,1).set(‘bar’,3);` etc. <add> * [ES6] Remove length in-favor of size. <add> * [ES6] Throw if constructor is invoked without new <add> * [ES6] Make inheritance work correctly <add> <add> <add>### Ember 1.8.0-beta.2 (September, 20, 2014) <add> <add>* [BUGFIX] Allow for bound property {{input}} type. <add>* [BUGFIX] Ensure pushUnique targetQueue is cleared by flush. <add>* [BUGFIX] instrument should still call block even without subscribers. <add>* [BUGFIX] Remove uneeded normalization in query param controller lookup. <add>* [BUGFIX] Do not use defineProperty on each View instance. <add>* [PERF] Speedup `watchKey` by preventing for in related deopt. <add>* [PERF] Change `ENV.MANDATORY_SETTER` to FEATURES so it can be compiled out of production builds. <add>* [PERF] Object.create(null) in Ember.inspect. <add>* [PERF] Extracts computed property set into a separate function. <add>* [BUGFIX] Make `GUID_KEY = intern(GUID_KEY)` actually work on ES3. <add>* [BUGFIX] Ensure nested routes can inherit model from parent. <add> <ide> ### Ember 1.8.0-beta.1 (August 20, 2014) <ide> <ide> * Remove `metamorph` in favor of `morph` package (removes the need for `<script>` tags in the DOM).
1
Python
Python
update a link to point to the correct page
e17bc757be2792d0be4d1f502f13bc2349f937cf
<ide><path>libcloud/security.py <ide> ) <ide> <ide> CA_CERTS_UNAVAILABLE_ERROR_MSG = ( <del> 'No CA Certificates were found in CA_CERTS_PATH. For information on' <add> 'No CA Certificates were found in CA_CERTS_PATH. For information on ' <ide> 'how to get required certificate files, please visit ' <del> 'http://libcloud.apache.org/docs/ssl-certificate-validation.html' <add> 'https://libcloud.readthedocs.org/en/latest/other/' <add> 'ssl-certificate-validation.html' <ide> ) <ide> <ide> VERIFY_SSL_DISABLED_MSG = (
1
Javascript
Javascript
fix typo in new return value of bidi function
3a8426e1792ec3cfef2f93588c7cd50d04e4c8cd
<ide><path>src/bidi.js <ide> var bidi = PDFJS.bidi = (function bidiClosure() { <ide> result += ch; <ide> } <ide> <del> return new bidiResult(str, direction); <add> return new bidiResult(result, direction); <ide> } <ide> <ide> return bidi;
1
Python
Python
add files via upload
c08925adb00c905808273353f135f7b752b81486
<ide><path>samples/outreach/blogs/Blog_Custom_Estimators.py <add># Copyright 2017 The TensorFlow Authors. All Rights Reserved. <add># <add># Licensed under the Apache License, Version 2.0 (the "License"); <add># you may not use this file except in compliance with the License. <add># You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, software <add># Distributed_TensorFlow under the License is Distributed_TensorFlow on an "AS IS" BASIS, <add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add># See the License for the specific language governing permissions and <add># limitations under the License. <add># ============================================================================== <add> <add># This is the complete code for the following blogpost: <add># https://developers.googleblog.com/2017/09/introducing-tensorflow-datasets.html <add># (https://goo.gl/Ujm2Ep) <add> <add>import tensorflow as tf <add>import os <add>import sys <add> <add>if sys.version_info < (3, 0, 0): <add> from urllib import urlopen <add>else: <add> from urllib.request import urlopen <add> <add>tf.logging.set_verbosity(tf.logging.INFO) <add> <add># Check that we have correct TensorFlow version installed <add>tf_version = tf.__version__ <add>tf.logging.info("TensorFlow version: {}".format(tf_version)) <add>assert "1.4" <= tf_version, "TensorFlow r1.4 or later is needed" <add> <add># Windows users: You only need to change PATH, rest is platform independent <add>PATH = "/tmp/tf_custom_estimators" <add> <add># Fetch and store Training and Test dataset files <add>PATH_DATASET = PATH + os.sep + "dataset" <add>FILE_TRAIN = PATH_DATASET + os.sep + "iris_training.csv" <add>FILE_TEST = PATH_DATASET + os.sep + "iris_test.csv" <add>URL_TRAIN = "http://download.tensorflow.org/data/iris_training.csv" <add>URL_TEST = "http://download.tensorflow.org/data/iris_test.csv" <add> <add>def downloadDataset(url, file): <add> if not os.path.exists(PATH_DATASET): <add> os.makedirs(PATH_DATASET) <add> if not os.path.exists(file): <add> data = urlopen(url).read() <add> with open(file, "wb") as f: <add> f.write(data) <add> f.close() <add>downloadDataset(URL_TRAIN, FILE_TRAIN) <add>downloadDataset(URL_TEST, FILE_TEST) <add> <add># The CSV features in our training & test data <add>feature_names = [ <add> 'SepalLength', <add> 'SepalWidth', <add> 'PetalLength', <add> 'PetalWidth'] <add> <add># Create an input function reading a file using the Dataset API <add># Then provide the results to the Estimator API <add>def my_input_fn(file_path, repeat_count=1, shuffle_count=1): <add> def decode_csv(line): <add> parsed_line = tf.decode_csv(line, [[0.], [0.], [0.], [0.], [0]]) <add> label = parsed_line[-1:] # Last element is the label <add> del parsed_line[-1] # Delete last element <add> features = parsed_line # Everything but last elements are the features <add> d = dict(zip(feature_names, features)), label <add> return d <add> <add> dataset = (tf.data.TextLineDataset(file_path) # Read text file <add> .skip(1) # Skip header row <add> .map(decode_csv, num_parallel_calls=4) # Decode each line <add> .cache() # Warning: Caches entire dataset, can cause out of memory <add> .shuffle(shuffle_count) # Randomize elems (1 == no operation) <add> .repeat(repeat_count) # Repeats dataset this # times <add> .batch(32) <add> .prefetch(1) # Make sure you always have 1 batch ready to serve <add> ) <add> iterator = 
dataset.make_one_shot_iterator() <add> batch_features, batch_labels = iterator.get_next() <add> return batch_features, batch_labels <add> <add>def my_model_fn( <add> features, # This is batch_features from input_fn <add> labels, # This is batch_labels from input_fn <add> mode): # And instance of tf.estimator.ModeKeys, see below <add> <add> if mode == tf.estimator.ModeKeys.PREDICT: <add> tf.logging.info("my_model_fn: PREDICT, {}".format(mode)) <add> elif mode == tf.estimator.ModeKeys.EVAL: <add> tf.logging.info("my_model_fn: EVAL, {}".format(mode)) <add> elif mode == tf.estimator.ModeKeys.TRAIN: <add> tf.logging.info("my_model_fn: TRAIN, {}".format(mode)) <add> <add> # All our inputs are feature columns of type numeric_column <add> feature_columns = [ <add> tf.feature_column.numeric_column(feature_names[0]), <add> tf.feature_column.numeric_column(feature_names[1]), <add> tf.feature_column.numeric_column(feature_names[2]), <add> tf.feature_column.numeric_column(feature_names[3]) <add> ] <add> <add> # Create the layer of input <add> input_layer = tf.feature_column.input_layer(features, feature_columns) <add> <add> # Definition of hidden layer: h1 <add> # We implement it as a fully-connected layer (tf.layers.dense) <add> # Has 10 neurons, and uses ReLU as the activation function <add> # Takes input_layer as input <add> # h1 = tf.layers.dense(input_layer, 10, activation=tf.nn.relu) <add> h1 = tf.layers.Dense(10, activation=tf.nn.relu)(input_layer) <add> <add> # Definition of hidden layer: h2 (this is the logits layer) <add> # Similar to h1, but takes h1 as input <add> # h2 = tf.layers.dense(h1, 10, activation=tf.nn.relu) <add> h2 = tf.layers.Dense(10, activation=tf.nn.relu)(h1) <add> <add> # Output 'logits' layer is three number = probability distribution <add> # between Iris Sentosa, Versicolor, and Viginica <add> # logits = tf.layers.dense(h2, 3) <add> logits = tf.layers.Dense(3, activation=tf.nn.relu)(h2) <add> <add> # class_ids will be the model prediction for the class (Iris flower type) <add> # The output node with the highest value is our prediction <add> predictions = { 'class_ids': tf.argmax(input=logits, axis=1) } <add> <add> # 1. Prediction mode <add> # Return our prediction <add> if mode == tf.estimator.ModeKeys.PREDICT: <add> return tf.estimator.EstimatorSpec(mode, predictions=predictions) <add> <add> # Evaluation and Training mode <add> <add> # To calculate the loss, we need to convert our labels <add> # Our input labels have shape: [batch_size, 1] <add> labels = tf.squeeze(labels, 1) # Convert to shape [batch_size] <add> loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) <add> <add> # Calculate the accuracy between the true labels, and our predictions <add> accuracy = tf.metrics.accuracy(labels, predictions['class_ids']) <add> <add> # 2. Evaluation mode <add> # Return our loss (which is used to evaluate our model) <add> # Set the TensorBoard scalar my_accurace to the accuracy <add> # Obs: This function only sets value during mode == ModeKeys.EVAL <add> # To set values during training, see tf.summary.scalar <add> if mode == tf.estimator.ModeKeys.EVAL: <add> return tf.estimator.EstimatorSpec( <add> mode, <add> loss=loss, <add> eval_metric_ops={'my_accuracy': accuracy}) <add> <add> # If mode is not PREDICT nor EVAL, then we must be in TRAIN <add> assert mode == tf.estimator.ModeKeys.TRAIN, "TRAIN is only ModeKey left" <add> <add> # 3. 
Training mode <add> <add> # Default optimizer for DNNClassifier: Adagrad with learning rate=0.05 <add> # Our objective (train_op) is to minimize loss <add> # Provide global step counter (used to count gradient updates) <add> optimizer = tf.train.AdagradOptimizer(0.05) <add> train_op = optimizer.minimize( <add> loss, <add> global_step=tf.train.get_global_step()) <add> <add> # Set the TensorBoard scalar my_accuracy to the accuracy <add> # Obs: This function only sets the value during mode == ModeKeys.TRAIN <add> # To set values during evaluation, see eval_metrics_ops <add> tf.summary.scalar('my_accuracy', accuracy[1]) <add> <add> # Return training operations: loss and train_op <add> return tf.estimator.EstimatorSpec( <add> mode, <add> loss=loss, <add> train_op=train_op) <add> <add># Create a custom estimator using my_model_fn to define the model <add>tf.logging.info("Before classifier construction") <add>classifier = tf.estimator.Estimator( <add> model_fn=my_model_fn, <add> model_dir=PATH) # Path to where checkpoints etc are stored <add>tf.logging.info("...done constructing classifier") <add> <add># 500 epochs = 500 * 120 records [60000] = (500 * 120) / 32 batches = 1875 batches <add># 4 epochs = 4 * 30 records = (4 * 30) / 32 batches = 3.75 batches <add> <add># Train our model, use the previously function my_input_fn <add># Input to training is a file with training example <add># Stop training after 8 iterations of train data (epochs) <add>tf.logging.info("Before classifier.train") <add>classifier.train( <add> input_fn=lambda: my_input_fn(FILE_TRAIN, 500, 256)) <add>tf.logging.info("...done classifier.train") <add> <add># Evaluate our model using the examples contained in FILE_TEST <add># Return value will contain evaluation_metrics such as: loss & average_loss <add>tf.logging.info("Before classifier.evaluate") <add>evaluate_result = classifier.evaluate( <add> input_fn=lambda: my_input_fn(FILE_TEST, 4)) <add>tf.logging.info("...done classifier.evaluate") <add>tf.logging.info("Evaluation results") <add>for key in evaluate_result: <add> tf.logging.info(" {}, was: {}".format(key, evaluate_result[key])) <add> <add># Predict the type of some Iris flowers. <add># Let's predict the examples in FILE_TEST, repeat only once. <add>predict_results = classifier.predict( <add> input_fn=lambda: my_input_fn(FILE_TEST, 1)) <add>tf.logging.info("Prediction on test file") <add>for prediction in predict_results: <add> # Will print the predicted class, i.e: 0, 1, or 2 if the prediction <add> # is Iris Sentosa, Vericolor, Virginica, respectively. 
<add> tf.logging.info("...{}".format(prediction["class_ids"])) <add> <add># Let create a dataset for prediction <add># We've taken the first 3 examples in FILE_TEST <add>prediction_input = [[5.9, 3.0, 4.2, 1.5], # -> 1, Iris Versicolor <add> [6.9, 3.1, 5.4, 2.1], # -> 2, Iris Virginica <add> [5.1, 3.3, 1.7, 0.5]] # -> 0, Iris Sentosa <add> <add>def new_input_fn(): <add> def decode(x): <add> x = tf.split(x, 4) # Need to split into our 4 features <add> return dict(zip(feature_names, x)) # To build a dict of them <add> <add> dataset = tf.data.Dataset.from_tensor_slices(prediction_input) <add> dataset = dataset.map(decode) <add> iterator = dataset.make_one_shot_iterator() <add> next_feature_batch = iterator.get_next() <add> return next_feature_batch, None # In prediction, we have no labels <add> <add># Predict all our prediction_input <add>predict_results = classifier.predict(input_fn=new_input_fn) <add> <add># Print results <add>tf.logging.info("Predictions on memory") <add>for idx, prediction in enumerate(predict_results): <add> type = prediction["class_ids"] # Get the predicted class (index) <add> if type == 0: <add> tf.logging.info("...I think: {}, is Iris Sentosa".format(prediction_input[idx])) <add> elif type == 1: <add> tf.logging.info("...I think: {}, is Iris Versicolor".format(prediction_input[idx])) <add> else: <add> tf.logging.info("...I think: {}, is Iris Virginica".format(prediction_input[idx]))
1
Javascript
Javascript
remove unnecessary default tmpdir value in test
d2c29bda50c4e5579537ab8cb9ac5d1f567da4a8
<ide><path>test/parallel/test-pipe-file-to-http.js <ide> const path = require('path'); <ide> const tmpdir = require('../common/tmpdir'); <ide> tmpdir.refresh(); <ide> <del>const filename = path.join(tmpdir.path || '/tmp', 'big'); <add>const filename = path.join(tmpdir.path, 'big'); <ide> let count = 0; <ide> <ide> const server = http.createServer((req, res) => {
1
Text
Text
fix inconsistent server.listen documentation
278f65351231f9fa034220780fba8aae940c1b9a
<ide><path>doc/api/http.md <ide> added: v0.1.90 <ide> <ide> Stops the server from accepting new connections. See [`net.Server.close()`][]. <ide> <del>### server.listen(handle[, callback]) <del><!-- YAML <del>added: v0.5.10 <del>--> <del> <del>* `handle` {Object} <del>* `callback` {Function} <del> <del>The `handle` object can be set to either a server or socket (anything <del>with an underlying `_handle` member), or a `{fd: <n>}` object. <del> <del>This will cause the server to accept connections on the specified <del>handle, but it is presumed that the file descriptor or handle has <del>already been bound to a port or domain socket. <del> <del>Listening on a file descriptor is not supported on Windows. <del> <del>This function is asynchronous. `callback` will be added as a listener for the <del>[`'listening'`][] event. See also [`net.Server.listen()`][]. <del> <del>Returns `server`. <del> <del>*Note*: The `server.listen()` method can be called again if and only if there was an error <del>during the first `server.listen()` call or `server.close()` has been called. <del>Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. <del> <del>### server.listen(path[, callback]) <del><!-- YAML <del>added: v0.1.90 <del>--> <del> <del>* `path` {string} <del>* `callback` {Function} <del> <del>Start a UNIX socket server listening for connections on the given `path`. <del> <del>This function is asynchronous. `callback` will be added as a listener for the <del>[`'listening'`][] event. See also [`net.Server.listen(path)`][]. <del> <del>*Note*: The `server.listen()` method can be called again if and only if there was an error <del>during the first `server.listen()` call or `server.close()` has been called. <del>Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. <del> <del>### server.listen([port][, hostname][, backlog][, callback]) <del><!-- YAML <del>added: v0.1.90 <del>--> <del> <del>* `port` {number} <del>* `hostname` {string} <del>* `backlog` {number} <del>* `callback` {Function} <del> <del>Begin accepting connections on the specified `port` and `hostname`. If the <del>`hostname` is omitted, the server will accept connections on the <del>[unspecified IPv6 address][] (`::`) when IPv6 is available, or the <del>[unspecified IPv4 address][] (`0.0.0.0`) otherwise. <del> <del>*Note*: In most operating systems, listening to the <del>[unspecified IPv6 address][] (`::`) may cause the `net.Server` to also listen on <del>the [unspecified IPv4 address][] (`0.0.0.0`). <del> <del>Omit the port argument, or use a port value of `0`, to have the operating system <del>assign a random port, which can be retrieved by using `server.address().port` <del>after the `'listening'` event has been emitted. <del> <del>To listen to a unix socket, supply a filename instead of port and hostname. <del> <del>`backlog` is the maximum length of the queue of pending connections. <del>The actual length will be determined by the OS through sysctl settings such as <del>`tcp_max_syn_backlog` and `somaxconn` on linux. The default value of this <del>parameter is 511 (not 512). <del> <del>This function is asynchronous. `callback` will be added as a listener for the <del>[`'listening'`][] event. See also [`net.Server.listen(port)`][]. <add>### server.listen() <ide> <del>*Note*: The `server.listen()` method can be called again if and only if there was an error <del>during the first `server.listen()` call or `server.close()` has been called. <del>Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. 
<add>Starts the HTTP server listening for connections. <add>This method is identical to [`server.listen()`][] from [`net.Server`][]. <ide> <ide> ### server.listening <ide> <!-- YAML <ide> const req = http.request(options, (res) => { <ide> [`response.write(data, encoding)`]: #http_response_write_chunk_encoding_callback <ide> [`response.writeContinue()`]: #http_response_writecontinue <ide> [`response.writeHead()`]: #http_response_writehead_statuscode_statusmessage_headers <add>[`server.listen()`]: net.html#net_server_listen <ide> [`server.timeout`]: #http_server_timeout <ide> [`setHeader(name, value)`]: #http_request_setheader_name_value <ide> [`socket.setKeepAlive()`]: net.html#net_socket_setkeepalive_enable_initialdelay <ide><path>doc/api/https.md <ide> added: v0.3.4 <ide> This class is a subclass of `tls.Server` and emits events same as <ide> [`http.Server`][]. See [`http.Server`][] for more information. <ide> <add>### server.close([callback]) <add><!-- YAML <add>added: v0.1.90 <add>--> <add>- `callback` {Function} <add> <add>See [`server.close()`][`http.close()`] from the HTTP module for details. <add> <add>### server.listen() <add> <add>Starts the HTTPS server listening for encrypted connections. <add>This method is identical to [`server.listen()`][] from [`net.Server`][]. <add> <ide> ### server.setTimeout([msecs][, callback]) <ide> <!-- YAML <ide> added: v0.11.2 <ide> https.createServer(options, (req, res) => { <ide> }).listen(8000); <ide> ``` <ide> <del>### server.close([callback]) <del><!-- YAML <del>added: v0.1.90 <del>--> <del>- `callback` {Function} <del> <del>See [`http.close()`][] for details. <del> <del>### server.listen(handle[, callback]) <del>- `handle` {Object} <del>- `callback` {Function} <del> <del>### server.listen(path[, callback]) <del>- `path` {string} <del>- `callback` {Function} <del> <del>### server.listen([port][, host][, backlog][, callback]) <del>- `port` {number} <del>- `hostname` {string} <del>- `backlog` {number} <del>- `callback` {Function} <del> <del>See [`http.listen()`][] for details. <del> <ide> ## https.get(options[, callback]) <ide> <!-- YAML <ide> added: v0.3.6 <ide> const req = https.request(options, (res) => { <ide> [`http.request()`]: http.html#http_http_request_options_callback <ide> [`https.Agent`]: #https_class_https_agent <ide> [`https.request()`]: #https_https_request_options_callback <add>[`net.Server`]: net.html#net_class_net_server <add>[`server.listen()`]: net.html#net_server_listen <ide> [`tls.connect()`]: tls.html#tls_tls_connect_options_callback <ide> [`tls.createSecureContext()`]: tls.html#tls_tls_createsecurecontext_options <ide> [`tls.createServer()`]: tls.html#tls_tls_createserver_options_secureconnectionlistener <ide><path>doc/api/tls.md <ide> added: v3.0.0 <ide> Returns a `Buffer` instance holding the keys currently used for <ide> encryption/decryption of the [TLS Session Tickets][] <ide> <del>### server.listen(port[, hostname][, callback]) <del><!-- YAML <del>added: v0.3.2 <del>--> <del> <del>* `port` {number} The TCP/IP port on which to begin listening for connections. <del> A value of `0` (zero) will assign a random port. <del>* `hostname` {string} The hostname, IPv4, or IPv6 address on which to begin <del> listening for connections. If `undefined`, the server will accept connections <del> on any IPv6 address (`::`) when IPv6 is available, or any IPv4 address <del> (`0.0.0.0`) otherwise. <del>* `callback` {Function} A callback function to be invoked when the server has <del> begun listening on the `port` and `hostname`. 
<del> <del>The `server.listen()` methods instructs the server to begin accepting <del>connections on the specified `port` and `hostname`. <del> <del>This function operates asynchronously. If the `callback` is given, it will be <del>called when the server has started listening. <add>### server.listen() <ide> <del>See [`net.Server`][] for more information. <add>Starts the server listening for encrypted connections. <add>This method is identical to [`server.listen()`][] from [`net.Server`][]. <ide> <ide> ### server.setTicketKeys(keys) <ide> <!-- YAML <ide> where `secure_socket` has the same API as `pair.cleartext`. <ide> [`net.Server`]: net.html#net_class_net_server <ide> [`net.Socket`]: net.html#net_class_net_socket <ide> [`server.getConnections()`]: net.html#net_server_getconnections_callback <add>[`server.listen()`]: net.html#net_server_listen <ide> [`tls.DEFAULT_ECDH_CURVE`]: #tls_tls_default_ecdh_curve <ide> [`tls.TLSSocket.getPeerCertificate()`]: #tls_tlssocket_getpeercertificate_detailed <ide> [`tls.TLSSocket`]: #tls_class_tls_tlssocket
3
Python
Python
fix doc errors in google provider files.
2bfc53b5eb67406d418371b74dc9bc5a07be238e
<ide><path>airflow/providers/google/cloud/utils/mlengine_operator_utils.py <ide> def create_evaluate_ops( # pylint: disable=too-many-arguments <ide> <ide> Callers will provide two python callables, metric_fn and validate_fn, in <ide> order to customize the evaluation behavior as they wish. <add> <ide> - metric_fn receives a dictionary per instance derived from json in the <ide> batch prediction result. The keys might vary depending on the model. <ide> It should return a tuple of metrics. <ide> def create_evaluate_ops( # pylint: disable=too-many-arguments <ide> <ide> Typical examples are like this: <ide> <del> def get_metric_fn_and_keys(): <del> import math # imports should be outside of the metric_fn below. <del> def error_and_squared_error(inst): <del> label = float(inst['input_label']) <del> classes = float(inst['classes']) # 0 or 1 <del> err = abs(classes-label) <del> squared_err = math.pow(classes-label, 2) <del> return (err, squared_err) # returns a tuple. <del> return error_and_squared_error, ['err', 'mse'] # key order must match. <del> <del> def validate_err_and_count(summary): <del> if summary['err'] > 0.2: <del> raise ValueError('Too high err>0.2; summary=%s' % summary) <del> if summary['mse'] > 0.05: <del> raise ValueError('Too high mse>0.05; summary=%s' % summary) <del> if summary['count'] < 1000: <del> raise ValueError('Too few instances<1000; summary=%s' % summary) <del> return summary <add> .. code-block:: python <add> <add> def get_metric_fn_and_keys(): <add> import math # imports should be outside of the metric_fn below. <add> def error_and_squared_error(inst): <add> label = float(inst['input_label']) <add> classes = float(inst['classes']) # 0 or 1 <add> err = abs(classes-label) <add> squared_err = math.pow(classes-label, 2) <add> return (err, squared_err) # returns a tuple. <add> return error_and_squared_error, ['err', 'mse'] # key order must match. <add> <add> def validate_err_and_count(summary): <add> if summary['err'] > 0.2: <add> raise ValueError('Too high err>0.2; summary=%s' % summary) <add> if summary['mse'] > 0.05: <add> raise ValueError('Too high mse>0.05; summary=%s' % summary) <add> if summary['count'] < 1000: <add> raise ValueError('Too few instances<1000; summary=%s' % summary) <add> return summary <ide> <ide> For the details on the other BatchPrediction-related arguments (project_id, <ide> job_id, region, data_format, input_paths, prediction_path, model_uri), <ide> def validate_err_and_count(summary): <ide> :type prediction_path: str <ide> <ide> :param metric_fn_and_keys: a tuple of metric_fn and metric_keys: <add> <ide> - metric_fn is a function that accepts a dictionary (for an instance), <ide> and returns a tuple of metric(s) that it calculates. <add> <ide> - metric_keys is a list of strings to denote the key of each metric. <ide> :type metric_fn_and_keys: tuple of a function and a list[str] <ide> <ide><path>airflow/providers/google/cloud/utils/mlengine_prediction_summary.py <ide> # KIND, either express or implied. See the License for the <ide> # specific language governing permissions and limitations <ide> # under the License. <add>""" <add>A template called by DataFlowPythonOperator to summarize BatchPrediction. <ide> <del>"""A template called by DataFlowPythonOperator to summarize BatchPrediction. <ide> It accepts a user function to calculate the metric(s) per instance in <ide> the prediction results, then aggregates to output as a summary. 
<del>Args: <del> --prediction_path: <del> The GCS folder that contains BatchPrediction results, containing <del> prediction.results-NNNNN-of-NNNNN files in the json format. <del> Output will be also stored in this folder, as 'prediction.summary.json'. <del> --metric_fn_encoded: <del> An encoded function that calculates and returns a tuple of metric(s) <del> for a given instance (as a dictionary). It should be encoded <del> via base64.b64encode(dill.dumps(fn, recurse=True)). <del> --metric_keys: <del> A comma-separated key(s) of the aggregated metric(s) in the summary <del> output. The order and the size of the keys must match to the output <del> of metric_fn. <del> The summary will have an additional key, 'count', to represent the <del> total number of instances, so the keys shouldn't include 'count'. <del># Usage example: <del>from airflow.providers.google.cloud.operators.dataflow import DataflowCreatePythonJobOperator <del>def get_metric_fn(): <del> import math # all imports must be outside of the function to be passed. <del> def metric_fn(inst): <del> label = float(inst["input_label"]) <del> classes = float(inst["classes"]) <del> prediction = float(inst["scores"][1]) <del> log_loss = math.log(1 + math.exp( <del> -(label * 2 - 1) * math.log(prediction / (1 - prediction)))) <del> squared_err = (classes-label)**2 <del> return (log_loss, squared_err) <del> return metric_fn <del>metric_fn_encoded = base64.b64encode(dill.dumps(get_metric_fn(), recurse=True)) <del>DataflowCreatePythonJobOperator( <del> task_id="summary-prediction", <del> py_options=["-m"], <del> py_file="airflow.providers.google.cloud.utils.mlengine_prediction_summary", <del> options={ <del> "prediction_path": prediction_path, <del> "metric_fn_encoded": metric_fn_encoded, <del> "metric_keys": "log_loss,mse" <del> }, <del> dataflow_default_options={ <del> "project": "xxx", "region": "us-east1", <del> "staging_location": "gs://yy", "temp_location": "gs://zz", <del> }) <del> >> dag <del># When the input file is like the following: <del>{"inputs": "1,x,y,z", "classes": 1, "scores": [0.1, 0.9]} <del>{"inputs": "0,o,m,g", "classes": 0, "scores": [0.7, 0.3]} <del>{"inputs": "1,o,m,w", "classes": 0, "scores": [0.6, 0.4]} <del>{"inputs": "1,b,r,b", "classes": 1, "scores": [0.2, 0.8]} <del># The output file will be: <del>{"log_loss": 0.43890510565304547, "count": 4, "mse": 0.25} <del># To test outside of the dag: <del>subprocess.check_call(["python", <del> "-m", <del> "airflow.providers.google.cloud.utils.mlengine_prediction_summary", <del> "--prediction_path=gs://...", <del> "--metric_fn_encoded=" + metric_fn_encoded, <del> "--metric_keys=log_loss,mse", <del> "--runner=DataflowRunner", <del> "--staging_location=gs://...", <del> "--temp_location=gs://...", <del> ]) <add> <add>It accepts the following arguments: <add> <add>- ``--prediction_path``: <add> The GCS folder that contains BatchPrediction results, containing <add> prediction.results-NNNNN-of-NNNNN files in the json format. <add> Output will be also stored in this folder, as 'prediction.summary.json'. <add>- ``--metric_fn_encoded``: <add> An encoded function that calculates and returns a tuple of metric(s) <add> for a given instance (as a dictionary). It should be encoded <add> via base64.b64encode(dill.dumps(fn, recurse=True)). <add>- ``--metric_keys``: <add> A comma-separated key(s) of the aggregated metric(s) in the summary <add> output. The order and the size of the keys must match to the output <add> of metric_fn. 
<add> The summary will have an additional key, 'count', to represent the <add> total number of instances, so the keys shouldn't include 'count'. <add> <add> <add>Usage example: <add> <add>.. code-block: python <add> <add> from airflow.providers.google.cloud.operators.dataflow import DataflowCreatePythonJobOperator <add> <add> <add> def get_metric_fn(): <add> import math # all imports must be outside of the function to be passed. <add> def metric_fn(inst): <add> label = float(inst["input_label"]) <add> classes = float(inst["classes"]) <add> prediction = float(inst["scores"][1]) <add> log_loss = math.log(1 + math.exp( <add> -(label * 2 - 1) * math.log(prediction / (1 - prediction)))) <add> squared_err = (classes-label)**2 <add> return (log_loss, squared_err) <add> return metric_fn <add> metric_fn_encoded = base64.b64encode(dill.dumps(get_metric_fn(), recurse=True)) <add> DataflowCreatePythonJobOperator( <add> task_id="summary-prediction", <add> py_options=["-m"], <add> py_file="airflow.providers.google.cloud.utils.mlengine_prediction_summary", <add> options={ <add> "prediction_path": prediction_path, <add> "metric_fn_encoded": metric_fn_encoded, <add> "metric_keys": "log_loss,mse" <add> }, <add> dataflow_default_options={ <add> "project": "xxx", "region": "us-east1", <add> "staging_location": "gs://yy", "temp_location": "gs://zz", <add> } <add> ) >> dag <add> <add>When the input file is like the following:: <add> <add> {"inputs": "1,x,y,z", "classes": 1, "scores": [0.1, 0.9]} <add> {"inputs": "0,o,m,g", "classes": 0, "scores": [0.7, 0.3]} <add> {"inputs": "1,o,m,w", "classes": 0, "scores": [0.6, 0.4]} <add> {"inputs": "1,b,r,b", "classes": 1, "scores": [0.2, 0.8]} <add> <add>The output file will be:: <add> <add> {"log_loss": 0.43890510565304547, "count": 4, "mse": 0.25} <add> <add>To test outside of the dag: <add> <add>.. code-block:: python <add> <add> subprocess.check_call(["python", <add> "-m", <add> "airflow.providers.google.cloud.utils.mlengine_prediction_summary", <add> "--prediction_path=gs://...", <add> "--metric_fn_encoded=" + metric_fn_encoded, <add> "--metric_keys=log_loss,mse", <add> "--runner=DataflowRunner", <add> "--staging_location=gs://...", <add> "--temp_location=gs://...", <add> ]) <ide> """ <ide> <ide> import argparse <ide><path>airflow/providers/google/common/utils/id_token_credentials.py <ide> def _load_credentials_from_file( <ide> <ide> :param filename: The full path to the credentials file. <ide> :type filename: str <del> :return Loaded credentials <del> :rtype google.auth.credentials.Credentials <add> :return: Loaded credentials <add> :rtype: google.auth.credentials.Credentials <ide> :raise google.auth.exceptions.DefaultCredentialsError: if the file is in the wrong format or is missing. <ide> """ <ide> if not os.path.exists(filename): <ide> def get_default_id_token_credentials( <ide> is running on Compute Engine. If not specified, then it will use the standard library http client <ide> to make requests. <ide> :type request: google.auth.transport.Request <del> :return the current environment's credentials. <del> :rtype google.auth.credentials.Credentials <add> :return: the current environment's credentials. <add> :rtype: google.auth.credentials.Credentials <ide> :raises ~google.auth.exceptions.DefaultCredentialsError: <ide> If no credentials were found, or if the credentials found were invalid. <ide> """
3
Go
Go
add applydiff to rwlayer
794e8111b66213191157d6b9d0b6169a175aec1f
<ide><path>layer/layer.go <ide> type RWLayer interface { <ide> <ide> // Metadata returns the low level metadata for the mutable layer <ide> Metadata() (map[string]string, error) <add> <add> // ApplyDiff applies the diff to the RW layer <add> ApplyDiff(diff io.Reader) (int64, error) <ide> } <ide> <ide> // Metadata holds information about a <ide><path>layer/mount_test.go <ide> func TestMountChanges(t *testing.T) { <ide> }) <ide> } <ide> <add>func TestMountApply(t *testing.T) { <add> // TODO Windows: Figure out why this is failing <add> if runtime.GOOS == "windows" { <add> t.Skip("Failing on Windows") <add> } <add> ls, _, cleanup := newTestStore(t) <add> defer cleanup() <add> <add> basefile := newTestFile("testfile.txt", []byte("base data!"), 0644) <add> newfile := newTestFile("newfile.txt", []byte("new data!"), 0755) <add> <add> li := initWithFiles(basefile) <add> layer, err := createLayer(ls, "", li) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> di := initWithFiles(newfile) <add> diffLayer, err := createLayer(ls, "", di) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> m, err := ls.CreateRWLayer("fun-mount", layer.ChainID(), nil) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> r, err := diffLayer.TarStream() <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if _, err := m.ApplyDiff(r); err != nil { <add> t.Fatal(err) <add> } <add> <add> pathFS, err := m.Mount("") <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> f, err := pathFS.Open(pathFS.Join(pathFS.Path(), "newfile.txt")) <add> if err != nil { <add> t.Fatal(err) <add> } <add> defer f.Close() <add> <add> b, err := ioutil.ReadAll(f) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> if expected := "new data!"; string(b) != expected { <add> t.Fatalf("Unexpected test file contents %q, expected %q", string(b), expected) <add> } <add>} <add> <ide> func assertChange(t *testing.T, actual, expected archive.Change) { <ide> if actual.Path != expected.Path { <ide> t.Fatalf("Unexpected change path %s, expected %s", actual.Path, expected.Path) <ide><path>layer/mounted_layer.go <ide> func (rl *referencedRWLayer) Mount(mountLabel string) (containerfs.ContainerFS, <ide> func (rl *referencedRWLayer) Unmount() error { <ide> return rl.layerStore.driver.Put(rl.mountedLayer.mountID) <ide> } <add> <add>// ApplyDiff applies specified diff to the layer <add>func (rl *referencedRWLayer) ApplyDiff(diff io.Reader) (int64, error) { <add> return rl.layerStore.driver.ApplyDiff(rl.mountID, rl.cacheParent(), diff) <add>}
3
Ruby
Ruby
use oo instead of meta programming
2f32aa068513c6bd1fab63087756417e01af06c8
<ide><path>lib/arel/algebra/predicates.rb <ide> module Arel <ide> module Predicates <del> class Predicate <add> class Predicate < Struct.new(:children) <ide> def or(other_predicate) <ide> Or.new(self, other_predicate) <ide> end <ide> def complement <ide> def not <ide> self.complement <ide> end <add> <add> def == other <add> super || (self.class === other && children == other.children) <add> end <ide> end <ide> <ide> class Polyadic < Predicate <del> attr_reader :predicates <add> alias :predicates :children <ide> <ide> def initialize(*predicates) <del> @predicates = predicates <add> super(predicates) <ide> end <ide> <ide> # Build a Polyadic predicate based on: <ide> def complement <ide> end <ide> <ide> class Unary < Predicate <del> attributes :operand <del> deriving :initialize, :== <add> alias :operand :children <ide> <ide> def bind(relation) <ide> self.class.new(operand.find_correlate_in(relation)) <ide> def complement <ide> end <ide> <ide> class Binary < Predicate <del> attributes :operand1, :operand2 <del> deriving :initialize <add> alias :operand1 :children <add> attr_reader :operand2 <add> <add> def initialize left, right <add> super(left) <add> @operand2 = right <add> end <ide> <ide> def ==(other) <del> self.class === other and <del> @operand1 == other.operand1 and <del> @operand2 == other.operand2 <add> super && @operand2 == other.operand2 <ide> end <ide> <ide> def bind(relation)
1
Python
Python
add ragged support for keras.layers.resizing
a9740c65508d41e60f18f06705be5bbb73ffed41
<ide><path>keras/layers/preprocessing/image_preprocessing.py <ide> from keras.layers.preprocessing import preprocessing_utils as utils <ide> from keras.preprocessing.image import smart_resize <ide> from keras.utils import control_flow_util <add>from keras.utils import tf_utils <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.python.util.tf_export import keras_export <ide> class Resizing(base_layer.Layer): <ide> Input pixel values can be of any range (e.g. `[0., 1.)` or `[0, 255]`) and of <ide> interger or floating point dtype. By default, the layer will output floats. <ide> <add> This layer can be called on tf.RaggedTensor batches of input images of <add> distinct sizes, and will resize the outputs to dense tensors of uniform size. <add> <ide> For an overview and full list of preprocessing layers, see the preprocessing <ide> [guide](https://www.tensorflow.org/guide/keras/preprocessing_layers). <ide> <ide> def call(self, inputs): <ide> else: <ide> input_dtype = tf.float32 <ide> inputs = utils.ensure_tensor(inputs, dtype=input_dtype) <add> size = [self.height, self.width] <ide> if self.crop_to_aspect_ratio: <del> outputs = smart_resize( <del> inputs, <del> size=[self.height, self.width], <del> interpolation=self._interpolation_method) <add> def resize_to_aspect(x): <add> if tf_utils.is_ragged(inputs): <add> x = x.to_tensor() <add> return smart_resize( <add> x, <add> size=size, <add> interpolation=self._interpolation_method) <add> <add> if tf_utils.is_ragged(inputs): <add> size_as_shape = tf.TensorShape(size) <add> shape = size_as_shape + inputs.shape[-1:] <add> spec = tf.TensorSpec(shape, input_dtype) <add> outputs = tf.map_fn(resize_to_aspect, inputs, fn_output_signature=spec) <add> else: <add> outputs = resize_to_aspect(inputs) <ide> else: <ide> outputs = tf.image.resize( <ide> inputs, <del> size=[self.height, self.width], <add> size=size, <ide> method=self._interpolation_method) <del> outputs = tf.cast(outputs, self.compute_dtype) <del> return outputs <add> return tf.cast(outputs, self.compute_dtype) <ide> <ide> def compute_output_shape(self, input_shape): <ide> input_shape = tf.TensorShape(input_shape).as_list() <ide><path>keras/layers/preprocessing/image_preprocessing_test.py <ide> def test_unbatched_image(self): <ide> expected_output = np.reshape(expected_output, (2, 2, 1)) <ide> self.assertAllEqual(expected_output, output_image) <ide> <add> @parameterized.named_parameters(('crop_to_aspect_ratio_false', False), <add> ('crop_to_aspect_ratio_true', True)) <add> def test_ragged_image(self, crop_to_aspect_ratio): <add> with testing_utils.use_gpu(): <add> inputs = tf.ragged.constant([ <add> np.ones((8, 8, 1)), <add> np.ones((8, 4, 1)), <add> np.ones((4, 8, 1)), <add> np.ones((2, 2, 1)), <add> ], dtype='float32') <add> layer = image_preprocessing.Resizing( <add> 2, <add> 2, <add> interpolation='nearest', <add> crop_to_aspect_ratio=crop_to_aspect_ratio) <add> outputs = layer(inputs) <add> expected_output = [[[[1.], [1.]], [[1.], [1.]]], <add> [[[1.], [1.]], [[1.], [1.]]], <add> [[[1.], [1.]], [[1.], [1.]]], <add> [[[1.], [1.]], [[1.], [1.]]]] <add> self.assertIsInstance(outputs, tf.Tensor) <add> self.assertNotIsInstance(outputs, tf.RaggedTensor) <add> self.assertAllEqual(expected_output, outputs) <add> <ide> @testing_utils.run_v2_only <ide> def test_output_dtypes(self): <ide> inputs = np.array([[[1], [2]], [[3], [4]]], dtype='float64')
2
Text
Text
change comments for improved consistency
9586a6a98ef4bad7894bfe31da4fab42f6b3d6cd
<ide><path>CONTRIBUTING.md <ide> Algorithms in this repo should not be how-to examples for existing Python packag <ide> Use [pre-commit](https://pre-commit.com/#installation) to automatically format your code to match our coding style: <ide> <ide> ```bash <del>python3 -m pip install pre-commit # required only once <add>python3 -m pip install pre-commit # only required the first time <ide> pre-commit install <ide> ``` <ide> That's it! The plugin will run every time you commit any changes. If there are any errors found during the run, fix them and commit those changes. You can even run the plugin manually on all files: <ide><path>DIRECTORY.md <ide> * [Test Send File](https://github.com/TheAlgorithms/Python/blob/master/file_transfer/tests/test_send_file.py) <ide> <ide> ## Fractals <add> * [Julia Sets](https://github.com/TheAlgorithms/Python/blob/master/fractals/julia_sets.py) <ide> * [Koch Snowflake](https://github.com/TheAlgorithms/Python/blob/master/fractals/koch_snowflake.py) <ide> * [Mandelbrot](https://github.com/TheAlgorithms/Python/blob/master/fractals/mandelbrot.py) <ide> * [Sierpinski Triangle](https://github.com/TheAlgorithms/Python/blob/master/fractals/sierpinski_triangle.py) <ide> * [Binomial Distribution](https://github.com/TheAlgorithms/Python/blob/master/maths/binomial_distribution.py) <ide> * [Bisection](https://github.com/TheAlgorithms/Python/blob/master/maths/bisection.py) <ide> * [Ceil](https://github.com/TheAlgorithms/Python/blob/master/maths/ceil.py) <add> * [Check Polygon](https://github.com/TheAlgorithms/Python/blob/master/maths/check_polygon.py) <ide> * [Chudnovsky Algorithm](https://github.com/TheAlgorithms/Python/blob/master/maths/chudnovsky_algorithm.py) <ide> * [Collatz Sequence](https://github.com/TheAlgorithms/Python/blob/master/maths/collatz_sequence.py) <ide> * [Combinations](https://github.com/TheAlgorithms/Python/blob/master/maths/combinations.py) <ide> * [Decimal Isolate](https://github.com/TheAlgorithms/Python/blob/master/maths/decimal_isolate.py) <add> * [Double Factorial Iterative](https://github.com/TheAlgorithms/Python/blob/master/maths/double_factorial_iterative.py) <ide> * [Double Factorial Recursive](https://github.com/TheAlgorithms/Python/blob/master/maths/double_factorial_recursive.py) <ide> * [Entropy](https://github.com/TheAlgorithms/Python/blob/master/maths/entropy.py) <ide> * [Euclidean Distance](https://github.com/TheAlgorithms/Python/blob/master/maths/euclidean_distance.py) <ide> * [Sum Of Arithmetic Series](https://github.com/TheAlgorithms/Python/blob/master/maths/sum_of_arithmetic_series.py) <ide> * [Sum Of Digits](https://github.com/TheAlgorithms/Python/blob/master/maths/sum_of_digits.py) <ide> * [Sum Of Geometric Progression](https://github.com/TheAlgorithms/Python/blob/master/maths/sum_of_geometric_progression.py) <add> * [Sylvester Sequence](https://github.com/TheAlgorithms/Python/blob/master/maths/sylvester_sequence.py) <ide> * [Test Prime Check](https://github.com/TheAlgorithms/Python/blob/master/maths/test_prime_check.py) <ide> * [Trapezoidal Rule](https://github.com/TheAlgorithms/Python/blob/master/maths/trapezoidal_rule.py) <ide> * [Triplet Sum](https://github.com/TheAlgorithms/Python/blob/master/maths/triplet_sum.py) <ide> * [Reverse Letters](https://github.com/TheAlgorithms/Python/blob/master/strings/reverse_letters.py) <ide> * [Reverse Words](https://github.com/TheAlgorithms/Python/blob/master/strings/reverse_words.py) <ide> * [Split](https://github.com/TheAlgorithms/Python/blob/master/strings/split.py) <del> * [Swap 
Case](https://github.com/TheAlgorithms/Python/blob/master/strings/swap_case.py) <ide> * [Upper](https://github.com/TheAlgorithms/Python/blob/master/strings/upper.py) <ide> * [Word Occurrence](https://github.com/TheAlgorithms/Python/blob/master/strings/word_occurrence.py) <ide> * [Word Patterns](https://github.com/TheAlgorithms/Python/blob/master/strings/word_patterns.py)
2
Python
Python
fix mypy errors in microsoft azure provider
374574b8d0ef795855f8d2bb212ba6d653e62727
<ide><path>airflow/providers/microsoft/azure/hooks/container_instance.py <ide> class AzureContainerInstanceHook(AzureBaseHook): <ide> conn_type = 'azure_container_instance' <ide> hook_name = 'Azure Container Instance' <ide> <del> def __init__(self, *args, **kwargs) -> None: <del> super().__init__(sdk_client=ContainerInstanceManagementClient, *args, **kwargs) <add> def __init__(self, conn_id: str = default_conn_name) -> None: <add> super().__init__(sdk_client=ContainerInstanceManagementClient, conn_id=conn_id) <ide> self.connection = self.get_conn() <ide> <ide> def create_or_update(self, resource_group: str, name: str, container_group: ContainerGroup) -> None: <ide><path>airflow/providers/microsoft/azure/hooks/data_factory.py <ide> <ide> from airflow.exceptions import AirflowException <ide> from airflow.hooks.base import BaseHook <add>from airflow.typing_compat import TypedDict <add> <add>Credentials = Union[ClientSecretCredential, DefaultAzureCredential] <ide> <ide> <ide> def provide_targeted_factory(func: Callable) -> Callable: <ide> def bind_argument(arg, default_key): <ide> return wrapper <ide> <ide> <add>class PipelineRunInfo(TypedDict): <add> """Type class for the pipeline run info dictionary.""" <add> <add> run_id: str <add> factory_name: Optional[str] <add> resource_group_name: Optional[str] <add> <add> <ide> class AzureDataFactoryPipelineRunStatus: <ide> """Azure Data Factory pipeline operation statuses.""" <ide> <ide> def get_connection_form_widgets() -> Dict[str, Any]: <ide> from flask_appbuilder.fieldwidgets import BS3TextFieldWidget <ide> from flask_babel import lazy_gettext <ide> from wtforms import StringField <add> from wtforms.validators import InputRequired <ide> <ide> return { <ide> "extra__azure_data_factory__tenantId": StringField( <ide> lazy_gettext('Tenant ID'), widget=BS3TextFieldWidget() <ide> ), <ide> "extra__azure_data_factory__subscriptionId": StringField( <del> lazy_gettext('Subscription ID'), widget=BS3TextFieldWidget() <add> lazy_gettext('Subscription ID'), validators=[InputRequired()], widget=BS3TextFieldWidget() <ide> ), <ide> "extra__azure_data_factory__resource_group_name": StringField( <ide> lazy_gettext('Resource Group Name'), widget=BS3TextFieldWidget() <ide> def get_ui_field_behaviour() -> Dict: <ide> }, <ide> } <ide> <del> def __init__(self, azure_data_factory_conn_id: Optional[str] = default_conn_name): <add> def __init__(self, azure_data_factory_conn_id: str = default_conn_name): <ide> self._conn: DataFactoryManagementClient = None <ide> self.conn_id = azure_data_factory_conn_id <ide> super().__init__() <ide> def get_conn(self) -> DataFactoryManagementClient: <ide> <ide> conn = self.get_connection(self.conn_id) <ide> tenant = conn.extra_dejson.get('extra__azure_data_factory__tenantId') <del> subscription_id = conn.extra_dejson.get('extra__azure_data_factory__subscriptionId') <ide> <del> credential = None <add> try: <add> subscription_id = conn.extra_dejson['extra__azure_data_factory__subscriptionId'] <add> except KeyError: <add> raise ValueError("A Subscription ID is required to connect to Azure Data Factory.") <add> <add> credential: Credentials <ide> if conn.login is not None and conn.password is not None: <add> if not tenant: <add> raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.") <add> <ide> credential = ClientSecretCredential( <ide> client_id=conn.login, client_secret=conn.password, tenant_id=tenant <ide> ) <ide> def _factory_exists(self, resource_group_name, factory_name) -> bool: <ide> return 
factory_name in factories <ide> <ide> @staticmethod <del> def _create_client(credential, subscription_id): <add> def _create_client(credential: Credentials, subscription_id: str): <ide> return DataFactoryManagementClient( <ide> credential=credential, <ide> subscription_id=subscription_id, <ide> def wait_for_pipeline_run_status( <ide> expected_statuses: Union[str, Set[str]], <ide> resource_group_name: Optional[str] = None, <ide> factory_name: Optional[str] = None, <del> check_interval: Optional[int] = 60, <del> timeout: Optional[int] = 60 * 60 * 24 * 7, <add> check_interval: int = 60, <add> timeout: int = 60 * 60 * 24 * 7, <ide> ) -> bool: <ide> """ <ide> Waits for a pipeline run to match an expected status. <ide> def wait_for_pipeline_run_status( <ide> status. <ide> :return: Boolean indicating if the pipeline run has reached the ``expected_status``. <ide> """ <del> pipeline_run_info = { <del> "run_id": run_id, <del> "factory_name": factory_name, <del> "resource_group_name": resource_group_name, <del> } <add> pipeline_run_info = PipelineRunInfo( <add> run_id=run_id, <add> factory_name=factory_name, <add> resource_group_name=resource_group_name, <add> ) <ide> pipeline_run_status = self.get_pipeline_run_status(**pipeline_run_info) <ide> <ide> start_time = time.monotonic() <ide><path>airflow/providers/microsoft/azure/hooks/wasb.py <ide> def get_conn(self) -> BlobServiceClient: <ide> # use Active Directory auth <ide> app_id = conn.login <ide> app_secret = conn.password <del> tenant = extra.get('tenant_id') or extra.get('extra__wasb__tenant_id') <add> tenant = extra.get('tenant_id', extra.get('extra__wasb__tenant_id')) <ide> token_credential = ClientSecretCredential(tenant, app_id, app_secret) <ide> return BlobServiceClient(account_url=conn.host, credential=token_credential) <ide> sas_token = extra.get('sas_token') or extra.get('extra__wasb__sas_token') <ide><path>airflow/providers/microsoft/azure/log/wasb_task_handler.py <ide> from azure.common import AzureHttpError <ide> <ide> try: <del> from functools import cached_property <add> from functools import cached_property # type: ignore[attr-defined] <ide> except ImportError: <ide> from cached_property import cached_property <ide> <ide><path>airflow/providers/microsoft/azure/operators/data_factory.py <ide> def __init__( <ide> start_activity_name: Optional[str] = None, <ide> start_from_failure: Optional[bool] = None, <ide> parameters: Optional[Dict[str, Any]] = None, <del> timeout: Optional[int] = 60 * 60 * 24 * 7, <del> check_interval: Optional[int] = 60, <add> timeout: int = 60 * 60 * 24 * 7, <add> check_interval: int = 60, <ide> **kwargs, <ide> ) -> None: <ide> super().__init__(**kwargs) <ide><path>airflow/providers/microsoft/azure/secrets/key_vault.py <ide> from azure.keyvault.secrets import SecretClient <ide> <ide> try: <del> from functools import cached_property <add> from functools import cached_property # type: ignore[attr-defined] <ide> except ImportError: <ide> from cached_property import cached_property <ide> <ide><path>tests/providers/microsoft/azure/hooks/test_azure_data_factory.py <ide> <ide> import json <ide> from typing import Type <del>from unittest.mock import MagicMock, Mock, patch <add>from unittest.mock import MagicMock, patch <ide> <ide> import pytest <ide> from azure.identity import ClientSecretCredential, DefaultAzureCredential <ide> def test_create_factory(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)), <ide> ) <ide> def 
test_update_factory(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._factory_exists = Mock(return_value=True) <del> hook.update_factory(*user_args) <add> with patch.object(hook, "_factory_exists") as mock_factory_exists: <add> mock_factory_exists.return_value = True <add> hook.update_factory(*user_args) <ide> <ide> hook._conn.factories.create_or_update.assert_called_with(*sdk_args) <ide> <ide> def test_update_factory(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)), <ide> ) <ide> def test_update_factory_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._factory_exists = Mock(return_value=False) <add> with patch.object(hook, "_factory_exists") as mock_factory_exists: <add> mock_factory_exists.return_value = False <ide> <ide> with pytest.raises(AirflowException, match=r"Factory .+ does not exist"): <ide> hook.update_factory(*user_args) <ide> def test_create_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._linked_service_exists = Mock(return_value=True) <del> hook.update_linked_service(*user_args) <add> with patch.object(hook, "_linked_service_exists") as mock_linked_service_exists: <add> mock_linked_service_exists.return_value = True <add> hook.update_linked_service(*user_args) <ide> <ide> hook._conn.linked_services.create_or_update(*sdk_args) <ide> <ide> def test_update_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_linked_service_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._linked_service_exists = Mock(return_value=False) <add> with patch.object(hook, "_linked_service_exists") as mock_linked_service_exists: <add> mock_linked_service_exists.return_value = False <ide> <ide> with pytest.raises(AirflowException, match=r"Linked service .+ does not exist"): <ide> hook.update_linked_service(*user_args) <ide> def test_create_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._dataset_exists = Mock(return_value=True) <del> hook.update_dataset(*user_args) <add> with patch.object(hook, "_dataset_exists") as mock_dataset_exists: <add> mock_dataset_exists.return_value = True <add> hook.update_dataset(*user_args) <ide> <ide> hook._conn.datasets.create_or_update.assert_called_with(*sdk_args) <ide> <ide> def test_update_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_dataset_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._dataset_exists = Mock(return_value=False) <add> with patch.object(hook, "_dataset_exists") as mock_dataset_exists: <add> mock_dataset_exists.return_value = False <ide> <ide> with pytest.raises(AirflowException, match=r"Dataset .+ does not exist"): <ide> hook.update_dataset(*user_args) <ide> def test_create_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), 
(DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._pipeline_exists = Mock(return_value=True) <del> hook.update_pipeline(*user_args) <add> with patch.object(hook, "_pipeline_exists") as mock_pipeline_exists: <add> mock_pipeline_exists.return_value = True <add> hook.update_pipeline(*user_args) <ide> <ide> hook._conn.pipelines.create_or_update.assert_called_with(*sdk_args) <ide> <ide> def test_update_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_pipeline_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._pipeline_exists = Mock(return_value=False) <add> with patch.object(hook, "_pipeline_exists") as mock_pipeline_exists: <add> mock_pipeline_exists.return_value = False <ide> <ide> with pytest.raises(AirflowException, match=r"Pipeline .+ does not exist"): <ide> hook.update_pipeline(*user_args) <ide> def test_create_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._trigger_exists = Mock(return_value=True) <del> hook.update_trigger(*user_args) <add> with patch.object(hook, "_trigger_exists") as mock_trigger_exists: <add> mock_trigger_exists.return_value = True <add> hook.update_trigger(*user_args) <ide> <ide> hook._conn.triggers.create_or_update.assert_called_with(*sdk_args) <ide> <ide> def test_update_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <ide> ) <ide> def test_update_trigger_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <del> hook._trigger_exists = Mock(return_value=False) <add> with patch.object(hook, "_trigger_exists") as mock_trigger_exists: <add> mock_trigger_exists.return_value = False <ide> <ide> with pytest.raises(AirflowException, match=r"Trigger .+ does not exist"): <ide> hook.update_trigger(*user_args)
7
Text
Text
fix typo in create-subscription readme
488ad5a6b94ac4b71ff587ecde05e48a218aba62
<ide><path>packages/create-subscription/README.md <ide> Below is an example showing how `create-subscription` can be used with native Pr <ide> <ide> **Note** that it an initial render value of `undefined` is unavoidable due to the fact that Promises provide no way to synchronously read their current value. <ide> <del>**Note** the lack of a way to "unsubscribe" from a Promise can result in memory leaks as long as something has a reference to the Promise. This should be taken into considerationg when determining whether Promises are appropriate to use in this way within your application. <add>**Note** the lack of a way to "unsubscribe" from a Promise can result in memory leaks as long as something has a reference to the Promise. This should be taken into consideration when determining whether Promises are appropriate to use in this way within your application. <ide> <ide> ```js <ide> import React from "react";
1
Text
Text
add changes for 1.3.16
e967abcd3080be3a1fc9c645fb94d411753bd633
<ide><path>CHANGELOG.md <add><a name="1.3.16"></a> <add># 1.3.16 cookie-oatmealification (2015-06-05) <add> <add> <add>## Bug Fixes <add> <add>- **$compile:** throw error on invalid directive name <add> ([634e4671](https://github.com/angular/angular.js/commit/634e467172efa696eb32ef8942ffbedeecbd030e), <add> [#11281](https://github.com/angular/angular.js/issues/11281), [#11109](https://github.com/angular/angular.js/issues/11109)) <add>- **$cookies:** update $cookies to prevent duplicate cookie writes and play nice with external code <add> ([706a93ab](https://github.com/angular/angular.js/commit/706a93ab6960e3474698ccf9a8048b3c32e567c6), <add> [#11490](https://github.com/angular/angular.js/issues/11490), [#11515](https://github.com/angular/angular.js/issues/11515)) <add>- **$http:** throw error if `success` and `error` methods do not receive a function <add> ([731e1f65](https://github.com/angular/angular.js/commit/731e1f6534ab7fd1e053b8d7a25c902fcd934fea), <add> [#11330](https://github.com/angular/angular.js/issues/11330), [#11333](https://github.com/angular/angular.js/issues/11333)) <add>- **core:** ensure that multiple requests to requestAnimationFrame are buffered <add> ([0adc0364](https://github.com/angular/angular.js/commit/0adc0364265b06c567ccc8e90a7f09cc46f235b2), <add> [#11791](https://github.com/angular/angular.js/issues/11791)) <add>- **filterFilter:** fix matching against `null`/`undefined` <add> ([9dd0fe35](https://github.com/angular/angular.js/commit/9dd0fe35d1027e59b84b2396abee00d8683f3b50), <add> [#11573](https://github.com/angular/angular.js/issues/11573), [#11617](https://github.com/angular/angular.js/issues/11617)) <add>- **jqLite:** <add> - check for "length" in obj in isArrayLike to prevent iOS8 JIT bug from surfacing <add> ([647f3f55](https://github.com/angular/angular.js/commit/647f3f55eb7100a255272f7277f0f962de234a32), <add> [#11508](https://github.com/angular/angular.js/issues/11508)) <add> - attr should ignore comment, text and attribute nodes <add> ([181e5ebc](https://github.com/angular/angular.js/commit/181e5ebc3fce5312feacaeace4fcad0d32f4d73c)) <add>- **ngAnimate:** <add> - ensure that minified repaint code isn't removed <add> ([d5c99ea4](https://github.com/angular/angular.js/commit/d5c99ea42b834343fd0362cfc572f47e7536ccfb), <add> [#9936](https://github.com/angular/angular.js/issues/9936)) <add>- **ngAria:** handle elements with role="checkbox/menuitemcheckbox" <add> ([1c282af5](https://github.com/angular/angular.js/commit/1c282af5abc205d4aac37c05c5cb725d71747134), <add> [#11317](https://github.com/angular/angular.js/issues/11317), [#11321](https://github.com/angular/angular.js/issues/11321)) <add>- **ngModel:** allow setting model to NaN when asyncValidator is present <add> ([b64519fe](https://github.com/angular/angular.js/commit/b64519fea7f1a5ec75e32c4b71b012b827314153), <add> [#11315](https://github.com/angular/angular.js/issues/11315), [#11411](https://github.com/angular/angular.js/issues/11411)) <add>- **ngTouch:** <add> - check undefined tagName for SVG event target <add> ([7560a8d2](https://github.com/angular/angular.js/commit/7560a8d2d65955ddb60ede9d586502f4e3cbd062)) <add> - register touches properly when jQuery is used <add> ([40441f6d](https://github.com/angular/angular.js/commit/40441f6dfc5ebd5cdc679c269c4639238f5351eb), <add> [#4001](https://github.com/angular/angular.js/issues/4001), [#8584](https://github.com/angular/angular.js/issues/8584), [#10797](https://github.com/angular/angular.js/issues/10797), 
[#11488](https://github.com/angular/angular.js/issues/11488)) <add>- **select:** prevent unknown option being added to select when bound to null property <add> ([9e3f82bb](https://github.com/angular/angular.js/commit/9e3f82bbaf83cad7bb3121db756099b0880562e6), <add> [#11872](https://github.com/angular/angular.js/issues/11872), [#11875](https://github.com/angular/angular.js/issues/11875)) <add> <add> <add>## Features <add> <add>- **travis:** run unit tests on iOS 8 <add> ([1f650871](https://github.com/angular/angular.js/commit/1f650871266b88b3dab4a894a839a82ac9a06b69), <add> [#11479](https://github.com/angular/angular.js/issues/11479)) <add> <add> <add> <ide> <a name="1.4.0"></a> <ide> # 1.4.0 jaracimrman-existence (2015-05-26) <ide>
1
Ruby
Ruby
enable static asset server by default
dbb32115ef45dd58667e450125deba80d7016341
<ide><path>railties/lib/rails/commands/server.rb <ide> app = Rack::Builder.new { <ide> use Rails::Rack::LogTailer unless options[:detach] <ide> use Rails::Rack::Debugger if options[:debugger] <del> use Rails::Rack::Static <ide> run ActionDispatch::Utils.parse_config(options[:config]) <ide> }.to_app <ide> <ide><path>railties/lib/rails/configuration.rb <ide> class Configuration <ide> :log_path, :log_level, :logger, :preload_frameworks, <ide> :database_configuration_file, :cache_store, :time_zone, <ide> :view_path, :metals, :controller_paths, :routes_configuration_file, <del> :eager_load_paths, :dependency_loading, :paths <add> :eager_load_paths, :dependency_loading, :paths, :serve_static_assets <ide> <ide> def initialize <ide> set_root_path! <ide> def initialize <ide> @controller_paths = default_controller_paths <ide> @routes_configuration_file = default_routes_configuration_file <ide> @database_configuration_file = default_database_configuration_file <add> @serve_static_assets = default_serve_static_assets <ide> <ide> for framework in default_frameworks <ide> self.send("#{framework}=", Rails::OrderedOptions.new) <ide> def default_i18n <ide> i18n <ide> end <ide> <add> def default_serve_static_assets <add> true <add> end <add> <ide> # Adds a single Gem dependency to the rails application. By default, it will require <ide> # the library with the same name as the gem. Use :lib to specify a different name. <ide> # <ide><path>railties/lib/rails/generators/rails/app/templates/config/environments/production.rb <ide> # Use a different cache store in production <ide> # config.cache_store = :mem_cache_store <ide> <add># Disable Rails's static asset server <add># In production, Apache or nginx will already do this <add>config.serve_static_assets = false <add> <ide> # Enable serving of images, stylesheets, and javascripts from an asset server <ide> # config.action_controller.asset_host = "http://assets.example.com" <ide> <ide> # Disable delivery errors, bad email addresses will be ignored <ide> # config.action_mailer.raise_delivery_errors = false <ide> <ide> # Enable threaded mode <del># config.threadsafe! <ide>\ No newline at end of file <add># config.threadsafe! 
<ide><path>railties/lib/rails/initializer.rb <ide> def self.run(initializer = nil, config = nil) <ide> end <ide> end <ide> <add> # Include middleware to serve up static assets <add> Initializer.default.add :initialize_static_server do <add> if configuration.frameworks.include?(:action_controller) && configuration.serve_static_assets <add> configuration.middleware.insert(0, Rails::Rack::Static, Rails.public_path) <add> end <add> end <add> <ide> Initializer.default.add :initialize_cache do <ide> unless defined?(RAILS_CACHE) <ide> silence_warnings { Object.const_set "RAILS_CACHE", ActiveSupport::Cache.lookup_store(configuration.cache_store) } <ide><path>railties/lib/rails/rack/static.rb <ide> module Rack <ide> class Static <ide> FILE_METHODS = %w(GET HEAD).freeze <ide> <del> def initialize(app) <add> def initialize(app, root) <ide> @app = app <del> @file_server = ::Rack::File.new(File.join(RAILS_ROOT, "public")) <add> @file_server = ::Rack::File.new(root) <ide> end <ide> <ide> def call(env) <ide><path>railties/test/application/load_test.rb <ide> def setup <ide> test "running Rails::Application.load on the path returns a (vaguely) useful application" do <ide> app_file "config.ru", <<-CONFIG <ide> require File.dirname(__FILE__) + '/config/environment' <del> use Rails::Rack::Static <ide> run ActionController::Dispatcher.new <ide> CONFIG <ide> <ide><path>railties/test/rack_static_test.rb <ide> def teardown <ide> DummyApp = lambda { |env| <ide> [200, {"Content-Type" => "text/plain"}, ["Hello, World!"]] <ide> } <del> App = Rails::Rack::Static.new(DummyApp) <add> App = Rails::Rack::Static.new(DummyApp, "#{RAILS_ROOT}/public") <ide> <ide> test "serves dynamic content" do <ide> assert_equal "Hello, World!", get("/nofile")
7
Ruby
Ruby
remove unneeded comment. [ci skip]
f5b192837a14bf01936eea82c16374b2e9278e60
<ide><path>actionpack/test/journey/routes_test.rb <ide> def test_simulator_changes <ide> end <ide> <ide> def test_first_name_wins <del> #def add_route app, path, conditions, defaults, name = nil <ide> routes = Routes.new <ide> <ide> one = Path::Pattern.from_string '/hello'
1
Ruby
Ruby
fix a typo in test helper
d22fe41cf87d781ce1af264b37ba3eca762b74c3
<ide><path>railties/lib/test_help.rb <ide> <ide> require 'test/unit' <ide> require 'active_support/test_case' <del>require 'active_controller/test_case' <add>require 'action_controller/test_case' <ide> require 'action_view/test_case' <ide> require 'action_controller/integration' <ide> require 'action_mailer/test_case' if defined?(ActionMailer)
1
Python
Python
fix trainer in dataparallel setting
ce374ba87767d551f720242d5e64bfa976531079
<ide><path>src/transformers/trainer.py <ide> def _training_step( <ide> <ide> if self.args.past_index >= 0 and self._past is not None: <ide> inputs["mems"] = self._past <add> # Our model outputs do not work with DataParallel, so forcing return tuple. <add> if self.args.n_gpu > 1: <add> inputs["return_tuple"] = True <ide> <ide> outputs = model(**inputs) <ide> loss = outputs[0] # model outputs are always tuple in transformers (see doc) <ide> def _prediction_loop( <ide> inputs[k] = v.to(self.args.device) <ide> if self.args.past_index >= 0: <ide> inputs["mems"] = past <add> # Our model outputs do not work with DataParallel, so forcing return tuple. <add> if self.args.n_gpu > 1: <add> inputs["return_tuple"] = True <ide> <ide> with torch.no_grad(): <ide> outputs = model(**inputs)
1
Java
Java
make changes for timing related test failures
bfa6645c7d8e401ae2d1ea2587dbdd0eaa943265
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/BrokerAvailabilityEvent.java <ide> public BrokerAvailabilityEvent(boolean brokerAvailable, Object source) { <ide> public boolean isBrokerAvailable() { <ide> return this.brokerAvailable; <ide> } <add> <add> @Override <add> public String toString() { <add> return "BrokerAvailabilityEvent=" + this.brokerAvailable; <add> } <add> <ide> } <ide><path>spring-messaging/src/test/java/org/springframework/messaging/simp/stomp/StompBrokerRelayMessageHandlerIntegrationTests.java <ide> public class StompBrokerRelayMessageHandlerIntegrationTests { <ide> <ide> private int port; <ide> <add> <ide> @Before <ide> public void setUp() throws Exception { <ide> <ide> this.port = SocketUtils.findAvailableTcpPort(61613); <ide> <del> createAndStartBroker(); <del> <ide> this.responseChannel = new ExecutorSubscribableChannel(); <ide> this.responseHandler = new ExpectationMatchingMessageHandler(); <ide> this.responseChannel.subscribe(this.responseHandler); <del> <ide> this.eventPublisher = new ExpectationMatchingEventPublisher(); <ide> <add> startActiveMqBroker(); <ide> createAndStartRelay(); <ide> } <ide> <del> private void createAndStartBroker() throws Exception { <add> private void startActiveMqBroker() throws Exception { <ide> this.activeMQBroker = new BrokerService(); <del> this.activeMQBroker.addConnector("stomp://localhost:" + port); <add> this.activeMQBroker.addConnector("stomp://localhost:" + this.port); <ide> this.activeMQBroker.setStartAsync(false); <ide> this.activeMQBroker.setDeleteAllMessagesOnStartup(true); <ide> this.activeMQBroker.start(); <ide> } <ide> <ide> private void createAndStartRelay() throws InterruptedException { <ide> this.relay = new StompBrokerRelayMessageHandler(this.responseChannel, Arrays.asList("/queue/", "/topic/")); <del> this.relay.setRelayPort(port); <add> this.relay.setRelayPort(this.port); <ide> this.relay.setApplicationEventPublisher(this.eventPublisher); <ide> this.relay.setSystemHeartbeatReceiveInterval(0); <ide> this.relay.setSystemHeartbeatSendInterval(0); <ide> public void tearDown() throws Exception { <ide> this.relay.stop(); <ide> } <ide> finally { <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> } <ide> } <ide> <add> private void stopActiveMqBrokerAndAwait() throws Exception { <add> logger.debug("Stopping ActiveMQ broker and will await shutdown"); <add> if (!this.activeMQBroker.isStarted()) { <add> logger.debug("Broker not running"); <add> return; <add> } <add> final CountDownLatch latch = new CountDownLatch(1); <add> this.activeMQBroker.addShutdownHook(new Runnable() { <add> public void run() { <add> latch.countDown(); <add> } <add> }); <add> this.activeMQBroker.stop(); <add> assertTrue("Broker did not stop", latch.await(5, TimeUnit.SECONDS)); <add> logger.debug("Broker stopped"); <add> } <add> <add> <ide> // When TCP client is behind interface and configurable: <ide> // test "host" header (virtualHost property) <ide> // test "/user/.." 
destination is excluded <ide> public void tearDown() throws Exception { <ide> public void publishSubscribe() throws Exception { <ide> <ide> String sess1 = "sess1"; <del> MessageExchange conn1 = MessageExchangeBuilder.connect(sess1).build(); <del> this.relay.handleMessage(conn1.message); <del> this.responseHandler.expect(conn1); <del> <ide> String sess2 = "sess2"; <add> MessageExchange conn1 = MessageExchangeBuilder.connect(sess1).build(); <ide> MessageExchange conn2 = MessageExchangeBuilder.connect(sess2).build(); <del> this.relay.handleMessage(conn2.message); <del> this.responseHandler.expect(conn2); <add> this.responseHandler.expect(conn1, conn2); <ide> <add> this.relay.handleMessage(conn1.message); <add> this.relay.handleMessage(conn2.message); <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> String subs1 = "subs1"; <ide> String destination = "/topic/test"; <ide> <ide> MessageExchange subscribe = MessageExchangeBuilder.subscribeWithReceipt(sess1, subs1, destination, "r1").build(); <del> this.relay.handleMessage(subscribe.message); <ide> this.responseHandler.expect(subscribe); <add> <add> this.relay.handleMessage(subscribe.message); <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> MessageExchange send = MessageExchangeBuilder.send(destination, "foo").andExpectMessage(sess1, subs1).build(); <ide> public void publishSubscribe() throws Exception { <ide> @Test <ide> public void brokerUnvailableErrorFrameOnConnect() throws Exception { <ide> <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> <ide> MessageExchange connect = MessageExchangeBuilder.connectWithError("sess1").build(); <ide> this.responseHandler.expect(connect); <ide> public void brokerUnvailableErrorFrameOnConnect() throws Exception { <ide> <ide> @Test(expected=MessageDeliveryException.class) <ide> public void messageDeliverExceptionIfSystemSessionForwardFails() throws Exception { <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.SEND); <ide> this.relay.handleMessage(MessageBuilder.withPayload("test".getBytes()).setHeaders(headers).build()); <ide> } <ide> public void brokerBecomingUnvailableTriggersErrorFrame() throws Exception { <ide> this.responseHandler.expect(connect); <ide> <ide> this.relay.handleMessage(connect.message); <del> <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> this.responseHandler.expect(MessageExchangeBuilder.error(sess1).build()); <ide> <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> <ide> this.responseHandler.awaitAndAssert(); <ide> } <ide> <ide> @Test <ide> public void brokerAvailabilityEventWhenStopped() throws Exception { <ide> this.eventPublisher.expectAvailabilityStatusChanges(false); <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> this.eventPublisher.awaitAndAssert(); <ide> } <ide> <ide> public void relayReconnectsIfBrokerComesBackUp() throws Exception { <ide> String sess1 = "sess1"; <ide> MessageExchange conn1 = MessageExchangeBuilder.connect(sess1).build(); <ide> this.responseHandler.expect(conn1); <add> <ide> this.relay.handleMessage(conn1.message); <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> public void relayReconnectsIfBrokerComesBackUp() throws Exception { <ide> <ide> this.responseHandler.expect(MessageExchangeBuilder.error(sess1).build()); <ide> <del> stopBrokerAndAwait(); <add> stopActiveMqBrokerAndAwait(); <ide> <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> 
this.eventPublisher.expectAvailabilityStatusChanges(false); <ide> this.eventPublisher.awaitAndAssert(); <ide> <ide> this.eventPublisher.expectAvailabilityStatusChanges(true); <del> createAndStartBroker(); <add> startActiveMqBroker(); <ide> this.eventPublisher.awaitAndAssert(); <ide> <ide> // TODO The event publisher assertions show that the broker's back up and the system relay session <ide> public void relayReconnectsIfBrokerComesBackUp() throws Exception { <ide> <ide> @Test <ide> public void disconnectClosesRelaySessionCleanly() throws Exception { <add> <ide> MessageExchange connect = MessageExchangeBuilder.connect("sess1").build(); <ide> this.responseHandler.expect(connect); <add> <ide> this.relay.handleMessage(connect.message); <ide> this.responseHandler.awaitAndAssert(); <ide> <ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.DISCONNECT); <ide> headers.setSessionId("sess1"); <del> <ide> this.relay.handleMessage(MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build()); <ide> <ide> Thread.sleep(2000); <ide> public void disconnectClosesRelaySessionCleanly() throws Exception { <ide> } <ide> <ide> <del> private void stopBrokerAndAwait() throws Exception { <del> logger.debug("Stopping ActiveMQ broker and will await shutdown"); <del> if (!this.activeMQBroker.isStarted()) { <del> logger.debug("Broker not running"); <del> return; <del> } <del> final CountDownLatch latch = new CountDownLatch(1); <del> this.activeMQBroker.addShutdownHook(new Runnable() { <del> public void run() { <del> latch.countDown(); <del> } <del> }); <del> this.activeMQBroker.stop(); <del> assertTrue("Broker did not stop", latch.await(5, TimeUnit.SECONDS)); <del> logger.debug("Broker stopped"); <del> } <del> <del> <ide> /** <ide> * Handles messages by matching them to expectations including a latch to wait for <ide> * the completion of expected messages. <ide> public static MessageExchangeBuilder connect(String sessionId) { <ide> StompHeaderAccessor headers = StompHeaderAccessor.create(StompCommand.CONNECT); <ide> headers.setSessionId(sessionId); <ide> headers.setAcceptVersion("1.1,1.2"); <add> headers.setHeartbeat(0, 0); <ide> Message<?> message = MessageBuilder.withPayload(new byte[0]).setHeaders(headers).build(); <ide> <ide> MessageExchangeBuilder builder = new MessageExchangeBuilder(message); <ide> public void expectAvailabilityStatusChanges(Boolean... expected) { <ide> <ide> public void awaitAndAssert() throws InterruptedException { <ide> synchronized(this.monitor) { <del> long endTime = System.currentTimeMillis() + 6000; <del> while (this.expected.size() != this.actual.size() && System.currentTimeMillis() < endTime) { <add> long endTime = System.currentTimeMillis() + 10000; <add> while ((this.expected.size() != this.actual.size()) && (System.currentTimeMillis() < endTime)) { <ide> this.monitor.wait(500); <ide> } <ide> assertEquals(this.expected, this.actual); <ide> public void awaitAndAssert() throws InterruptedException { <ide> <ide> @Override <ide> public void publishEvent(ApplicationEvent event) { <add> logger.debug("Processing ApplicationEvent " + event); <ide> if (event instanceof BrokerAvailabilityEvent) { <ide> synchronized(this.monitor) { <ide> this.actual.add(((BrokerAvailabilityEvent) event).isBrokerAvailable());
2
Javascript
Javascript
remove cruft from viewtargetactionsupport test
6b47a7e6fe7723d86749b65dad92d2c6d9484866
<ide><path>packages_es6/ember-views/tests/mixins/view_target_action_support_test.js <ide> import EmberObject from "ember-runtime/system/object"; <ide> import {View} from "ember-views/views/view"; <ide> import ViewTargetActionSupport from "ember-views/mixins/view_target_action_support"; <ide> <del>var originalLookup; <del> <del>module("ViewTargetActionSupport", { <del> setup: function() { <del> originalLookup = Ember.lookup; <del> }, <del> teardown: function() { <del> Ember.lookup = originalLookup; <del> } <del>}); <add>module("ViewTargetActionSupport"); <ide> <ide> test("it should return false if no action is specified", function() { <ide> expect(1);
1
Text
Text
describe catching events with plugin
ed73dce18bd5a2d523409d1fcaadaf2bcdd1d89c
<ide><path>docs/configuration/interactions.md <ide> var chart = new Chart(ctx, { <ide> }); <ide> ``` <ide> <add>Events that do not fire over chartArea, like `mouseout`, can be captured using a simple plugin: <add> <add>```javascript <add>var chart = new Chart(ctx, { <add> type: 'line', <add> data: data, <add> options: { <add> // these are the default events: <add> // events: ['mousemove', 'mouseout', 'click', 'touchstart', 'touchmove'], <add> }, <add> plugins: [{ <add> id: 'myEventCatcher', <add> beforeEvent(chart, args, pluginOptions) { <add> const event = args.event; <add> if (event.type === 'mouseout') { <add> // process the event <add> } <add> } <add> }] <add>}); <add>``` <add> <add>For more information about plugins, see [Plugins](../developers/plugins.md) <add> <ide> ### Converting Events to Data Values <ide> <ide> A common occurrence is taking an event, such as a click, and finding the data coordinates on the chart where the event occurred. Chart.js provides helpers that make this a straightforward process.
1
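The interactions page patched above ends by pointing at Chart.js helpers for turning an event into data coordinates. Below is a minimal sketch of that conversion using the same `beforeEvent` plugin hook; it is not part of the commit. It assumes a Chart.js 3-style setup where `ctx` and `data` exist as in the quoted example and the scales use the default `x`/`y` IDs; `getValueForPixel` is the scale method that maps a canvas pixel back onto a data value.

```javascript
var chart = new Chart(ctx, {
  type: 'line',
  data: data,
  plugins: [{
    id: 'clickToDataValue',
    beforeEvent(chart, args, pluginOptions) {
      const event = args.event;
      if (event.type === 'click' && event.x !== null && event.y !== null) {
        // event.x / event.y are canvas-relative pixel positions;
        // getValueForPixel maps them back onto each scale's data range.
        const dataX = chart.scales.x.getValueForPixel(event.x);
        const dataY = chart.scales.y.getValueForPixel(event.y);
        console.log('click at data coordinates', dataX, dataY);
      }
    }
  }]
});
```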
Text
Text
add documentation for using overlay2
a546042b91f655c7cf53484cdb0c5c8b3cf12d33
<ide><path>docs/reference/commandline/dockerd.md <ide> precedence over `HTTP_PROXY`. <ide> ### Daemon storage-driver option <ide> <ide> The Docker daemon has support for several different image layer storage <del>drivers: `aufs`, `devicemapper`, `btrfs`, `zfs` and `overlay`. <add>drivers: `aufs`, `devicemapper`, `btrfs`, `zfs`, `overlay` and `overlay2`. <ide> <ide> The `aufs` driver is the oldest, but is based on a Linux kernel patch-set that <ide> is unlikely to be merged into the main kernel. These are also known to cause <ide> Linux kernel as of [3.18.0](https://lkml.org/lkml/2014/10/26/137). Call <ide> > inode consumption (especially as the number of images grows), as well as <ide> > being incompatible with the use of RPMs. <ide> <add>The `overlay2` uses the same fast union filesystem but takes advantage of <add>[additional features](https://lkml.org/lkml/2015/2/11/106) added in Linux <add>kernel 4.0 to avoid excessive inode consumption. Call `dockerd -s overlay2` <add>to use it. <add> <ide> > **Note:** <del>> It is currently unsupported on `btrfs` or any Copy on Write filesystem <del>> and should only be used over `ext4` partitions. <add>> Both `overlay` and `overlay2` are currently unsupported on `btrfs` or any <add>> Copy on Write filesystem and should only be used over `ext4` partitions. <ide> <ide> ### Storage driver options <ide> <ide><path>docs/userguide/storagedriver/selectadriver.md <ide> and all containers created by that daemon instance use the same storage driver. <ide> The table below shows the supported storage driver technologies and their <ide> driver names: <ide> <del>|Technology |Storage driver name | <del>|--------------|---------------------| <del>|OverlayFS |`overlay` | <del>|AUFS |`aufs` | <del>|Btrfs |`btrfs` | <del>|Device Mapper |`devicemapper` | <del>|VFS |`vfs` | <del>|ZFS |`zfs` | <add>|Technology |Storage driver name | <add>|--------------|-----------------------| <add>|OverlayFS |`overlay` or `overlay2`| <add>|AUFS |`aufs` | <add>|Btrfs |`btrfs` | <add>|Device Mapper |`devicemapper` | <add>|VFS |`vfs` | <add>|ZFS |`zfs` | <ide> <ide> To find out which storage driver is set on the daemon, you use the <ide> `docker info` command: <ide> For example, the `btrfs` storage driver on a Btrfs backing filesystem. The <ide> following table lists each storage driver and whether it must match the host's <ide> backing file system: <ide> <del>|Storage driver |Commonly used on |Disabled on | <del>|---------------|-----------------|-----------------------------------------| <del>|`overlay` |`ext4` `xfs` |`btrfs` `aufs` `overlay` `zfs` `eCryptfs`| <del>|`aufs` |`ext4` `xfs` |`btrfs` `aufs` `eCryptfs` | <del>|`btrfs` |`btrfs` _only_ | N/A | <del>|`devicemapper` |`direct-lvm` | N/A | <del>|`vfs` |debugging only | N/A | <del>|`zfs` |`zfs` _only_ | N/A | <add>|Storage driver |Commonly used on |Disabled on | <add>|---------------|-----------------|----------------------------------------------------| <add>|`overlay` |`ext4` `xfs` |`btrfs` `aufs` `overlay` `overlay2` `zfs` `eCryptfs`| <add>|`overlay2` |`ext4` `xfs` |`btrfs` `aufs` `overlay` `overlay2` `zfs` `eCryptfs`| <add>|`aufs` |`ext4` `xfs` |`btrfs` `aufs` `eCryptfs` | <add>|`btrfs` |`btrfs` _only_ | N/A | <add>|`devicemapper` |`direct-lvm` | N/A | <add>|`vfs` |debugging only | N/A | <add>|`zfs` |`zfs` _only_ | N/A | <ide> <ide> <ide> > **Note** <ide> the guidance offered by the table below along with the points mentioned above. 
<ide> 
<ide> ![](images/driver-pros-cons.png) 
<ide> 
<add>### Overlay vs Overlay2 
<add> 
<add>OverlayFS has two storage drivers which both make use of the same OverlayFS 
<add>technology but with different implementations and incompatible on-disk 
<add>storage formats. Since the storage is incompatible, switching between the two 
<add>will require re-creating all image content. The `overlay` driver is the 
<add>original implementation and the only option in Docker 1.11 and before. 
<add>The `overlay` driver has known limitations with inode exhaustion and 
<add>commit performance. The `overlay2` driver addresses these limitations, but 
<add>is only compatible with Linux kernel 4.0 and later. Users on a pre-4.0 
<add>kernel, or with an existing `overlay` graph, are advised to stay 
<add>on `overlay`. Users with at least a 4.0 kernel and no existing or required 
<add>`overlay` graph data may use `overlay2`. 
<add> 
<add>> **Note** 
<add>> `overlay2` graph data will not interfere with `overlay` graph data. However, 
<add>> when switching to `overlay2`, the user is responsible for removing 
<add>> `overlay` graph data to avoid storage duplication. 
<ide> 
<ide> ## Related information 
<ide> 
<ide><path>man/dockerd.8.md 
<ide> output otherwise. 
<ide> Force the Docker runtime to use a specific storage driver. 
<ide> 
<ide> **--selinux-enabled**=*true*|*false* 
<del> Enable selinux support. Default is false. SELinux does not presently support the overlay storage driver. 
<add> Enable selinux support. Default is false. SELinux does not presently support either of the overlay storage drivers. 
<ide> 
<ide> **--storage-opt**=[] 
<ide> Set storage driver options. See STORAGE DRIVER OPTIONS.
3
PHP
PHP
reduce some psalm noise
6158b08dd739527596881c162be9cadbbc08665c
<ide><path>src/Core/InstanceConfigTrait.php <ide> public function setConfig($key, $value = null, $merge = true) <ide> * ``` <ide> * <ide> * @param string|null $key The key to get or null for the whole config. <del> * @param mixed|null $default The return value when the key does not exist. <del> * @return mixed|null Configuration data at the named key or null if the key does not exist. <add> * @param mixed $default The return value when the key does not exist. <add> * @return mixed Configuration data at the named key or null if the key does not exist. <ide> */ <ide> public function getConfig(?string $key = null, $default = null) <ide> {
1
Text
Text
fix error in the markdown
a22526d17f0dfe21563b4683e4bf51d9bcb6538a
<ide><path>guide/english/certifications/front-end-libraries/react/use--for-a-more-concise-conditional/index.md <ide> title: Use && for a More Concise Conditional <ide> --- <ide> ## Use && for a More Concise Conditional <ide> The example given is <del>``` <add>```jsx <ide> {condition && <p>markup</p>} <ide> ``` <ide> which is demonstrated below using the condition of the this.state.dinnerCooked boolean. <ide> If the boolean is true the markup included in the {} with the condition will display, if not it will not display <ide> <del>``` <add>```jsx <ide> class MyComponent extends React.Component { <ide> constructor(props) { <ide> super(props); <ide> class MyComponent extends React.Component { <ide> ); <ide> } <ide> }; <add>``` <ide> <ide> ## Hint: <ide>
1
Javascript
Javascript
remove the expando when there's no more data
56bb677725b21415905e5c3eeb1e05be4480e780
<ide><path>src/data/Data.js <ide> Data.prototype = { <ide> return; <ide> } <ide> <del> if ( key === undefined ) { <del> this.register( owner ); <del> <del> } else { <add> if ( key !== undefined ) { <ide> <ide> // Support array or space separated string of keys <ide> if ( jQuery.isArray( key ) ) { <ide> Data.prototype = { <ide> delete cache[ key[ i ] ]; <ide> } <ide> } <add> <add> // Remove the expando if there's no more data <add> if ( key === undefined || jQuery.isEmptyObject( cache ) ) { <add> delete owner[ this.expando ]; <add> } <ide> }, <ide> hasData: function( owner ) { <ide> var cache = owner[ this.expando ]; <ide><path>src/event.js <ide> jQuery.event = { <ide> } <ide> } <ide> <del> // Remove the expando if it's no longer used <add> // Remove data and the expando if it's no longer used <ide> if ( jQuery.isEmptyObject( events ) ) { <del> // Normally this should go through the data api <del> // but since event.js owns these properties, <del> // this exception is made for the sake of optimizing <del> // the operation. <del> delete elemData.handle; <del> delete elemData.events; <add> dataPriv.remove( elem, "handle events" ); <ide> } <ide> }, <ide> <ide><path>test/unit/data.js <ide> test("Check proper data removal of non-element descendants nodes (#8335)", 1, fu <ide> }); <ide> <ide> testIframeWithCallback( "enumerate data attrs on body (#14894)", "data/dataAttrs.html", function( result ) { <del> expect(1); <add> expect( 1 ); <add> <add> equal( result, "ok", "enumeration of data- attrs on body" ); <add>}); <add> <add>test( "Check that the expando is removed when there's no more data", function() { <add> expect( 1 ); <ide> <del> equal(result, "ok", "enumeration of data- attrs on body" ); <add> var key, <add> div = jQuery( "<div/>" ); <add> div.data( "some", "data" ); <add> equal( div.data( "some" ), "data", "Data is added" ); <add> div.removeData( "some" ); <add> <add> // Make sure the expando is gone <add> for ( key in div[ 0 ] ) { <add> if ( /^jQuery/.test( key ) ) { <add> ok( false, "Expando was not removed when there was no more data" ); <add> } <add> } <ide> }); <ide><path>test/unit/event.js <ide> test( "Inline event result is returned (#13993)", function() { <ide> equal( result, 42, "inline handler returned value" ); <ide> }); <ide> <add>test( ".off() removes the expando when there's no more data", function() { <add> expect( 1 ); <add> <add> var key, <add> div = jQuery( "<div/>" ).appendTo( "#qunit-fixture" ); <add> <add> div.on( "click", false ); <add> div.on( "custom", function() { <add> ok( true, "Custom event triggered" ); <add> } ); <add> div.trigger( "custom" ); <add> div.off( "click custom" ); <add> <add> // Make sure the expando is gone <add> for ( key in div[ 0 ] ) { <add> if ( /^jQuery/.test( key ) ) { <add> ok( false, "Expando was not removed when there was no more data" ); <add> } <add> } <add>}); <add> <ide> // This tests are unreliable in Firefox <ide> if ( !(/firefox/i.test( window.navigator.userAgent )) ) { <ide> test( "Check order of focusin/focusout events", 2, function() {
4
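As a usage sketch (not part of the commit) of the behavior the added tests assert: once the last data key is removed, the element no longer carries a jQuery expando property.

```javascript
var div = jQuery("<div/>");

div.data("some", "data");
div.removeData("some");

// With this change, removing the last key also deletes the expando,
// so no jQuery-prefixed property should remain on the DOM node.
var leftovers = [];
for (var key in div[0]) {
  if (/^jQuery/.test(key)) {
    leftovers.push(key);
  }
}
console.log(leftovers.length === 0); // expected: true
```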
Text
Text
update changes and readme
14a954cd969b0c86e3e3c28be9e7f7257e15e62f
<ide><path>CHANGES.md <ide> <ide> Version 1.x can be found at https://github.com/ReactiveX/RxJava/blob/1.x/CHANGES.md <ide> <add>### Version 2.0.0-RC5 - October 21, 2016 ([Maven](http://search.maven.org/#artifactdetails%7Cio.reactivex.rxjava2%7Crxjava%7C2.0.0-RC5%7C)) <add> <add>This release contains API fixes, further cleanups to code and javadoc, better test coverage and bugfixes. Thanks to the respective contributors and @JakeWharton for the reviews. <add> <add>**API enhancements** <add> <add> - [Pull 4685](https://github.com/ReactiveX/RxJava/pull/4685): Test static from methods and add `Maybe.fromSingle` & `fromCompletable` <add> - [Pull 4687](https://github.com/ReactiveX/RxJava/pull/4687): Add `Observable.rangeLong` & `Flowable.rangeLong`. <add> - [Pull 4690](https://github.com/ReactiveX/RxJava/pull/4690): `BaseTestConsumer` add `assertValueAt(index, Predicate<T>)`. <add> - [Pull 4711](https://github.com/ReactiveX/RxJava/pull/4711): Decouple stream operators from Function interface (`FlowableOperator` and co). <add> - [Pull 4712](https://github.com/ReactiveX/RxJava/pull/4712): make `Observable.sequenceEqual` return Single<Boolean> <add> - [Pull 4714](https://github.com/ReactiveX/RxJava/pull/4714): have `Flowable.toList(Callable)` return Single <add> - [Pull 4720](https://github.com/ReactiveX/RxJava/pull/4720): remove variance from the input source of `retryWhen` <add> - [Pull 4723](https://github.com/ReactiveX/RxJava/pull/4723): remove `flatMapIterable(Function, int)` overload and have `flatMapIterable(Function)` use the flatten operator. <add> - [Pull 4729](https://github.com/ReactiveX/RxJava/pull/4729): Merge `FlowableEmitter.BackpressureMode` into `BackpressureStrategy` <add> - [Pull 4710](https://github.com/ReactiveX/RxJava/pull/4710): Remove checked exceptions from transformer interfaces. <add> <add>**Performance enhancements** <add> <add> - [Pull 4723](https://github.com/ReactiveX/RxJava/pull/4723): enable fusion on `Observable.observeOn` <add> <add>**Bugfixes** <add> <add> - [Pull 4681](https://github.com/ReactiveX/RxJava/pull/4681): Fix `Flowable` + `Single` `elementAt` and `elementAtOrError` operators on empty sources. <add> - [Pull 4686](https://github.com/ReactiveX/RxJava/pull/4686): Fix `flatMapX` over-cancellation in case of an inner error. <add> - [Pull 4689](https://github.com/ReactiveX/RxJava/pull/4689): Fix `doOnEvent` NPE on `dispose()` <add> - [Pull 4695](https://github.com/ReactiveX/RxJava/pull/4695): `CompositeException` fix order of exceptions <add> - [Pull 4696](https://github.com/ReactiveX/RxJava/pull/4696): Fix inner `Throwable` order for `CompletablePeek` <add> - [Pull 4705](https://github.com/ReactiveX/RxJava/pull/4705): fix `Observable.flatMap`'s dispose behavior and error accumulation <add> - [Pull 4707](https://github.com/ReactiveX/RxJava/pull/4707): Fix `Flowable.elementAt` on empty sources. 
<add> - [Pull 4708](https://github.com/ReactiveX/RxJava/pull/4708): fix `Observable.publish(Function)` latecommer behavior <add> - [Pull 4712](https://github.com/ReactiveX/RxJava/pull/4712): fix `Observable.combineLatest` error management, fix `Observable.flatMap` `maxConcurrency` behavior with scalars, use of unbounded queue, fix `Observable.timeInterval` not saving the `Disposable` <add> - [Pull 4723](https://github.com/ReactiveX/RxJava/pull/4723): fix fusion of `Observable.just`, fix `Observable.replay()` potential emission before `onSubscribe` call <add> - [Pull 4731](https://github.com/ReactiveX/RxJava/pull/4731): Delegate null `Collection`s down to `onError` in `toList` <add> - [Pull 4736](https://github.com/ReactiveX/RxJava/pull/4736): fix `onBackpressureBuffer(long, Action, BufferOverflowStrategy)` return type, fix `concatMapDelayError` wrong barrier mode selected. <add> - [Pull 4738](https://github.com/ReactiveX/RxJava/pull/4738): Fix `Flowable.flatMap` error, cancellation and resource management. <add> <add>**Removals** <add> <add> - [Pull 4689](https://github.com/ReactiveX/RxJava/pull/4689): Remove `Maybe.toCompletable`, use `Maybe.ignoreElement`. <add> - [Pull 4708](https://github.com/ReactiveX/RxJava/pull/4708): remove `bufferSize` overloads of `Observable.publish` as there is no need to buffer anything for an `Observable` <add> - [Pull 4723](https://github.com/ReactiveX/RxJava/pull/4723): remove `flatMapIterable(Function, int)` overload <add> <add>**Other** <add> <add> - [Pull 4682](https://github.com/ReactiveX/RxJava/pull/4682): Update Mockito to 2.1.0 <add> - [Pull 4699](https://github.com/ReactiveX/RxJava/pull/4699): Clean up null usages by using `ObjectHelper.requireNonNull`. <add> <add> <ide> ### Version 2.0.0-RC4 - October 7, 2016 ([Maven](http://search.maven.org/#artifactdetails%7Cio.reactivex.rxjava2%7Crxjava%7C2.0.0-RC4%7C)) <ide> <ide> This release contains new operators, further cleanups, better test coverage and bugfixes. Thanks to the respective contributors and @JakeWharton for the reviews. <ide><path>README.md <ide> The release plan for 2.x is as [follows](https://github.com/ReactiveX/RxJava/iss <ide> - **05/09/2016: Release Candidate 2: addressing feedback from RC 1** <ide> - **23/09/2016: Release Candidate 3: addressing feedback from RC 2** <ide> - **07/10/2016: Release Candidate 4: addressing feedback from RC 3** <del> - 21/10/2016: Release Candidate 5: addressing feedback from RC 4 <add> - **21/10/2016: Release Candidate 5: addressing feedback from RC 4** <ide> - 29/10/2016: Stable Release: General availability <ide> <ide> ## Communication <ide> The release plan for 2.x is as [follows](https://github.com/ReactiveX/RxJava/iss <ide> <ide> ## Versioning <ide> <del>Version 2.x has reached Release Candidate 1. <add>Version 2.x has reached Release Candidate 5. <ide> <ide> Version 1.x is now a stable API and will be supported for several years. <ide> <del>Minor 1.x increments (such as 1.1, 1.2, etc) will occur when non-trivial new functionality is added or significant enhancements or bug fixes occur that may have behavioral changes that may affect some edge cases (such as dependence on behavior resulting from a bug). An example of an enhancement that would classify as this is adding reactive pull backpressure support to an operator that previously did not support it. This should be backwards compatible but does behave differently. 
<add>Minor 2.x increments (such as 2.1, 2.2, etc) will occur when non-trivial new functionality is added or significant enhancements or bug fixes occur that may have behavioral changes that may affect some edge cases (such as dependence on behavior resulting from a bug). An example of an enhancement that would classify as this is adding reactive pull backpressure support to an operator that previously did not support it. This should be backwards compatible but does behave differently. <ide> <del>Patch 1.x.y increments (such as 1.0.0 -> 1.0.1, 1.3.1 -> 1.3.2, etc) will occur for bug fixes and trivial functionality (like adding a method overload). New functionality marked with an [`@Beta`][beta source link] or [`@Experimental`][experimental source link] annotation can also be added in patch releases to allow rapid exploration and iteration of unstable new functionality. <add>Patch 2.x.y increments (such as 2.0.0 -> 2.0.1, 2.3.1 -> 2.3.2, etc) will occur for bug fixes and trivial functionality (like adding a method overload). New functionality marked with an [`@Beta`][beta source link] or [`@Experimental`][experimental source link] annotation can also be added in patch releases to allow rapid exploration and iteration of unstable new functionality. <ide> <ide> #### @Beta <ide>
2
Go
Go
remove useless goroutine
3dc93e390ad3d310dede84948b726ce67e261375
<ide><path>commands.go <ide> func (cli *DockerCli) CmdAttach(args ...string) error { <ide> return err <ide> } <ide> <del> chErrors := make(chan error) <ide> if container.Config.Tty { <ide> cli.monitorTtySize(cmd.Arg(0)) <ide> } <ide> func (cli *DockerCli) CmdAttach(args ...string) error { <ide> v.Set("stdout", "1") <ide> v.Set("stderr", "1") <ide> <del> go func() { <del> chErrors <- cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), container.Config.Tty, os.Stdin, os.Stdout) <del> }() <del> <del> if err := <-chErrors; err != nil { <add> if err := cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), container.Config.Tty, os.Stdin, os.Stdout); err != nil { <ide> return err <ide> } <ide> return nil
1
Javascript
Javascript
fix a strange chrome issue
52a02383fa521c51d9996a46f03a7080dd825f11
<ide><path>src/traversing.js <ide> jQuery.each({ <ide> } <ide> }, function( name, fn ) { <ide> jQuery.fn[ name ] = function( until, selector ) { <del> var ret = jQuery.map( this, fn, until ); <add> var ret = jQuery.map( this, fn, until ), <add> args = slice.call(arguments); <ide> <ide> if ( !runtil.test( name ) ) { <ide> selector = until; <ide> jQuery.each({ <ide> ret = ret.reverse(); <ide> } <ide> <del> return this.pushStack( ret, name, slice.call(arguments).join(",") ); <add> return this.pushStack( ret, name, args.join(",") ); <ide> }; <ide> }); <ide>
1
Python
Python
allow overriding learning phase
23e1ad2df7c81bcf89b20225ccf90f815d5e978a
<ide><path>keras/backend/tensorflow_backend.py <ide> def learning_phase(): <ide> <ide> def set_learning_phase(value): <ide> global _LEARNING_PHASE <del> _LEARNING_PHASE = tf.constant(value, name='keras_learning_phase') <add> if value not in {0, 1}: <add> raise ValueError('Expected learning phase to be ' <add> '0 or 1.') <add> _LEARNING_PHASE = value <ide> <ide> <ide> def get_session(): <ide> def __init__(self, inputs, outputs, updates=[]): <ide> <ide> def __call__(self, inputs): <ide> assert type(inputs) in {list, tuple} <del> names = [v.name for v in self.inputs] <add> names = [getattr(v, 'name', None) for v in self.inputs] <ide> feed_dict = dict(zip(names, inputs)) <ide> session = get_session() <ide> updated = session.run(self.outputs + [self.updates_op], feed_dict=feed_dict) <ide> def in_train_phase(x, alt): <ide> '''Selects `x` in train phase, and `alt` otherwise. <ide> Note that `alt` should have the *same shape* as `x`. <ide> ''' <add> if _LEARNING_PHASE is 1: <add> return x <add> elif _LEARNING_PHASE is 0: <add> return alt <add> # else: assume learning phase is a placeholder. <ide> x_shape = copy.copy(x.get_shape()) <ide> x = tf.python.control_flow_ops.cond(tf.cast(_LEARNING_PHASE, 'bool'), <ide> lambda: x, <ide> def in_test_phase(x, alt): <ide> '''Selects `x` in test phase, and `alt` otherwise. <ide> Note that `alt` should have the *same shape* as `x`. <ide> ''' <add> if _LEARNING_PHASE is 1: <add> return alt <add> elif _LEARNING_PHASE is 0: <add> return x <ide> x_shape = copy.copy(x.get_shape()) <ide> x = tf.python.control_flow_ops.cond(tf.cast(_LEARNING_PHASE, 'bool'), <ide> lambda: alt, <ide><path>keras/backend/theano_backend.py <ide> def learning_phase(): <ide> return _LEARNING_PHASE <ide> <ide> <add>def set_learning_phase(value): <add> global _LEARNING_PHASE <add> if value not in {0, 1}: <add> raise ValueError('Expected learning phase to be ' <add> '0 or 1.') <add> _LEARNING_PHASE = value <add> <add> <ide> # VARIABLE MANIPULATION <ide> <ide> def variable(value, dtype=_FLOATX, name=None): <ide> class Function(object): <ide> def __init__(self, inputs, outputs, updates=[], **kwargs): <ide> self.function = theano.function(inputs, outputs, updates=updates, <ide> allow_input_downcast=True, <del> on_unused_input='warn', <add> on_unused_input='ignore', <ide> **kwargs) <ide> <ide> def __call__(self, inputs): <ide> def switch(condition, then_expression, else_expression): <ide> <ide> <ide> def in_train_phase(x, alt): <add> if _LEARNING_PHASE is 1: <add> return x <add> elif _LEARNING_PHASE is 0: <add> return alt <ide> x = T.switch(_LEARNING_PHASE, x, alt) <ide> x._uses_learning_phase = True <ide> return x <ide> <ide> <ide> def in_test_phase(x, alt): <add> if _LEARNING_PHASE is 1: <add> return alt <add> elif _LEARNING_PHASE is 0: <add> return x <ide> x = T.switch(_LEARNING_PHASE, alt, x) <ide> x._uses_learning_phase = True <ide> return x <ide><path>keras/engine/training.py <ide> def _make_train_function(self): <ide> if not hasattr(self, 'train_function'): <ide> raise Exception('You must compile your model before using it.') <ide> if self.train_function is None: <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> inputs = self.inputs + self.targets + self.sample_weights + [K.learning_phase()] <ide> else: <ide> inputs = self.inputs + self.targets + self.sample_weights <ide> def _make_test_function(self): <ide> if not hasattr(self, 'test_function'): <ide> raise Exception('You must compile your model before using it.') <ide> if 
self.test_function is None: <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> inputs = self.inputs + self.targets + self.sample_weights + [K.learning_phase()] <ide> else: <ide> inputs = self.inputs + self.targets + self.sample_weights <ide> def _make_predict_function(self): <ide> if not hasattr(self, 'predict_function'): <ide> self.predict_function = None <ide> if self.predict_function is None: <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> inputs = self.inputs + [K.learning_phase()] <ide> else: <ide> inputs = self.inputs <ide> def fit(self, x, y, batch_size=32, nb_epoch=10, verbose=1, callbacks=[], <ide> batch_size=batch_size) <ide> self._make_test_function() <ide> val_f = self.test_function <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> val_ins = val_x + val_y + val_sample_weights + [0.] <ide> else: <ide> val_ins = val_x + val_y + val_sample_weights <ide> def fit(self, x, y, batch_size=32, nb_epoch=10, verbose=1, callbacks=[], <ide> slice_X(sample_weights, 0, split_at), slice_X(sample_weights, split_at)) <ide> self._make_test_function() <ide> val_f = self.test_function <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> val_ins = val_x + val_y + val_sample_weights + [0.] <ide> else: <ide> val_ins = val_x + val_y + val_sample_weights <ide> def fit(self, x, y, batch_size=32, nb_epoch=10, verbose=1, callbacks=[], <ide> val_ins = None <ide> <ide> # prepare input arrays and training function <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + y + sample_weights + [1.] <ide> else: <ide> ins = x + y + sample_weights <ide> def evaluate(self, x, y, batch_size=32, verbose=1, sample_weight=None): <ide> check_batch_dim=False, <ide> batch_size=batch_size) <ide> # prepare inputs, delegate logic to _test_loop <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + y + sample_weights + [0.] <ide> else: <ide> ins = x + y + sample_weights <ide> def predict(self, x, batch_size=32, verbose=0): <ide> 'Batch size: ' + str(batch_size) + '.') <ide> <ide> # prepare inputs, delegate logic to _predict_loop <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + [0.] <ide> else: <ide> ins = x <ide> def train_on_batch(self, x, y, <ide> sample_weight=sample_weight, <ide> class_weight=class_weight, <ide> check_batch_dim=True) <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + y + sample_weights + [1.] <ide> else: <ide> ins = x + y + sample_weights <ide> def test_on_batch(self, x, y, sample_weight=None): <ide> x, y, sample_weights = self._standardize_user_data(x, y, <ide> sample_weight=sample_weight, <ide> check_batch_dim=True) <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + y + sample_weights + [0.] 
<ide> else: <ide> ins = x + y + sample_weights <ide> def predict_on_batch(self, x): <ide> ''' <ide> x = standardize_input_data(x, self.input_names, <ide> self.internal_input_shapes) <del> if self.uses_learning_phase: <add> if self.uses_learning_phase and type(K.learning_phase()) is not int: <ide> ins = x + [0.] <ide> else: <ide> ins = x
3
Text
Text
release notes for 1.0.0rc9 eggplant-teleportation
8ad02bb5a8b20d5158f1ca4ac86d09b57f1465b4
<ide><path>CHANGELOG.md <add><a name="1.0.0rc9"></a> <add># 1.0.0rc9 eggplant-teleportation (2012-05-14) <add> <add> <add>## Bug Fixes <add> <add>- **$location:** <add> - single quote in url causes infinite digest in FF <add> ([679cb8a7](https://github.com/angular/angular.js/commit/679cb8a74a684454fe38fa9e1ddad396bb598c52), <add> [#920](https://github.com/angular/angular.js/issues/920)) <add> - support urls with any protocol <add> ([c1533ef5](https://github.com/angular/angular.js/commit/c1533ef5762199bea18d3bf3bcba7fcf89272931)) <add> - don't use buggy history.pushState api on Android < 4 <add> ([7b739c97](https://github.com/angular/angular.js/commit/7b739c97028be2a5d5aef679ef1f8064cd10d386), <add> [#904](https://github.com/angular/angular.js/issues/904)) <add> - work around Opera's base href issue <add> ([b99f65f6](https://github.com/angular/angular.js/commit/b99f65f64d1e54315b3210d78a9a9adbcf34c96c), <add> [#938](https://github.com/angular/angular.js/issues/938)) <add>- **docs app:** get docs app to work on IE8 <add> ([aa025348](https://github.com/angular/angular.js/commit/aa02534865c8e43dcef9e218b12c8c717c837205)) <add> <add> <add> <ide> <a name="1.0.0rc8"></a> <ide> # 1.0.0rc8 blooming-touch (2012-05-06) <ide>
1
Javascript
Javascript
stabilize another test
d97d045dfc7bbad53dfee8b092e1d95bcaf099f1
<ide><path>test/integration/empty-object-getInitialProps/test/index.test.js <ide> import { <ide> launchApp, <ide> killApp, <ide> waitFor, <add> check, <ide> } from 'next-test-utils' <ide> <ide> jest.setTimeout(1000 * 60 * 2) <ide> describe('Empty Project', () => { <ide> <ide> it('should show empty object warning during client transition', async () => { <ide> const browser = await webdriver(appPort, '/static') <del> await browser.eval(`(function() { <del> window.gotWarn = false <del> const origWarn = console.warn <del> window.console.warn = function () { <del> if (arguments[0].match(/returned an empty object from \`getInitialProps\`/)) { <del> window.gotWarn = true <add> try { <add> await browser.eval(`(function() { <add> window.gotWarn = false <add> const origWarn = console.warn <add> window.console.warn = function () { <add> if (arguments[0].match(/returned an empty object from \`getInitialProps\`/)) { <add> window.gotWarn = true <add> } <add> origWarn.apply(this, arguments) <ide> } <del> origWarn.apply(this, arguments) <del> } <del> window.next.router.replace('/another') <del> })()`) <del> await waitFor(1000) <del> const gotWarn = await browser.eval(`window.gotWarn`) <del> expect(gotWarn).toBe(true) <del> await browser.close() <add> window.next.router.replace('/another') <add> })()`) <add> await check(async () => { <add> const gotWarn = await browser.eval(`window.gotWarn`) <add> return gotWarn ? 'pass' : 'fail' <add> }, 'pass') <add> } finally { <add> await browser.close() <add> } <ide> }) <ide> })
1
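The fix above swaps a fixed `waitFor(1000)` for the polling `check` helper from `next-test-utils`. As a hedged illustration of the pattern only (this is not the actual `next-test-utils` implementation), a minimal polling assertion can look like this:

```javascript
// Minimal stand-in for a polling check: re-run an async probe until it
// returns the expected value or the timeout elapses, instead of sleeping
// for a fixed amount of time and hoping the condition is already true.
async function pollUntil(probe, expected, timeoutMs = 10000, intervalMs = 250) {
  const deadline = Date.now() + timeoutMs;
  let last;
  while (Date.now() < deadline) {
    last = await probe();
    if (last === expected) {
      return last;
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error(`pollUntil timed out; last value was: ${last}`);
}

// Usage in the spirit of the test above (browser.eval is assumed):
// await pollUntil(async () => (await browser.eval('window.gotWarn')) ? 'pass' : 'fail', 'pass');
```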
Go
Go
simplify logic for registering ports
da61b99b392657343df4dc221ba5cd9ad6b1c9e1
<ide><path>network.go <ide> import ( <ide> "log" <ide> "net" <ide> "strconv" <del> "sync" <ide> "syscall" <ide> "unsafe" <ide> ) <ide> func newPortMapper(config *DaemonConfig) (*PortMapper, error) { <ide> return mapper, nil <ide> } <ide> <del>// Port allocator: Automatically allocate and release networking ports <del>type PortAllocator struct { <del> sync.Mutex <del> inUse map[string]struct{} <del> fountain chan int <del> quit chan bool <del>} <del> <del>func (alloc *PortAllocator) runFountain() { <del> for { <del> for port := portRangeStart; port < portRangeEnd; port++ { <del> select { <del> case alloc.fountain <- port: <del> case quit := <-alloc.quit: <del> if quit { <del> return <del> } <del> } <del> } <del> } <del>} <del> <del>// FIXME: Release can no longer fail, change its prototype to reflect that. <del>func (alloc *PortAllocator) Release(addr net.IP, port int) error { <del> mapKey := (&net.TCPAddr{Port: port, IP: addr}).String() <del> utils.Debugf("Releasing %d", port) <del> alloc.Lock() <del> delete(alloc.inUse, mapKey) <del> alloc.Unlock() <del> return nil <del>} <del> <del>func (alloc *PortAllocator) Acquire(addr net.IP, port int) (int, error) { <del> mapKey := (&net.TCPAddr{Port: port, IP: addr}).String() <del> utils.Debugf("Acquiring %s", mapKey) <del> if port == 0 { <del> // Allocate a port from the fountain <del> for port := range alloc.fountain { <del> if _, err := alloc.Acquire(addr, port); err == nil { <del> return port, nil <del> } <del> } <del> return -1, fmt.Errorf("Port generator ended unexpectedly") <del> } <del> alloc.Lock() <del> defer alloc.Unlock() <del> if _, inUse := alloc.inUse[mapKey]; inUse { <del> return -1, fmt.Errorf("Port already in use: %d", port) <del> } <del> alloc.inUse[mapKey] = struct{}{} <del> return port, nil <del>} <del> <del>func (alloc *PortAllocator) Close() error { <del> alloc.quit <- true <del> close(alloc.quit) <del> close(alloc.fountain) <del> return nil <del>} <del> <del>func newPortAllocator() (*PortAllocator, error) { <del> allocator := &PortAllocator{ <del> inUse: make(map[string]struct{}), <del> fountain: make(chan int), <del> quit: make(chan bool), <del> } <del> go allocator.runFountain() <del> return allocator, nil <del>} <del> <ide> // Network interface represents the networking stack of a container <ide> type NetworkInterface struct { <ide> IPNet net.IPNet <ide> func (iface *NetworkInterface) AllocatePort(port Port, binding PortBinding) (*Na <ide> <ide> hostPort, _ := parsePort(nat.Binding.HostPort) <ide> <del> if nat.Port.Proto() == "tcp" { <del> extPort, err := iface.manager.tcpPortAllocator.Acquire(ip, hostPort) <del> if err != nil { <del> return nil, err <del> } <add> extPort, err := portallocator.RequestPort(ip, nat.Port.Proto(), hostPort) <add> if err != nil { <add> return nil, err <add> } <ide> <del> backend := &net.TCPAddr{IP: iface.IPNet.IP, Port: containerPort} <del> if err := iface.manager.portMapper.Map(ip, extPort, backend); err != nil { <del> iface.manager.tcpPortAllocator.Release(ip, extPort) <del> return nil, err <del> } <del> nat.Binding.HostPort = strconv.Itoa(extPort) <add> var backend net.Addr <add> if nat.Port.Proto() == "tcp" { <add> backend = &net.TCPAddr{IP: iface.IPNet.IP, Port: containerPort} <ide> } else { <del> extPort, err := iface.manager.udpPortAllocator.Acquire(ip, hostPort) <del> if err != nil { <del> return nil, err <del> } <del> backend := &net.UDPAddr{IP: iface.IPNet.IP, Port: containerPort} <del> if err := iface.manager.portMapper.Map(ip, extPort, backend); err != nil { <del> 
iface.manager.udpPortAllocator.Release(ip, extPort) <del> return nil, err <del> } <del> nat.Binding.HostPort = strconv.Itoa(extPort) <add> backend = &net.UDPAddr{IP: iface.IPNet.IP, Port: containerPort} <ide> } <add> <add> if err := iface.manager.portMapper.Map(ip, extPort, backend); err != nil { <add> portallocator.ReleasePort(ip, nat.Port.Proto(), extPort) <add> return nil, err <add> } <add> <add> nat.Binding.HostPort = strconv.Itoa(extPort) <ide> iface.extPorts = append(iface.extPorts, nat) <ide> <ide> return nat, nil <ide> func (iface *NetworkInterface) Release() { <ide> log.Printf("Unable to unmap port %s: %s", nat, err) <ide> } <ide> <del> if nat.Port.Proto() == "tcp" { <del> if err := iface.manager.tcpPortAllocator.Release(ip, hostPort); err != nil { <del> log.Printf("Unable to release port %s", nat) <del> } <del> } else if nat.Port.Proto() == "udp" { <del> if err := iface.manager.udpPortAllocator.Release(ip, hostPort); err != nil { <del> log.Printf("Unable to release port %s: %s", nat, err) <del> } <add> if err := portallocator.ReleasePort(ip, nat.Port.Proto(), hostPort); err != nil { <add> log.Printf("Unable to release port %s", nat) <ide> } <ide> } <ide> <ide> type NetworkManager struct { <ide> bridgeIface string <ide> bridgeNetwork *net.IPNet <ide> <del> tcpPortAllocator *PortAllocator <del> udpPortAllocator *PortAllocator <del> portMapper *PortMapper <add> portMapper *PortMapper <ide> <ide> disabled bool <ide> } <ide> func (manager *NetworkManager) Allocate() (*NetworkInterface, error) { <ide> return iface, nil <ide> } <ide> <del>func (manager *NetworkManager) Close() error { <del> if manager.disabled { <del> return nil <del> } <del> err1 := manager.tcpPortAllocator.Close() <del> err2 := manager.udpPortAllocator.Close() <del> if err1 != nil { <del> return err1 <del> } <del> if err2 != nil { <del> return err2 <del> } <del> return nil <del>} <del> <ide> func newNetworkManager(config *DaemonConfig) (*NetworkManager, error) { <ide> if config.BridgeIface == DisableNetworkBridge { <ide> manager := &NetworkManager{ <ide> func newNetworkManager(config *DaemonConfig) (*NetworkManager, error) { <ide> } <ide> } <ide> <del> tcpPortAllocator, err := newPortAllocator() <del> if err != nil { <del> return nil, err <del> } <del> <del> udpPortAllocator, err := newPortAllocator() <del> if err != nil { <del> return nil, err <del> } <del> <ide> portMapper, err := newPortMapper(config) <ide> if err != nil { <ide> return nil, err <ide> } <ide> <ide> manager := &NetworkManager{ <del> bridgeIface: config.BridgeIface, <del> bridgeNetwork: network, <del> tcpPortAllocator: tcpPortAllocator, <del> udpPortAllocator: udpPortAllocator, <del> portMapper: portMapper, <add> bridgeIface: config.BridgeIface, <add> bridgeNetwork: network, <add> portMapper: portMapper, <ide> } <ide> <ide> return manager, nil <ide><path>network_test.go <ide> import ( <ide> "testing" <ide> ) <ide> <del>func TestPortAllocation(t *testing.T) { <del> ip := net.ParseIP("192.168.0.1") <del> ip2 := net.ParseIP("192.168.0.2") <del> allocator, err := newPortAllocator() <del> if err != nil { <del> t.Fatal(err) <del> } <del> if port, err := allocator.Acquire(ip, 80); err != nil { <del> t.Fatal(err) <del> } else if port != 80 { <del> t.Fatalf("Acquire(80) should return 80, not %d", port) <del> } <del> port, err := allocator.Acquire(ip, 0) <del> if err != nil { <del> t.Fatal(err) <del> } <del> if port <= 0 { <del> t.Fatalf("Acquire(0) should return a non-zero port") <del> } <del> if _, err := allocator.Acquire(ip, port); err == nil { <del> 
t.Fatalf("Acquiring a port already in use should return an error") <del> } <del> if newPort, err := allocator.Acquire(ip, 0); err != nil { <del> t.Fatal(err) <del> } else if newPort == port { <del> t.Fatalf("Acquire(0) allocated the same port twice: %d", port) <del> } <del> if _, err := allocator.Acquire(ip, 80); err == nil { <del> t.Fatalf("Acquiring a port already in use should return an error") <del> } <del> if _, err := allocator.Acquire(ip2, 80); err != nil { <del> t.Fatalf("It should be possible to allocate the same port on a different interface") <del> } <del> if _, err := allocator.Acquire(ip2, 80); err == nil { <del> t.Fatalf("Acquiring a port already in use should return an error") <del> } <del> if err := allocator.Release(ip, 80); err != nil { <del> t.Fatal(err) <del> } <del> if _, err := allocator.Acquire(ip, 80); err != nil { <del> t.Fatal(err) <del> } <del>} <del> <ide> type StubProxy struct { <ide> frontendAddr *net.Addr <ide> backendAddr *net.Addr <ide><path>networkdriver/portallocator/portallocator.go <ide> import ( <ide> "sync" <ide> ) <ide> <del>type portMappings map[string]*collections.OrderedIntSet <del> <del>type ipData struct { <del> allocatedPorts portMappings <del> availablePorts portMappings <del>} <del> <del>type ipMapping map[net.IP]*ipData <del> <ide> const ( <ide> BeginPortRange = 49153 <ide> EndPortRange = 65535 <ide> ) <ide> <add>type ( <add> portMappings map[string]*collections.OrderedIntSet <add> ipMapping map[string]portMappings <add>) <add> <ide> var ( <ide> ErrPortAlreadyAllocated = errors.New("port has already been allocated") <ide> ErrPortExceedsRange = errors.New("port exceeds upper range") <ide> ErrUnknownProtocol = errors.New("unknown protocol") <ide> ) <ide> <ide> var ( <del> defaultIPData *ipData <del> <del> lock = sync.Mutex{} <del> ips = ipMapping{} <del> defaultIP = net.ParseIP("0.0.0.0") <add> currentDynamicPort = map[string]int{ <add> "tcp": BeginPortRange - 1, <add> "udp": BeginPortRange - 1, <add> } <add> defaultIP = net.ParseIP("0.0.0.0") <add> defaultAllocatedPorts = portMappings{} <add> otherAllocatedPorts = ipMapping{} <add> lock = sync.Mutex{} <ide> ) <ide> <ide> func init() { <del> defaultIPData = newIpData() <del> ips[defaultIP] = defaultIP <del>} <del> <del>func newIpData() { <del> data := &ipData{ <del> allocatedPorts: portMappings{}, <del> availablePorts: portMappings{}, <del> } <del> <del> data.allocatedPorts["udp"] = collections.NewOrderedIntSet() <del> data.availablePorts["udp"] = collections.NewOrderedIntSet() <del> data.allocatedPorts["tcp"] = collections.NewOrderedIntSet() <del> data.availablePorts["tcp"] = collections.NewOrderedIntSet() <del> <del> return data <del>} <del> <del>func getData(ip net.IP) *ipData { <del> data, exists := ips[ip] <del> if !exists { <del> data = newIpData() <del> ips[ip] = data <del> } <del> return data <del>} <del> <del>func validateMapping(data *ipData, proto string, port int) error { <del> allocated := data.allocatedPorts[proto] <del> if allocated.Exists(proto) { <del> return ErrPortAlreadyAllocated <del> } <del> return nil <del>} <del> <del>func usePort(data *ipData, proto string, port int) { <del> allocated, available := data.allocatedPorts[proto], data.availablePorts[proto] <del> for i := 0; i < 2; i++ { <del> allocated.Push(port) <del> available.Remove(port) <del> allocated, available = defaultIPData.allocatedPorts[proto], defaultIPData.availablePorts[proto] <del> } <add> defaultAllocatedPorts["tcp"] = collections.NewOrderedIntSet() <add> defaultAllocatedPorts["udp"] = 
collections.NewOrderedIntSet() <ide> } <ide> <ide> // RequestPort returns an available port if the port is 0 <ide> func RequestPort(ip net.IP, proto string, port int) (int, error) { <ide> return 0, err <ide> } <ide> <del> data := getData(ip) <del> allocated, available := data.allocatedPorts[proto], data.availablePorts[proto] <del> <ide> // If the user requested a specific port to be allocated <ide> if port != 0 { <del> if err := validateMapping(defaultIP, proto, port); err != nil { <add> if err := registerSetPort(ip, proto, port); err != nil { <ide> return 0, err <ide> } <del> <del> if !defaultIP.Equal(ip) { <del> if err := validateMapping(data, proto, port); err != nil { <del> return 0, err <del> } <del> } <del> <del> available.Remove(port) <del> allocated.Push(port) <del> <ide> return port, nil <ide> } <del> <del> // Dynamic allocation <del> next := available.Pop() <del> if next == 0 { <del> next = allocated.PullBack() <del> if next == 0 { <del> next = BeginPortRange <del> } else { <del> next++ <del> } <del> if next > EndPortRange { <del> return 0, ErrPortExceedsRange <del> } <del> } <del> <del> allocated.Push(next) <del> return next, nil <add> return registerDynamicPort(ip, proto) <ide> } <ide> <ide> // ReleasePort will return the provided port back into the <ide> func ReleasePort(ip net.IP, proto string, port int) error { <ide> return err <ide> } <ide> <del> allocated, available := getCollection(ip, proto) <del> <add> allocated := defaultAllocatedPorts[proto] <ide> allocated.Remove(port) <del> available.Push(port) <ide> <add> if !equalsDefault(ip) { <add> registerIP(ip) <add> <add> // Remove the port for the specific ip address <add> allocated = otherAllocatedPorts[ip.String()][proto] <add> allocated.Remove(port) <add> } <ide> return nil <ide> } <ide> <add>func ReleaseAll() error { <add> lock.Lock() <add> defer lock.Unlock() <add> <add> currentDynamicPort["tcp"] = BeginPortRange - 1 <add> currentDynamicPort["udp"] = BeginPortRange - 1 <add> <add> defaultAllocatedPorts = portMappings{} <add> defaultAllocatedPorts["tcp"] = collections.NewOrderedIntSet() <add> defaultAllocatedPorts["udp"] = collections.NewOrderedIntSet() <add> <add> otherAllocatedPorts = ipMapping{} <add> <add> return nil <add>} <add> <add>func registerDynamicPort(ip net.IP, proto string) (int, error) { <add> allocated := defaultAllocatedPorts[proto] <add> <add> port := nextPort(proto) <add> if port > EndPortRange { <add> return 0, ErrPortExceedsRange <add> } <add> <add> if !equalsDefault(ip) { <add> registerIP(ip) <add> <add> ipAllocated := otherAllocatedPorts[ip.String()][proto] <add> ipAllocated.Push(port) <add> } else { <add> allocated.Push(port) <add> } <add> return port, nil <add>} <add> <add>func registerSetPort(ip net.IP, proto string, port int) error { <add> allocated := defaultAllocatedPorts[proto] <add> if allocated.Exists(port) { <add> return ErrPortAlreadyAllocated <add> } <add> <add> if !equalsDefault(ip) { <add> registerIP(ip) <add> <add> ipAllocated := otherAllocatedPorts[ip.String()][proto] <add> if ipAllocated.Exists(port) { <add> return ErrPortAlreadyAllocated <add> } <add> ipAllocated.Push(port) <add> } else { <add> allocated.Push(port) <add> } <add> return nil <add>} <add> <add>func equalsDefault(ip net.IP) bool { <add> return ip == nil || ip.Equal(defaultIP) <add>} <add> <add>func nextPort(proto string) int { <add> c := currentDynamicPort[proto] + 1 <add> currentDynamicPort[proto] = c <add> return c <add>} <add> <add>func registerIP(ip net.IP) { <add> if _, exists := otherAllocatedPorts[ip.String()]; 
!exists { <add> otherAllocatedPorts[ip.String()] = portMappings{ <add> "tcp": collections.NewOrderedIntSet(), <add> "udp": collections.NewOrderedIntSet(), <add> } <add> } <add>} <add> <ide> func validateProtocol(proto string) error { <del> if _, exists := allocatedPorts[proto]; !exists { <add> if _, exists := defaultAllocatedPorts[proto]; !exists { <ide> return ErrUnknownProtocol <ide> } <ide> return nil <ide><path>networkdriver/portallocator/portallocator_test.go <ide> package portallocator <ide> <ide> import ( <del> "github.com/dotcloud/docker/pkg/collections" <add> "net" <ide> "testing" <ide> ) <ide> <ide> func reset() { <del> lock.Lock() <del> defer lock.Unlock() <del> <del> allocatedPorts = portMappings{} <del> availablePorts = portMappings{} <del> <del> allocatedPorts["udp"] = collections.NewOrderedIntSet() <del> availablePorts["udp"] = collections.NewOrderedIntSet() <del> allocatedPorts["tcp"] = collections.NewOrderedIntSet() <del> availablePorts["tcp"] = collections.NewOrderedIntSet() <add> ReleaseAll() <ide> } <ide> <ide> func TestRequestNewPort(t *testing.T) { <ide> defer reset() <ide> <del> port, err := RequestPort("tcp", 0) <add> port, err := RequestPort(defaultIP, "tcp", 0) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestRequestNewPort(t *testing.T) { <ide> func TestRequestSpecificPort(t *testing.T) { <ide> defer reset() <ide> <del> port, err := RequestPort("tcp", 5000) <add> port, err := RequestPort(defaultIP, "tcp", 5000) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestRequestSpecificPort(t *testing.T) { <ide> func TestReleasePort(t *testing.T) { <ide> defer reset() <ide> <del> port, err := RequestPort("tcp", 5000) <add> port, err := RequestPort(defaultIP, "tcp", 5000) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> if port != 5000 { <ide> t.Fatalf("Expected port 5000 got %d", port) <ide> } <ide> <del> if err := ReleasePort("tcp", 5000); err != nil { <add> if err := ReleasePort(defaultIP, "tcp", 5000); err != nil { <ide> t.Fatal(err) <ide> } <ide> } <ide> <ide> func TestReuseReleasedPort(t *testing.T) { <ide> defer reset() <ide> <del> port, err := RequestPort("tcp", 5000) <add> port, err := RequestPort(defaultIP, "tcp", 5000) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> if port != 5000 { <ide> t.Fatalf("Expected port 5000 got %d", port) <ide> } <ide> <del> if err := ReleasePort("tcp", 5000); err != nil { <add> if err := ReleasePort(defaultIP, "tcp", 5000); err != nil { <ide> t.Fatal(err) <ide> } <ide> <del> port, err = RequestPort("tcp", 5000) <add> port, err = RequestPort(defaultIP, "tcp", 5000) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestReuseReleasedPort(t *testing.T) { <ide> func TestReleaseUnreadledPort(t *testing.T) { <ide> defer reset() <ide> <del> port, err := RequestPort("tcp", 5000) <add> port, err := RequestPort(defaultIP, "tcp", 5000) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> if port != 5000 { <ide> t.Fatalf("Expected port 5000 got %d", port) <ide> } <ide> <del> port, err = RequestPort("tcp", 5000) <add> port, err = RequestPort(defaultIP, "tcp", 5000) <ide> if err != ErrPortAlreadyAllocated { <ide> t.Fatalf("Expected error %s got %s", ErrPortAlreadyAllocated, err) <ide> } <ide> func TestReleaseUnreadledPort(t *testing.T) { <ide> func TestUnknowProtocol(t *testing.T) { <ide> defer reset() <ide> <del> if _, err := RequestPort("tcpp", 0); err != ErrUnknownProtocol { <add> if _, err := RequestPort(defaultIP, "tcpp", 0); err != ErrUnknownProtocol { <ide> t.Fatalf("Expected error %s got 
%s", ErrUnknownProtocol, err) <ide> } <ide> } <ide> func TestAllocateAllPorts(t *testing.T) { <ide> defer reset() <ide> <ide> for i := 0; i <= EndPortRange-BeginPortRange; i++ { <del> port, err := RequestPort("tcp", 0) <add> port, err := RequestPort(defaultIP, "tcp", 0) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestAllocateAllPorts(t *testing.T) { <ide> } <ide> } <ide> <del> if _, err := RequestPort("tcp", 0); err != ErrPortExceedsRange { <add> if _, err := RequestPort(defaultIP, "tcp", 0); err != ErrPortExceedsRange { <ide> t.Fatalf("Expected error %s got %s", ErrPortExceedsRange, err) <ide> } <ide> <del> _, err := RequestPort("udp", 0) <add> _, err := RequestPort(defaultIP, "udp", 0) <ide> if err != nil { <ide> t.Fatal(err) <ide> } <ide> func TestAllocateAllPorts(t *testing.T) { <ide> func BenchmarkAllocatePorts(b *testing.B) { <ide> defer reset() <ide> <del> b.StartTimer() <ide> for i := 0; i < b.N; i++ { <ide> for i := 0; i <= EndPortRange-BeginPortRange; i++ { <del> port, err := RequestPort("tcp", 0) <add> port, err := RequestPort(defaultIP, "tcp", 0) <ide> if err != nil { <ide> b.Fatal(err) <ide> } <ide> func BenchmarkAllocatePorts(b *testing.B) { <ide> } <ide> reset() <ide> } <del> b.StopTimer() <add>} <add> <add>func TestPortAllocation(t *testing.T) { <add> defer reset() <add> <add> ip := net.ParseIP("192.168.0.1") <add> ip2 := net.ParseIP("192.168.0.2") <add> if port, err := RequestPort(ip, "tcp", 80); err != nil { <add> t.Fatal(err) <add> } else if port != 80 { <add> t.Fatalf("Acquire(80) should return 80, not %d", port) <add> } <add> port, err := RequestPort(ip, "tcp", 0) <add> if err != nil { <add> t.Fatal(err) <add> } <add> if port <= 0 { <add> t.Fatalf("Acquire(0) should return a non-zero port") <add> } <add> <add> if _, err := RequestPort(ip, "tcp", port); err == nil { <add> t.Fatalf("Acquiring a port already in use should return an error") <add> } <add> <add> if newPort, err := RequestPort(ip, "tcp", 0); err != nil { <add> t.Fatal(err) <add> } else if newPort == port { <add> t.Fatalf("Acquire(0) allocated the same port twice: %d", port) <add> } <add> <add> if _, err := RequestPort(ip, "tcp", 80); err == nil { <add> t.Fatalf("Acquiring a port already in use should return an error") <add> } <add> if _, err := RequestPort(ip2, "tcp", 80); err != nil { <add> t.Fatalf("It should be possible to allocate the same port on a different interface") <add> } <add> if _, err := RequestPort(ip2, "tcp", 80); err == nil { <add> t.Fatalf("Acquiring a port already in use should return an error") <add> } <add> if err := ReleasePort(ip, "tcp", 80); err != nil { <add> t.Fatal(err) <add> } <add> if _, err := RequestPort(ip, "tcp", 80); err != nil { <add> t.Fatal(err) <add> } <ide> } <ide><path>runtime.go <ide> import ( <ide> "github.com/dotcloud/docker/graphdriver/aufs" <ide> _ "github.com/dotcloud/docker/graphdriver/devmapper" <ide> _ "github.com/dotcloud/docker/graphdriver/vfs" <add> "github.com/dotcloud/docker/networkdriver/portallocator" <ide> "github.com/dotcloud/docker/pkg/graphdb" <ide> "github.com/dotcloud/docker/pkg/sysinfo" <ide> "github.com/dotcloud/docker/utils" <ide> func NewRuntimeFromDirectory(config *DaemonConfig) (*Runtime, error) { <ide> <ide> func (runtime *Runtime) Close() error { <ide> errorsStrings := []string{} <del> if err := runtime.networkManager.Close(); err != nil { <del> utils.Errorf("runtime.networkManager.Close(): %s", err.Error()) <add> if err := portallocator.ReleaseAll(); err != nil { <add> utils.Errorf("portallocator.ReleaseAll(): %s", err) 
<ide> errorsStrings = append(errorsStrings, err.Error()) <ide> } <ide> if err := runtime.driver.Cleanup(); err != nil {
5
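The refactor above replaces the shared pool of pre-computed available ports with a per-protocol dynamic-port counter plus per-IP allocation maps, so RequestPort and ReleasePort now take the IP address they apply to. Below is a minimal usage sketch that mirrors the calls made by the updated tests; it is illustrative only, and the import path is assumed from the repository layout this commit targets.

```go
package main

import (
	"fmt"
	"net"

	"github.com/dotcloud/docker/networkdriver/portallocator"
)

func main() {
	ip := net.ParseIP("192.168.0.1")

	// Ask for a specific port on this interface.
	if _, err := portallocator.RequestPort(ip, "tcp", 80); err != nil {
		fmt.Println("request 80:", err)
		return
	}

	// Ask for any free port; dynamic ports are handed out from
	// BeginPortRange upwards until EndPortRange is exceeded.
	port, err := portallocator.RequestPort(ip, "tcp", 0)
	if err != nil {
		fmt.Println("dynamic request:", err)
		return
	}
	fmt.Println("allocated:", port)

	// Allocation is tracked per IP and per protocol, so release with
	// the same arguments that were used to request.
	portallocator.ReleasePort(ip, "tcp", 80)
	portallocator.ReleasePort(ip, "tcp", port)
}
```

ReleaseAll, added in the same patch, simply resets the counters and maps; that is what the runtime's Close path and the tests' reset helper now rely on instead of tearing down a network manager.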
Javascript
Javascript
use gfm example format rather than <pre> tags
2ca6d650e8a13cee28f11c38622cab231787325f
<ide><path>src/ng/cacheFactory.js <ide> function $CacheFactoryProvider() { <ide> * `$templateCache` service directly. <ide> * <ide> * Adding via the `script` tag: <add> * <ide> * ```html <del> * <html ng-app> <del> * <head> <del> * <script type="text/ng-template" id="templateId.html"> <del> * This is the content of the template <del> * </script> <del> * </head> <del> * ... <del> * </html> <add> * <script type="text/ng-template" id="templateId.html"> <add> * <p>This is the content of the template</p> <add> * </script> <ide> * ``` <del> * <add> * <ide> * **Note:** the `script` tag containing the template does not need to be included in the `head` of <ide> * the document, but it must be below the `ng-app` definition. <ide> *
1
Go
Go
add image_name as a key to journald log messages
5f7e102df73672e5b53c813104ecb061c45a5f52
<ide><path>daemon/logger/journald/journald.go <ide> func New(info logger.Info) (logger.Logger, error) { <ide> "CONTAINER_ID_FULL": info.ContainerID, <ide> "CONTAINER_NAME": info.Name(), <ide> "CONTAINER_TAG": tag, <add> "IMAGE_NAME": info.ImageName(), <ide> "SYSLOG_IDENTIFIER": tag, <ide> } <ide> extraAttrs, err := info.ExtraAttributes(sanitizeKeyMod)
1
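This one-line change adds the image name to the set of journal fields the driver attaches to each log entry, so messages can be filtered by image as well as by container name, ID, or tag. As a rough illustration, with every value below invented and only the key names taken from the driver, the field map now has this shape:

```go
// All values here are hypothetical; only the key names come from the driver.
package main

import "fmt"

func main() {
	tag := "web.1" // whatever the configured log tag template expands to
	fields := map[string]string{
		"CONTAINER_ID":      "4bcdf59b2e85",     // short ID
		"CONTAINER_ID_FULL": "4bcdf59b2e850a1b", // stands in for the full 64-char ID
		"CONTAINER_NAME":    "web",
		"CONTAINER_TAG":     tag,
		"IMAGE_NAME":        "nginx:1.15", // the key this commit adds
		"SYSLOG_IDENTIFIER": tag,
	}
	for k, v := range fields {
		fmt.Printf("%s=%s\n", k, v)
	}
}
```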
Python
Python
improve test cases of integer division
6f205497e01bc68113fc4b5bb7589e56de8d0620
<ide><path>numpy/core/tests/test_simd.py <ide> # NOTE: Please avoid the use of numpy.testing since NPYV intrinsics <ide> # may be involved in their functionality. <ide> import pytest, math, re <add>import itertools <ide> from numpy.core._simd import targets <ide> from numpy.core._multiarray_umath import __cpu_baseline__ <ide> <ide> def test_arithmetic_intdiv(self): <ide> def trunc_div(a, d): <ide> """ <ide> Divide towards zero works with large integers > 2^53, <del> equivalent to int(a/d) <add> and wrap around overflow similar to what C does. <ide> """ <add> if d == -1 and a == int_min: <add> return a <ide> sign_a, sign_d = a < 0, d < 0 <ide> if a == 0 or sign_a == sign_d: <ide> return a // d <ide> def trunc_div(a, d): <ide> 0, 1, self.nlanes, int_max-self.nlanes, <ide> int_min, int_min//2 + 1 <ide> ) <del> divisors = (1, 2, self.nlanes, int_min, int_max, int_max//2) <add> divisors = (1, 2, 9, 13, self.nlanes, int_min, int_max, int_max//2) <ide> <del> for x, d in zip(rdata, divisors): <add> for x, d in itertools.product(rdata, divisors): <ide> data = self._data(x) <ide> vdata = self.load(data) <ide> data_divc = [trunc_div(a, d) for a in data] <ide> def trunc_div(a, d): <ide> <ide> safe_neg = lambda x: -x-1 if -x > int_max else -x <ide> # test round divison for signed integers <del> for x, d in zip(rdata, divisors): <add> for x, d in itertools.product(rdata, divisors): <ide> d_neg = safe_neg(d) <ide> data = self._data(x) <ide> data_neg = [safe_neg(a) for a in data] <ide><path>numpy/core/tests/test_umath.py <ide> import pytest <ide> import sys <ide> from fractions import Fraction <add>from functools import reduce <ide> <ide> import numpy.core.umath as ncu <ide> from numpy.core import _umath_tests as ncu_tests <ide> def test_division_int(self): <ide> assert_equal(x // 100, [0, 0, 0, 1, -1, -1, -1, -1, -2]) <ide> assert_equal(x % 100, [5, 10, 90, 0, 95, 90, 10, 0, 80]) <ide> <del> @pytest.mark.parametrize("input_dtype", <del> np.sctypes['int'] + np.sctypes['uint']) <del> def test_division_int_boundary(self, input_dtype): <del> iinfo = np.iinfo(input_dtype) <del> <del> # Unsigned: <del> # Create list with 0, 25th, 50th, 75th percentile and max <del> if iinfo.min == 0: <del> lst = [0, iinfo.max//4, iinfo.max//2, <del> int(iinfo.max/1.33), iinfo.max] <del> divisors = [iinfo.max//4, iinfo.max//2, <del> int(iinfo.max/1.33), iinfo.max] <del> # Signed: <del> # Create list with min, 25th percentile, 0, 75th percentile, max <del> else: <del> lst = [iinfo.min, iinfo.min//2, 0, iinfo.max//2, iinfo.max] <del> divisors = [iinfo.min, iinfo.min//2, iinfo.max//2, iinfo.max] <del> a = np.array(lst, dtype=input_dtype) <add> @pytest.mark.parametrize("dtype,ex_val", itertools.product( <add> np.sctypes['int'] + np.sctypes['uint'], ( <add> ( <add> # dividend <add> "np.arange(fo.max-lsize, fo.max, dtype=dtype)," <add> # divisors <add> "np.arange(lsize, dtype=dtype)," <add> # scalar divisors <add> "range(15)" <add> ), <add> ( <add> # dividend <add> "np.arange(fo.min, fo.min+lsize, dtype=dtype)," <add> # divisors <add> "np.arange(lsize//-2, lsize//2, dtype=dtype)," <add> # scalar divisors <add> "range(fo.min, fo.min + 15)" <add> ), ( <add> # dividend <add> "np.arange(fo.max-lsize, fo.max, dtype=dtype)," <add> # divisors <add> "np.arange(lsize, dtype=dtype)," <add> # scalar divisors <add> "[1,3,9,13,neg, fo.min+1, fo.min//2, fo.max//3, fo.max//4]" <add> ) <add> ) <add> )) <add> def test_division_int_boundary(self, dtype, ex_val): <add> fo = np.iinfo(dtype) <add> neg = -1 if fo.min < 0 else 1 <add> # Large enough to 
test SIMD loops and remaind elements <add> lsize = 512 + 7 <add> a, b, divisors = eval(ex_val) <add> a_lst, b_lst = a.tolist(), b.tolist() <add> <add> c_div = lambda n, d: ( <add> 0 if d == 0 or (n and n == fo.min and d == -1) else n//d <add> ) <add> with np.errstate(divide='ignore'): <add> ac = a.copy() <add> ac //= b <add> div_ab = a // b <add> div_lst = [c_div(x, y) for x, y in zip(a_lst, b_lst)] <add> <add> msg = "Integer arrays floor division check (//)" <add> assert all(div_ab == div_lst), msg <add> msg_eq = "Integer arrays floor division check (//=)" <add> assert all(ac == div_lst), msg_eq <ide> <ide> for divisor in divisors: <del> div_a = a // divisor <del> b = a.copy(); b //= divisor <del> div_lst = [i // divisor for i in lst] <add> ac = a.copy() <add> with np.errstate(divide='ignore'): <add> div_a = a // divisor <add> ac //= divisor <add> div_lst = [c_div(i, divisor) for i in a_lst] <ide> <del> msg = "Integer arrays floor division check (//)" <ide> assert all(div_a == div_lst), msg <del> <del> msg = "Integer arrays floor division check (//=)" <del> assert all(div_a == b), msg <add> assert all(ac == div_lst), msg_eq <ide> <ide> with np.errstate(divide='raise'): <add> if 0 in b or (fo.min and -1 in b and fo.min in a): <add> # Verify overflow case <add> with pytest.raises(FloatingPointError): <add> a // b <add> else: <add> a // b <add> if fo.min and fo.min in a: <add> with pytest.raises(FloatingPointError): <add> a // -1 <add> elif fo.min: <add> a // -1 <ide> with pytest.raises(FloatingPointError): <ide> a // 0 <ide> with pytest.raises(FloatingPointError): <del> a //= 0 <add> ac = a.copy() <add> ac //= 0 <add> <add> np.array([], dtype=dtype) // 0 <add> <add> @pytest.mark.parametrize("dtype,ex_val", itertools.product( <add> np.sctypes['int'] + np.sctypes['uint'], ( <add> "np.array([fo.max, 1, 2, 1, 1, 2, 3], dtype=dtype)", <add> "np.array([fo.min, 1, -2, 1, 1, 2, -3], dtype=dtype)", <add> "np.arange(fo.min, fo.min+(100*10), 10, dtype=dtype)", <add> "np.arange(fo.max-(100*7), fo.max, 7, dtype=dtype)", <add> ) <add> )) <add> def test_division_int_reduce(self, dtype, ex_val): <add> fo = np.iinfo(dtype) <add> a = eval(ex_val) <add> lst = a.tolist() <add> c_div = lambda n, d: ( <add> 0 if d == 0 or (n and n == fo.min and d == -1) else n//d <add> ) <add> <add> with np.errstate(divide='ignore'): <add> div_a = np.floor_divide.reduce(a) <add> div_lst = reduce(c_div, lst) <add> msg = "Reduce floor integer division check" <add> assert div_a == div_lst, msg <ide> <del> np.array([], dtype=input_dtype) // 0 <add> with np.errstate(divide='raise'): <add> with pytest.raises(FloatingPointError): <add> np.floor_divide.reduce(np.arange(-100, 100, dtype=dtype)) <add> if fo.min: <add> with pytest.raises(FloatingPointError): <add> np.floor_divide.reduce( <add> np.array([fo.min, 1, -1], dtype=dtype) <add> ) <ide> <ide> @pytest.mark.parametrize( <ide> "dividend,divisor,quotient",
2
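The helpers in these tests (trunc_div, c_div) encode two facts about C-style integer division that differ from Python's // operator: it rounds toward zero, and dividing the most negative value by -1 overflows. The snippet below is written in Go, for consistency with the other examples here rather than because it appears in the numpy sources; Go also truncates toward zero and defines the overflow case as wrapping, so it shows the same semantics the tests are checking.

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// Truncated division rounds toward zero: -7 / 2 == -3, where
	// Python's -7 // 2 gives -4.
	fmt.Println(-7/2, -7%2) // -3 -1

	// The overflow case the tests special-case: the most negative value
	// divided by -1 cannot be represented and wraps back to itself.
	n := int32(math.MinInt32)
	d := int32(-1)
	fmt.Println(n / d) // -2147483648
}
```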
Ruby
Ruby
move some ar test cases to inheritance_test.rb
2fe8baf9b2ffd4d87e9534f6f1e91b207e8c0fc1
<ide><path>activerecord/test/cases/base_test.rb <ide> def test_find_symbol_ordered_last <ide> assert_equal last, Developer.all.merge!(:order => :salary).to_a.last <ide> end <ide> <del> def test_abstract_class <del> assert !ActiveRecord::Base.abstract_class? <del> assert LoosePerson.abstract_class? <del> assert !LooseDescendant.abstract_class? <del> end <del> <ide> def test_abstract_class_table_name <ide> assert_nil AbstractCompany.table_name <ide> end <ide> <del> def test_descends_from_active_record <del> assert !ActiveRecord::Base.descends_from_active_record? <del> <del> # Abstract subclass of AR::Base. <del> assert LoosePerson.descends_from_active_record? <del> <del> # Concrete subclass of an abstract class. <del> assert LooseDescendant.descends_from_active_record? <del> <del> # Concrete subclass of AR::Base. <del> assert TightPerson.descends_from_active_record? <del> <del> # Concrete subclass of a concrete class but has no type column. <del> assert TightDescendant.descends_from_active_record? <del> <del> # Concrete subclass of AR::Base. <del> assert Post.descends_from_active_record? <del> <del> # Abstract subclass of a concrete class which has a type column. <del> # This is pathological, as you'll never have Sub < Abstract < Concrete. <del> assert !StiPost.descends_from_active_record? <del> <del> # Concrete subclasses an abstract class which has a type column. <del> assert !SubStiPost.descends_from_active_record? <del> end <del> <ide> def test_find_on_abstract_base_class_doesnt_use_type_condition <ide> old_class = LooseDescendant <ide> Object.send :remove_const, :LooseDescendant <ide> def test_benchmark_with_use_silence <ide> ActiveRecord::Base.logger = original_logger <ide> end <ide> <del> def test_compute_type_success <del> assert_equal Author, ActiveRecord::Base.send(:compute_type, 'Author') <del> end <del> <del> def test_compute_type_nonexistent_constant <del> e = assert_raises NameError do <del> ActiveRecord::Base.send :compute_type, 'NonexistentModel' <del> end <del> assert_equal 'uninitialized constant ActiveRecord::Base::NonexistentModel', e.message <del> assert_equal 'ActiveRecord::Base::NonexistentModel', e.name <del> end <del> <del> def test_compute_type_no_method_error <del> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise NoMethodError }) do <del> assert_raises NoMethodError do <del> ActiveRecord::Base.send :compute_type, 'InvalidModel' <del> end <del> end <del> end <del> <del> def test_compute_type_on_undefined_method <del> error = nil <del> begin <del> Class.new(Author) do <del> alias_method :foo, :bar <del> end <del> rescue => e <del> error = e <del> end <del> <del> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise e }) do <del> <del> exception = assert_raises NameError do <del> ActiveRecord::Base.send :compute_type, 'InvalidModel' <del> end <del> assert_equal error.message, exception.message <del> end <del> end <del> <del> def test_compute_type_argument_error <del> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise ArgumentError }) do <del> assert_raises ArgumentError do <del> ActiveRecord::Base.send :compute_type, 'InvalidModel' <del> end <del> end <del> end <del> <ide> def test_clear_cache! 
<ide> # preheat cache <ide> c1 = Post.connection.schema_cache.columns('posts') <ide><path>activerecord/test/cases/inheritance_test.rb <ide> require 'cases/helper' <add>require 'models/author' <ide> require 'models/company' <ide> require 'models/person' <ide> require 'models/post' <ide> def test_class_without_store_full_sti_class_returns_demodulized_name <ide> end <ide> end <ide> <add> def test_compute_type_success <add> assert_equal Author, ActiveRecord::Base.send(:compute_type, 'Author') <add> end <add> <add> def test_compute_type_nonexistent_constant <add> e = assert_raises NameError do <add> ActiveRecord::Base.send :compute_type, 'NonexistentModel' <add> end <add> assert_equal 'uninitialized constant ActiveRecord::Base::NonexistentModel', e.message <add> assert_equal 'ActiveRecord::Base::NonexistentModel', e.name <add> end <add> <add> def test_compute_type_no_method_error <add> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise NoMethodError }) do <add> assert_raises NoMethodError do <add> ActiveRecord::Base.send :compute_type, 'InvalidModel' <add> end <add> end <add> end <add> <add> def test_compute_type_on_undefined_method <add> error = nil <add> begin <add> Class.new(Author) do <add> alias_method :foo, :bar <add> end <add> rescue => e <add> error = e <add> end <add> <add> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise e }) do <add> <add> exception = assert_raises NameError do <add> ActiveRecord::Base.send :compute_type, 'InvalidModel' <add> end <add> assert_equal error.message, exception.message <add> end <add> end <add> <add> def test_compute_type_argument_error <add> ActiveSupport::Dependencies.stub(:safe_constantize, proc{ raise ArgumentError }) do <add> assert_raises ArgumentError do <add> ActiveRecord::Base.send :compute_type, 'InvalidModel' <add> end <add> end <add> end <add> <ide> def test_should_store_demodulized_class_name_with_store_full_sti_class_option_disabled <ide> without_store_full_sti_class do <ide> item = Namespaced::Company.new <ide> def test_different_namespace_subclass_should_load_correctly_with_store_full_sti_ <ide> end <ide> end <ide> <add> def test_descends_from_active_record <add> assert !ActiveRecord::Base.descends_from_active_record? <add> <add> # Abstract subclass of AR::Base. <add> assert LoosePerson.descends_from_active_record? <add> <add> # Concrete subclass of an abstract class. <add> assert LooseDescendant.descends_from_active_record? <add> <add> # Concrete subclass of AR::Base. <add> assert TightPerson.descends_from_active_record? <add> <add> # Concrete subclass of a concrete class but has no type column. <add> assert TightDescendant.descends_from_active_record? <add> <add> # Concrete subclass of AR::Base. <add> assert Post.descends_from_active_record? <add> <add> # Abstract subclass of a concrete class which has a type column. <add> # This is pathological, as you'll never have Sub < Abstract < Concrete. <add> assert !StiPost.descends_from_active_record? <add> <add> # Concrete subclasses an abstract class which has a type column. <add> assert !SubStiPost.descends_from_active_record? <add> end <add> <ide> def test_company_descends_from_active_record <ide> assert !ActiveRecord::Base.descends_from_active_record? 
<ide> assert AbstractCompany.descends_from_active_record?, 'AbstractCompany should descend from ActiveRecord::Base' <ide> assert Company.descends_from_active_record?, 'Company should descend from ActiveRecord::Base' <ide> assert !Class.new(Company).descends_from_active_record?, 'Company subclass should not descend from ActiveRecord::Base' <ide> end <ide> <add> def test_abstract_class <add> assert !ActiveRecord::Base.abstract_class? <add> assert LoosePerson.abstract_class? <add> assert !LooseDescendant.abstract_class? <add> end <add> <ide> def test_inheritance_base_class <ide> assert_equal Post, Post.base_class <ide> assert_equal Post, SpecialPost.base_class <ide> def test_new_with_unrelated_namespaced_type <ide> end <ide> end <ide> <del> <ide> def test_new_with_complex_inheritance <ide> assert_nothing_raised { Client.new(type: 'VerySpecialClient') } <ide> end
2
Text
Text
update commands for untar-anywhere method
8398be5cde653f4637a81af8f7e078ddacc49417
<ide><path>docs/Homebrew-on-Linux.md <ide> You may need to install your own Ruby using your system package manager, a PPA, <ide> <ide> Homebrew does not currently support 32-bit x86 platforms. It would be possible for Homebrew to work on 32-bit x86 platforms with some effort. An interested and dedicated person could maintain a fork of Homebrew to develop support for 32-bit x86. <ide> <del>## Alternative Installation <del> <del>Extract or `git clone` Homebrew wherever you want. Use `/home/linuxbrew/.linuxbrew` if possible (to enable the use of binary packages). <del> <del>```sh <del>git clone https://github.com/Homebrew/brew ~/.linuxbrew/Homebrew <del>mkdir ~/.linuxbrew/bin <del>ln -s ~/.linuxbrew/Homebrew/bin/brew ~/.linuxbrew/bin <del>eval "$(~/.linuxbrew/bin/brew shellenv)" <del>``` <del> <ide> ## Homebrew on Linux Community <ide> <ide> - [@HomebrewOnLinux on Twitter](https://twitter.com/HomebrewOnLinux) <ide><path>docs/Installation.md <ide> here. *Pick another prefix at your peril!* <ide> mkdir homebrew && curl -L https://github.com/Homebrew/brew/tarball/master | tar xz --strip 1 -C homebrew <ide> ``` <ide> <add>or <add> <add>```sh <add>git clone https://github.com/Homebrew/brew homebrew <add>``` <add> <add>then <add> <add>```sh <add>eval "$(homebrew/bin/brew shellenv)" <add>brew update --force --quiet <add>chmod -R go-w "$(brew --prefix)/share/zsh" <add>``` <add> <ide> ### Multiple installations <ide> <ide> Create a Homebrew installation wherever you extract the tarball. Whichever `brew` command is called is where the packages will be installed. You can use this as you see fit, e.g. a system set of libs in the default prefix and tweaked formulae for development in `~/homebrew`.
2
PHP
PHP
update memcached engine description
c8eee749b15f36ef6d63a90ce2d3d4e58b13dd23
<ide><path>src/Cache/Engine/MemcachedEngine.php <ide> * control you have over expire times far in the future. See MemcachedEngine::write() for <ide> * more information. <ide> * <del> * Main advantage of this Memcached engine over the memcached engine is <del> * support of binary protocol, and igbinary serialization <del> * (if memcached extension compiled with --enable-igbinary) <add> * Memcached engine support of binary protocol and igbinary <add> * serialization (if memcached extension compiled with --enable-igbinary). <ide> * Compressed keys can also be incremented/decremented <ide> */ <ide> class MemcachedEngine extends CacheEngine
1
Mixed
Go
add some basic doc for sysinfo
01bbc3fbb9c3ab3dec0f271710739465b0f80b7a
<ide><path>pkg/sysinfo/README.md <add>SysInfo stores information about which features a kernel supports. <ide><path>pkg/sysinfo/sysinfo.go <ide> import ( <ide> "github.com/docker/libcontainer/cgroups" <ide> ) <ide> <add>// SysInfo stores information about which features a kernel supports. <ide> type SysInfo struct { <ide> MemoryLimit bool <ide> SwapLimit bool <ide> IPv4ForwardingDisabled bool <ide> AppArmor bool <ide> } <ide> <add>// Returns a new SysInfo, using the filesystem to detect which features the kernel supports. <ide> func New(quiet bool) *SysInfo { <ide> sysInfo := &SysInfo{} <ide> if cgroupMemoryMountpoint, err := cgroups.FindCgroupMountpoint("memory"); err != nil { <ide> func New(quiet bool) *SysInfo { <ide> } <ide> } <ide> <del> // Check if AppArmor seems to be enabled on this system. <add> // Check if AppArmor is supported <ide> if _, err := os.Stat("/sys/kernel/security/apparmor"); os.IsNotExist(err) { <ide> sysInfo.AppArmor = false <ide> } else {
2
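The new comments describe SysInfo as a plain record of which kernel features are available. A caller typically constructs it once and branches on the boolean fields; the sketch below shows that pattern. The constructor signature and field names come from the diff, while the import path is an assumption about the repository layout at the time.

```go
package main

import (
	"fmt"

	"github.com/docker/docker/pkg/sysinfo" // assumed import path
)

func main() {
	// quiet=false lets New log warnings about missing cgroup controllers.
	info := sysinfo.New(false)

	if !info.MemoryLimit {
		fmt.Println("memory limits are not supported by this kernel")
	}
	if !info.SwapLimit {
		fmt.Println("swap limits are not supported by this kernel")
	}
	if info.AppArmor {
		fmt.Println("AppArmor is available")
	}
}
```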
Ruby
Ruby
fix document for radio_button
d089fe2810216fb85cd52a2b89a861c07dc5bbb9
<ide><path>actionview/lib/action_view/helpers/form_helper.rb <ide> def check_box(object_name, method, options = {}, checked_value = "1", unchecked_ <ide> # # => <input type="radio" id="post_category_rails" name="post[category]" value="rails" checked="checked" /> <ide> # # <input type="radio" id="post_category_java" name="post[category]" value="java" /> <ide> # <add> # # Let's say that @user.receive_newsletter returns "no": <ide> # radio_button("user", "receive_newsletter", "yes") <ide> # radio_button("user", "receive_newsletter", "no") <ide> # # => <input type="radio" id="user_receive_newsletter_yes" name="user[receive_newsletter]" value="yes" /> <ide> def check_box(method, options = {}, checked_value = "1", unchecked_value = "0") <ide> # # => <input type="radio" id="post_category_rails" name="post[category]" value="rails" checked="checked" /> <ide> # # <input type="radio" id="post_category_java" name="post[category]" value="java" /> <ide> # <del> # # Let's say that @user.category returns "no": <add> # # Let's say that @user.receive_newsletter returns "no": <ide> # radio_button("receive_newsletter", "yes") <ide> # radio_button("receive_newsletter", "no") <ide> # # => <input type="radio" id="user_receive_newsletter_yes" name="user[receive_newsletter]" value="yes" />
1
Javascript
Javascript
escape component keys used in reactid
a6b888b2142608fb415debe2a9c2a6be2f266ec6
<ide><path>src/core/__tests__/ReactIdentity-test.js <ide> describe('ReactIdentity', function() { <ide> }); <ide> <ide> it('should not allow scripts in keys to execute', function() { <del> var h4x0rKey = '"><script>window.YOUVEBEENH4X0RED=true;</script><div id="'; <add> var h4x0rKey = <add> '"><script>window[\'YOUVEBEENH4X0RED\']=true;</script><div id="'; <ide> <ide> var attachedContainer = document.createElement('div'); <ide> document.body.appendChild(attachedContainer); <ide><path>src/utils/__tests__/ReactChildren-test.js <ide> describe('ReactChildren', function() { <ide> var mappedForcedKeys = Object.keys(mappedChildrenForcedKeys); <ide> expect(mappedForcedKeys).toEqual(expectedForcedKeys); <ide> <del> var expectedRemappedForcedKeys = ['{{keyZero}}{giraffe}', '{{keyOne}}[0]']; <add> var expectedRemappedForcedKeys = [ <add> '{{keyZero^C}{giraffe}', <add> '{{keyOne^C}[0]' <add> ]; <ide> var remappedChildrenForcedKeys = <ide> ReactChildren.map(mappedChildrenForcedKeys, mapFn); <ide> expect( <ide><path>src/utils/traverseAllChildren.js <ide> var invariant = require('invariant'); <ide> * }); <ide> */ <ide> <add>var userProvidedKeyEscaperLookup = { <add> '^': '^X', <add> '.': '^D', <add> '}': '^C' <add>}; <add> <add>var userProvidedKeyEscapeRegex = /[.^}]/g; <add> <add>function userProvidedKeyEscaper(match) { <add> return userProvidedKeyEscaperLookup[match]; <add>} <add> <ide> /** <ide> * Generate a key string that identifies a component within a set. <ide> * <ide> function getComponentKey(component, index) { <ide> return '[' + index + ']'; <ide> } <ide> <add>/** <add> * Escape a component key so that it is safe to use in a reactid. <add> * <add> * @param {*} key Component key to be escaped. <add> * @return {string} An escaped string. <add> */ <add>function escapeUserProvidedKey(text) { <add> return ('' + text).replace( <add> userProvidedKeyEscapeRegex, <add> userProvidedKeyEscaper <add> ); <add>} <add> <ide> /** <ide> * Wrap a `key` value explicitly provided by the user to distinguish it from <ide> * implicitly-generated keys generated by a component's index in its parent. <ide> function getComponentKey(component, index) { <ide> * @return {string} <ide> */ <ide> function wrapUserProvidedKey(key) { <del> return '{' + key + '}'; <add> return '{' + escapeUserProvidedKey(key) + '}'; <ide> } <ide> <ide> /**
3
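The fix stops embedding user-provided keys verbatim in reactids by escaping the characters that act as delimiters there, using the small lookup table visible in traverseAllChildren.js ('^' to '^X', '.' to '^D', '}' to '^C'). For illustration only, and in Go rather than JavaScript to keep these examples in one language, the same substitution can be sketched as:

```go
package main

import (
	"fmt"
	"strings"
)

// keyEscaper mirrors the patch's userProvidedKeyEscaperLookup table.
var keyEscaper = strings.NewReplacer(
	"^", "^X",
	".", "^D",
	"}", "^C",
)

// wrapUserProvidedKey wraps an escaped user key in braces, as the patch does.
func wrapUserProvidedKey(key string) string {
	return "{" + keyEscaper.Replace(key) + "}"
}

func main() {
	fmt.Println(wrapUserProvidedKey("keyZero")) // {keyZero}
	fmt.Println(wrapUserProvidedKey("a.b}c"))   // {a^Db^Cc}
}
```

Because '^' itself is escaped, the mapping stays unambiguous, which is why the remapped keys expected by the updated ReactChildren test look like '{{keyZero^C}{giraffe}'.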