content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40) | patch (stringlengths 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---|
Go | Go | remove unused getcache endpoint | 0d7044955a4b63c4df2a611095b87ae417a3cf9b | <ide><path>api.go
<ide> func getImagesByName(srv *Server, version float64, w http.ResponseWriter, r *htt
<ide> return nil
<ide> }
<ide>
<del>func postImagesGetCache(srv *Server, version float64, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
<del> apiConfig := &APIImageConfig{}
<del> if err := json.NewDecoder(r.Body).Decode(apiConfig); err != nil {
<del> return err
<del> }
<del>
<del> image, err := srv.ImageGetCached(apiConfig.ID, apiConfig.Config)
<del> if err != nil {
<del> return err
<del> }
<del> if image == nil {
<del> w.WriteHeader(http.StatusNotFound)
<del> return nil
<del> }
<del> apiID := &APIID{ID: image.ID}
<del> b, err := json.Marshal(apiID)
<del> if err != nil {
<del> return err
<del> }
<del> writeJSON(w, b)
<del> return nil
<del>}
<del>
<ide> func postBuild(srv *Server, version float64, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
<ide> if version < 1.3 {
<ide> return fmt.Errorf("Multipart upload for build is no longer supported. Please upgrade your docker client.")
<ide> func createRouter(srv *Server, logging bool) (*mux.Router, error) {
<ide> "/images/{name:.*}/insert": postImagesInsert,
<ide> "/images/{name:.*}/push": postImagesPush,
<ide> "/images/{name:.*}/tag": postImagesTag,
<del> "/images/getCache": postImagesGetCache,
<ide> "/containers/create": postContainersCreate,
<ide> "/containers/{name:.*}/kill": postContainersKill,
<ide> "/containers/{name:.*}/restart": postContainersRestart, | 1 |
Javascript | Javascript | fix bug in line3.distance/distancesq | e08369e8151e50b2aae861c86a2bfdcf8073e8e9 | <ide><path>src/math/Line3.js
<ide> THREE.extend( THREE.Line3.prototype, {
<ide>
<ide> },
<ide>
<del> distanceSq: function ( optionalTarget ) {
<add> distanceSq: function () {
<ide>
<ide> return this.start.distanceToSquared( this.end );
<ide>
<ide> },
<ide>
<del> distance: function ( optionalTarget ) {
<add> distance: function () {
<ide>
<ide> return this.start.distanceTo( this.end );
<ide> | 1 |
Javascript | Javascript | fix normal view | c8d4156f89f67f9dffa7cd4740a58f28fd8f06f1 | <ide><path>examples/js/nodes/accessors/NormalNode.js
<ide> NormalNode.prototype.generate = function ( builder, output ) {
<ide>
<ide> case NormalNode.VIEW:
<ide>
<del> result = 'vNormal';
<add> result = 'normal';
<ide>
<ide> break;
<ide> | 1 |
Python | Python | set default backend to amqp if not configured | 26a3aea32016e2eaf3fe0fa616843cede74dbaaf | <ide><path>celery/backends/amqp.py
<ide> class AMQPBackend(BaseDictBackend):
<ide> serializer = conf.RESULT_SERIALIZER
<ide> _connection = None
<ide>
<add> def __init__(self, *args, **kwargs):
<add> self._connection = kwargs.get("connection", None)
<add> super(AMQPBackend, self).__init__(*args, **kwargs)
<add>
<ide> def _create_publisher(self, task_id, connection):
<ide> delivery_mode = self.persistent and 2 or 1
<ide>
<ide><path>celery/loaders/default.py
<ide> "CELERY_IMPORTS": (),
<ide> }
<ide>
<add>DEFAULT_UNCONFIGURED_SETTINGS = {
<add> "CELERY_RESULT_BACKEND": "amqp",
<add>}
<add>
<ide>
<ide> class NotConfigured(UserWarning):
<ide> """Celery has not been configured, as no config module has been found."""
<ide> def read_configuration(self):
<ide> warnings.warn("No celeryconfig.py module found! Please make "
<ide> "sure it exists and is available to Python.",
<ide> NotConfigured)
<del> return self.setup_settings({})
<add> return self.setup_settings(DEFAULT_UNCONFIGURED_SETTINGS)
<ide> else:
<ide> usercfg = dict((key, getattr(celeryconfig, key))
<ide> for key in dir(celeryconfig) | 2 |
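The Celery patch above gives an unconfigured installation a usable default by falling back to `CELERY_RESULT_BACKEND = "amqp"` when no `celeryconfig` module can be found. Below is a minimal, self-contained sketch of that fallback pattern in plain Python; the module name, warning text, and upper-case-keys filter are illustrative assumptions, not Celery's exact loader code.

```python
import importlib
import warnings

# Settings used only when the user's config module is missing, mirroring
# the DEFAULT_UNCONFIGURED_SETTINGS idea from the patch above.
DEFAULT_UNCONFIGURED_SETTINGS = {"CELERY_RESULT_BACKEND": "amqp"}

def read_configuration(module_name="celeryconfig"):
    """Load upper-case settings from a module, or fall back to defaults."""
    try:
        config = importlib.import_module(module_name)
    except ImportError:
        warnings.warn("No %s module found; using default settings." % module_name)
        return dict(DEFAULT_UNCONFIGURED_SETTINGS)
    return {key: getattr(config, key) for key in dir(config) if key.isupper()}

if __name__ == "__main__":
    # With a missing config module, the result backend defaults to "amqp".
    settings = read_configuration("no_such_config_module")
    assert settings["CELERY_RESULT_BACKEND"] == "amqp"
```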
Javascript | Javascript | fix bug in profiling test | 61e899f42b8f1be23af2e0f244a1d11130db92d0 | <ide><path>test/integration/profiling/test/index.test.js
<ide> import { join } from 'path'
<ide> import { nextBuild } from 'next-test-utils'
<ide> import fs from 'fs'
<ide> const appDir = join(__dirname, '../')
<add>const profileEventsPath = join(appDir, '.next', 'profile-events.json')
<ide> jest.setTimeout(1000 * 60 * 5)
<ide>
<ide> describe('Profiling Usage', () => {
<ide> beforeAll(async () => {
<add> // Delete file if it already exists
<add> if (await fs.existsSync(profileEventsPath))
<add> await fs.unlink(profileEventsPath, () => {
<add> console.log('Deleted Existing profile-events.json file')
<add> })
<add>
<ide> await nextBuild(appDir)
<ide> })
<ide>
<ide> describe('Profiling the build', () => {
<ide> it('should emit files', async () => {
<del> expect(fs.existsSync(join(appDir, '.next', 'profile-events.json'))).toBe(
<del> true
<del> )
<add> expect(fs.existsSync(profileEventsPath)).toBe(true)
<ide> })
<ide> })
<ide> }) | 1 |
Python | Python | remove unwanted imports | 910ccf1a1ae2c6fdd3df60dcfd03e1c1807d2ba7 | <ide><path>official/vision/beta/projects/__init__.py
<ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<del>
<del>from official.vision.beta.projects import panoptic_maskrcnn
<ide><path>official/vision/beta/projects/panoptic_maskrcnn/__init__.py
<ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<del>
<del># Copyright 2021 The TensorFlow Authors. All Rights Reserved.
<del>#
<del># Licensed under the Apache License, Version 2.0 (the "License");
<del># you may not use this file except in compliance with the License.
<del># You may obtain a copy of the License at
<del>#
<del># http://www.apache.org/licenses/LICENSE-2.0
<del>#
<del># Unless required by applicable law or agreed to in writing, software
<del># distributed under the License is distributed on an "AS IS" BASIS,
<del># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del># See the License for the specific language governing permissions and
<del># limitations under the License.
<del>
<del>from official.vision.beta.projects.panoptic_maskrcnn import configs
<del>from official.vision.beta.projects.panoptic_maskrcnn import tasks
<ide><path>official/vision/beta/projects/panoptic_maskrcnn/configs/__init__.py
<ide> # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<del>
<del># Lint as: python3
<del>"""Configs package definition."""
<del>
<del>from official.vision.beta.projects.panoptic_maskrcnn.configs import panoptic_maskrcnn
<ide><path>official/vision/beta/projects/panoptic_maskrcnn/tasks/__init__.py
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<ide>
<del># Lint as: python3
<del>"""Tasks package definition."""
<del>
<del>from official.vision.beta.projects.panoptic_maskrcnn.tasks import panoptic_maskrcnn
<ide>\ No newline at end of file | 4 |
PHP | PHP | resolve commands in artisan | 8dece94eedeb6c743f805ab5e8a0cf90c2ca512d | <ide><path>src/Illuminate/Foundation/Console/Kernel.php
<ide> protected function getArtisan()
<ide> {
<ide> if (is_null($this->artisan))
<ide> {
<del> return $this->artisan = new Artisan($this->app, $this->events);
<add> return $this->artisan = (new Artisan($this->app, $this->events))
<add> ->resolveCommands($this->commands);
<ide> }
<ide>
<ide> return $this->artisan; | 1 |
Ruby | Ruby | fix rakefile loading generator relatively | 7ffd4c849dcfa4691477fe9dc4dbd2df6c9c6b7b | <ide><path>railties/lib/rails/api/task.rb
<ide> require "rdoc/task"
<del>require "rails/api/generator"
<add>require_relative "generator"
<ide>
<ide> module Rails
<ide> module API | 1 |
Ruby | Ruby | use delete if the rhs is nil | aaa2abf73fa39e0d455b4b781fb4d00e51d0bdc7 | <ide><path>actionpack/test/template/asset_tag_helper_test.rb
<ide> def test_image_tag_windows_behaviour
<ide> File.stubs(:exist?).with('template/../fixtures/public/images/rails.png.').returns(true)
<ide> assert_equal '<img alt="Rails" src="/images/rails.png?1" />', image_tag('rails.png')
<ide> ensure
<del> ENV["RAILS_ASSET_ID"] = old_asset_id
<add> if old_asset_id
<add> ENV["RAILS_ASSET_ID"] = old_asset_id
<add> else
<add> ENV.delete("RAILS_ASSET_ID")
<add> end
<ide> end
<ide> end
<ide> | 1 |
Text | Text | add modelcard with acknowledgements | 8a017cbb5ad0fe43db04eef8e56f0d4846c2f422 | <ide><path>model_cards/ViktorAlm/electra-base-norwegian-uncased-discriminator/README.md
<add># Norwegian Electra
<add>Image incoming, im going to have som fun with this one.
<add>
<add>Trained on Oscar + wikipedia + opensubtitles + some other data I had with the awesome power of TPUs(V3-8)
<add>
<add>Use with caution. I have no downstream tasks in Norwegian to test on so I have no idea of its performance yet.
<add>
<add># Acknowledgments
<add>
<add>### TensorFlow Research Cloud
<add>Research supported with Cloud TPUs from Google's TensorFlow Research Cloud (TFRC). Thanks for providing access to the TFRC ❤️
<add>- https://www.tensorflow.org/tfrc
<add>
<add>#### OSCAR corpus
<add>- https://oscar-corpus.com/
<add>
<add>#### OPUS
<add>- http://opus.nlpl.eu/
<add>- http://www.opensubtitles.org/ | 1 |
Ruby | Ruby | convert specs to tests | 7fc628e3fcda6342a8cee0a6236987b57ac10270 | <ide><path>activemodel/spec/observing_spec.rb
<del>require File.join(File.dirname(__FILE__), 'spec_helper')
<del>
<del>class ObservedModel < ActiveModel::Base
<del> class Observer
<del> end
<del>end
<del>
<del>class FooObserver < ActiveModel::Observer
<del> class << self
<del> public :new
<del> end
<del>
<del> attr_accessor :stub
<del>
<del> def on_spec(record)
<del> stub.event_with(record) if stub
<del> end
<del>end
<del>
<del>class Foo < ActiveModel::Base
<del>end
<del>
<del>module ActiveModel
<del> describe Observing do
<del> before do
<del> ObservedModel.observers.clear
<del> end
<del>
<del> it "initializes model with no cached observers" do
<del> ObservedModel.observers.should be_empty
<del> end
<del>
<del> it "stores cached observers in an array" do
<del> ObservedModel.observers << :foo
<del> ObservedModel.observers.should include(:foo)
<del> end
<del>
<del> it "flattens array of assigned cached observers" do
<del> ObservedModel.observers = [[:foo], :bar]
<del> ObservedModel.observers.should include(:foo)
<del> ObservedModel.observers.should include(:bar)
<del> end
<del>
<del> it "instantiates observer names passed as strings" do
<del> ObservedModel.observers << 'foo_observer'
<del> FooObserver.should_receive(:instance)
<del> ObservedModel.instantiate_observers
<del> end
<del>
<del> it "instantiates observer names passed as symbols" do
<del> ObservedModel.observers << :foo_observer
<del> FooObserver.should_receive(:instance)
<del> ObservedModel.instantiate_observers
<del> end
<del>
<del> it "instantiates observer classes" do
<del> ObservedModel.observers << ObservedModel::Observer
<del> ObservedModel::Observer.should_receive(:instance)
<del> ObservedModel.instantiate_observers
<del> end
<del>
<del> it "should pass observers to subclasses" do
<del> FooObserver.instance
<del> bar = Class.new(Foo)
<del> bar.count_observers.should == 1
<del> end
<del> end
<del>
<del> describe Observer do
<del> before do
<del> ObservedModel.observers = :foo_observer
<del> FooObserver.models = nil
<del> end
<del>
<del> it "guesses implicit observable model name" do
<del> FooObserver.observed_class_name.should == 'Foo'
<del> end
<del>
<del> it "tracks implicit observable models" do
<del> instance = FooObserver.new
<del> instance.send(:observed_classes).should include(Foo)
<del> instance.send(:observed_classes).should_not include(ObservedModel)
<del> end
<del>
<del> it "tracks explicit observed model class" do
<del> FooObserver.new.send(:observed_classes).should_not include(ObservedModel)
<del> FooObserver.observe ObservedModel
<del> instance = FooObserver.new
<del> instance.send(:observed_classes).should include(ObservedModel)
<del> end
<del>
<del> it "tracks explicit observed model as string" do
<del> FooObserver.new.send(:observed_classes).should_not include(ObservedModel)
<del> FooObserver.observe 'observed_model'
<del> instance = FooObserver.new
<del> instance.send(:observed_classes).should include(ObservedModel)
<del> end
<del>
<del> it "tracks explicit observed model as symbol" do
<del> FooObserver.new.send(:observed_classes).should_not include(ObservedModel)
<del> FooObserver.observe :observed_model
<del> instance = FooObserver.new
<del> instance.send(:observed_classes).should include(ObservedModel)
<del> end
<del>
<del> it "calls existing observer event" do
<del> foo = Foo.new
<del> FooObserver.instance.stub = stub!(:stub)
<del> FooObserver.instance.stub.should_receive(:event_with).with(foo)
<del> Foo.send(:changed)
<del> Foo.send(:notify_observers, :on_spec, foo)
<del> end
<del>
<del> it "skips nonexistent observer event" do
<del> foo = Foo.new
<del> Foo.send(:changed)
<del> Foo.send(:notify_observers, :whatever, foo)
<del> end
<del> end
<del>end
<ide>\ No newline at end of file
<ide><path>activemodel/spec/spec_helper.rb
<del>ENV['LOG_NAME'] = 'spec'
<del>$LOAD_PATH << File.join(File.dirname(__FILE__), '..', 'vendor', 'rspec', 'lib')
<del>$LOAD_PATH << File.join(File.dirname(__FILE__), '..', 'lib')
<del>require 'active_model'
<del>begin
<del> require 'spec'
<del>rescue LoadError
<del> require 'rubygems'
<del> require 'spec'
<del>end
<del>
<del>begin
<del> require 'ruby-debug'
<del> Debugger.start
<del>rescue LoadError
<del> # you do not know the ways of ruby-debug yet, what a shame
<del>end
<ide>\ No newline at end of file
<ide><path>activemodel/test/observing_test.rb
<add>require File.join(File.dirname(__FILE__), 'test_helper')
<add>
<add>class ObservedModel < ActiveModel::Base
<add> class Observer
<add> end
<add>end
<add>
<add>class FooObserver < ActiveModel::Observer
<add> class << self
<add> public :new
<add> end
<add>
<add> attr_accessor :stub
<add>
<add> def on_spec(record)
<add> stub.event_with(record) if stub
<add> end
<add>end
<add>
<add>class Foo < ActiveModel::Base
<add>end
<add>
<add>class ObservingTest < ActiveSupport::TestCase
<add> def setup
<add> ObservedModel.observers.clear
<add> end
<add>
<add> test "initializes model with no cached observers" do
<add> assert ObservedModel.observers.empty?, "Not empty: #{ObservedModel.observers.inspect}"
<add> end
<add>
<add> test "stores cached observers in an array" do
<add> ObservedModel.observers << :foo
<add> assert ObservedModel.observers.include?(:foo), ":foo not in #{ObservedModel.observers.inspect}"
<add> end
<add>
<add> test "flattens array of assigned cached observers" do
<add> ObservedModel.observers = [[:foo], :bar]
<add> assert ObservedModel.observers.include?(:foo), ":foo not in #{ObservedModel.observers.inspect}"
<add> assert ObservedModel.observers.include?(:bar), ":bar not in #{ObservedModel.observers.inspect}"
<add> end
<add>
<add> uses_mocha "observer instantiation" do
<add> test "instantiates observer names passed as strings" do
<add> ObservedModel.observers << 'foo_observer'
<add> FooObserver.expects(:instance)
<add> ObservedModel.instantiate_observers
<add> end
<add>
<add> test "instantiates observer names passed as symbols" do
<add> ObservedModel.observers << :foo_observer
<add> FooObserver.expects(:instance)
<add> ObservedModel.instantiate_observers
<add> end
<add>
<add> test "instantiates observer classes" do
<add> ObservedModel.observers << ObservedModel::Observer
<add> ObservedModel::Observer.expects(:instance)
<add> ObservedModel.instantiate_observers
<add> end
<add> end
<add>
<add> test "passes observers to subclasses" do
<add> FooObserver.instance
<add> bar = Class.new(Foo)
<add> assert_equal Foo.count_observers, bar.count_observers
<add> end
<add>end
<add>
<add>class ObserverTest < ActiveSupport::TestCase
<add> def setup
<add> ObservedModel.observers = :foo_observer
<add> FooObserver.models = nil
<add> end
<add>
<add> test "guesses implicit observable model name" do
<add> assert_equal 'Foo', FooObserver.observed_class_name
<add> end
<add>
<add> test "tracks implicit observable models" do
<add> instance = FooObserver.new
<add> assert instance.send(:observed_classes).include?(Foo), "Foo not in #{instance.send(:observed_classes).inspect}"
<add> assert !instance.send(:observed_classes).include?(ObservedModel), "ObservedModel in #{instance.send(:observed_classes).inspect}"
<add> end
<add>
<add> test "tracks explicit observed model class" do
<add> old_instance = FooObserver.new
<add> assert !old_instance.send(:observed_classes).include?(ObservedModel), "ObservedModel in #{old_instance.send(:observed_classes).inspect}"
<add> FooObserver.observe ObservedModel
<add> instance = FooObserver.new
<add> assert instance.send(:observed_classes).include?(ObservedModel), "ObservedModel not in #{instance.send(:observed_classes).inspect}"
<add> end
<add>
<add> test "tracks explicit observed model as string" do
<add> old_instance = FooObserver.new
<add> assert !old_instance.send(:observed_classes).include?(ObservedModel), "ObservedModel in #{old_instance.send(:observed_classes).inspect}"
<add> FooObserver.observe 'observed_model'
<add> instance = FooObserver.new
<add> assert instance.send(:observed_classes).include?(ObservedModel), "ObservedModel not in #{instance.send(:observed_classes).inspect}"
<add> end
<add>
<add> test "tracks explicit observed model as symbol" do
<add> old_instance = FooObserver.new
<add> assert !old_instance.send(:observed_classes).include?(ObservedModel), "ObservedModel in #{old_instance.send(:observed_classes).inspect}"
<add> FooObserver.observe :observed_model
<add> instance = FooObserver.new
<add> assert instance.send(:observed_classes).include?(ObservedModel), "ObservedModel not in #{instance.send(:observed_classes).inspect}"
<add> end
<add>
<add> test "calls existing observer event" do
<add> foo = Foo.new
<add> FooObserver.instance.stub = stub
<add> FooObserver.instance.stub.expects(:event_with).with(foo)
<add> Foo.send(:changed)
<add> Foo.send(:notify_observers, :on_spec, foo)
<add> end
<add>
<add> test "skips nonexistent observer event" do
<add> foo = Foo.new
<add> Foo.send(:changed)
<add> Foo.send(:notify_observers, :whatever, foo)
<add> end
<add>end
<ide>\ No newline at end of file
<ide><path>activemodel/test/test_helper.rb
<add>$:.unshift "#{File.dirname(__FILE__)}/../lib"
<add>$:.unshift File.dirname(__FILE__)
<add>
<add>require 'test/unit'
<add>require 'active_model'
<add>require 'active_support/callbacks' # needed by ActiveSupport::TestCase
<add>require 'active_support/test_case'
<add>
<add>def uses_gem(gem_name, test_name, version = '> 0')
<add> require 'rubygems'
<add> gem gem_name.to_s, version
<add> require gem_name.to_s
<add> yield
<add>rescue LoadError
<add> $stderr.puts "Skipping #{test_name} tests. `gem install #{gem_name}` and try again."
<add>end
<add>
<add># Wrap tests that use Mocha and skip if unavailable.
<add>unless defined? uses_mocha
<add> def uses_mocha(test_name, &block)
<add> uses_gem('mocha', test_name, '>= 0.5.5', &block)
<add> end
<add>end | 4 |
Javascript | Javascript | use externals plugin for node target | 9a3e3443fa2f17d6b91ef68e27a025b8659e5799 | <ide><path>lib/node/NodeTargetPlugin.js
<ide> MIT License http://www.opensource.org/licenses/mit-license.php
<ide> Author Tobias Koppers @sokra
<ide> */
<del>var Dependency = require("../Dependency");
<del>var Module = require("../Module");
<del>var RawSource = require("webpack-core/lib/RawSource");
<del>
<del>
<del>
<del>function NodeNativeDependency(request, range) {
<del> Dependency.call(this);
<del> this.Class = NodeNativeDependency;
<del> this.userRequest = request;
<del> this.request = request;
<del> this.range = range;
<del>}
<del>
<del>NodeNativeDependency.prototype = Object.create(Dependency.prototype);
<del>NodeNativeDependency.prototype.type = "native module";
<del>
<del>NodeNativeDependency.prototype.isEqualResource = function isEqualResource(other) {
<del> if(!(other instanceof NodeNativeDependency))
<del> return false;
<del> return this.request == other.request;
<del>};
<del>
<del>NodeNativeDependency.Template = require("../dependencies/ModuleDependencyTemplateAsRequireId");
<del>
<del>
<del>
<del>function NodeNativeCommonJsDependency(request, range) {
<del> NodeNativeDependency.call(this, request, range);
<del> this.Class = NodeNativeCommonJsDependency;
<del>}
<del>
<del>NodeNativeCommonJsDependency.prototype = Object.create(NodeNativeDependency.prototype);
<del>NodeNativeCommonJsDependency.Template = require("../dependencies/ModuleDependencyTemplateAsId");
<del>
<del>
<del>
<del>function NodeNativeModule(request) {
<del> Module.call(this);
<del> this.request = request;
<del> this.built = false;
<del> this.cacheable = true;
<del>}
<del>NodeNativeModule.prototype = Object.create(Module.prototype);
<del>
<del>NodeNativeModule.prototype.identifier = NodeNativeModule.prototype.readableIdentifier = function() {
<del> return this.request;
<del>};
<del>
<del>NodeNativeModule.prototype.build = function(options, compilation, resolver, fs, callback) {callback()};
<del>
<del>NodeNativeModule.prototype.source = function() {
<del> return new RawSource("module.exports = require(" + JSON.stringify(this.request) + ");");
<del>};
<del>
<del>NodeNativeModule.prototype.needRebuild = function() {
<del> return false;
<del>};
<del>
<del>NodeNativeModule.prototype.size = function() {
<del> return 42 + this.request.length;
<del>};
<del>
<del>
<del>
<del>function NodeNativeModuleFactory() {
<del>}
<del>NodeNativeModuleFactory.prototype.create = function(context, dependency, callback) {
<del> return callback(null, new NodeNativeModule(dependency.request));
<del>}
<del>
<del>
<del>
<add>var ExternalsPlugin = require("../ExternalsPlugin");
<ide>
<ide> function NodeTargetPlugin() {
<ide> }
<add>
<ide> module.exports = NodeTargetPlugin;
<ide> NodeTargetPlugin.prototype.apply = function(compiler) {
<del> compiler.plugin("compilation", function(compilation, params) {
<del> compilation.dependencyFactories.set(NodeNativeDependency, new NodeNativeModuleFactory());
<del> compilation.dependencyTemplates.set(NodeNativeDependency, new NodeNativeDependency.Template());
<del>
<del> compilation.dependencyFactories.set(NodeNativeCommonJsDependency, new NodeNativeModuleFactory());
<del> compilation.dependencyTemplates.set(NodeNativeCommonJsDependency, new NodeNativeCommonJsDependency.Template());
<del> });
<del>
<del> var natives = Object.keys(process.binding("natives"));
<del> compiler.parser.plugin("call require:commonjs:item", function(expr, param) {
<del> if(param.isString() && natives.indexOf(param.string) >= 0) {
<del> var dep = new NodeNativeCommonJsDependency(param.string, param.range);
<del> this.state.current.addDependency(dep);
<del> return true;
<del> }
<del> });
<del> compiler.parser.plugin("call define:amd:item", function(expr, param) {
<del> if(param.isString() && natives.indexOf(param.string) >= 0) {
<del> var dep = new NodeNativeDependency(param.string, param.range);
<del> this.state.current.addDependency(dep);
<del> return true;
<del> }
<del> });
<del> compiler.parser.plugin("call require:amd:item", function(expr, param) {
<del> if(param.isString() && natives.indexOf(param.string) >= 0) {
<del> var dep = new NodeNativeDependency(param.string, param.range);
<del> this.state.current.addDependency(dep);
<del> return true;
<del> }
<del> });
<add> new ExternalsPlugin("commonjs", Object.keys(process.binding("natives"))).apply(compiler);
<ide> };
<ide>\ No newline at end of file | 1 |
PHP | PHP | handle assoc mode within db commands | 2d834af7b54a24380819ec35c59e258e1cac3216 | <ide><path>src/Illuminate/Database/Console/DatabaseInspectionCommand.php
<ide> use Illuminate\Database\MySqlConnection;
<ide> use Illuminate\Database\PostgresConnection;
<ide> use Illuminate\Database\SQLiteConnection;
<add>use Illuminate\Database\SqlServerConnection;
<ide> use Illuminate\Support\Arr;
<ide> use Illuminate\Support\Composer;
<ide> use Symfony\Component\Process\Exception\ProcessSignaledException;
<ide> protected function getTableSize(ConnectionInterface $connection, string $table)
<ide> */
<ide> protected function getMySQLTableSize(ConnectionInterface $connection, string $table)
<ide> {
<del> return $connection->selectOne('SELECT (data_length + index_length) AS size FROM information_schema.TABLES WHERE table_schema = ? AND table_name = ?', [
<add> $result = $connection->selectOne('SELECT (data_length + index_length) AS size FROM information_schema.TABLES WHERE table_schema = ? AND table_name = ?', [
<ide> $connection->getDatabaseName(),
<ide> $table,
<del> ])->size;
<add> ]);
<add>
<add> return Arr::wrap((array) $result)['size'];
<ide> }
<ide>
<ide> /**
<ide> protected function getMySQLTableSize(ConnectionInterface $connection, string $ta
<ide> */
<ide> protected function getPostgresTableSize(ConnectionInterface $connection, string $table)
<ide> {
<del> return $connection->selectOne('SELECT pg_total_relation_size(?) AS size;', [
<add> $result = $connection->selectOne('SELECT pg_total_relation_size(?) AS size;', [
<ide> $table,
<del> ])->size;
<add> ]);
<add>
<add> return Arr::wrap((array) $result)['size'];
<ide> }
<ide>
<ide> /**
<ide> protected function getPostgresTableSize(ConnectionInterface $connection, string
<ide> */
<ide> protected function getSqliteTableSize(ConnectionInterface $connection, string $table)
<ide> {
<del> return $connection->selectOne('SELECT SUM(pgsize) FROM dbstat WHERE name=?', [
<add> $result = $connection->selectOne('SELECT SUM(pgsize) FROM dbstat WHERE name=?', [
<ide> $table,
<del> ])->size;
<add> ]);
<add>
<add> return Arr::wrap((array) $result)['size'];
<ide> }
<ide>
<ide> /**
<ide> * Get the number of open connections for a database.
<ide> *
<ide> * @param \Illuminate\Database\ConnectionInterface $connection
<del> * @return null
<add> * @return int|null
<ide> */
<ide> protected function getConnectionCount(ConnectionInterface $connection)
<ide> {
<del> return match (class_basename($connection)) {
<del> 'MySqlConnection' => (int) $connection->selectOne($connection->raw('show status where variable_name = "threads_connected"'))->Value,
<del> 'PostgresConnection' => (int) $connection->selectOne('select count(*) as connections from pg_stat_activity')->connections,
<del> 'SqlServerConnection' => (int) $connection->selectOne('SELECT COUNT(*) connections FROM sys.dm_exec_sessions WHERE status = ?', ['running'])->connections,
<add> $result = match (true) {
<add> $connection instanceof MySqlConnection => $connection->selectOne('show status where variable_name = "threads_connected"'),
<add> $connection instanceof PostgresConnection => $connection->selectOne('select count(*) AS "Value" from pg_stat_activity'),
<add> $connection instanceof SqlServerConnection => $connection->selectOne('SELECT COUNT(*) Value FROM sys.dm_exec_sessions WHERE status = ?', ['running']),
<ide> default => null,
<ide> };
<add>
<add> if (! $result) {
<add> return null;
<add> }
<add>
<add> return Arr::wrap((array) $result)['Value'];
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | update docstrings for busdays code | 4af52375dbd720dffc55b49f48bb59e8587ed762 | <ide><path>numpy/add_newdocs.py
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> """
<ide> busdaycalendar(weekmask='1111100', holidays=None)
<ide>
<del> A business day calendar object that efficiently stores
<del> information defining business days for the business
<del> day-related functions.
<add> A business day calendar object that efficiently stores information
<add> defining valid days for the busday family of functions.
<add>
<add> The default valid days are Monday through Friday ("business days").
<add> A busdaycalendar object can be specified with any set of weekly
<add> valid days, plus an optional "holiday" dates that always will be invalid.
<add>
<add> Once a busdaycalendar object is created, the weekmask and holidays
<add> cannot be modified.
<ide>
<ide> .. versionadded:: 1.7.0
<ide>
<ide> Parameters
<ide> ----------
<ide> weekmask : str or array_like of bool, optional
<del> A seven-element array indicating which of Monday through Sunday may
<del> be valid business days. May be specified as a list or array, like
<del> [1,1,1,1,1,0,0], a length-seven string like '1111100', or a string
<del> of three-letter weekday names, like 'MonTueWedThuFri'. The latter
<del> string representation is most useful when only one day of the
<del> week is important, like 'Mon' if you want to calculate the date
<del> of Easter.
<add> A seven-element array indicating which of Monday through Sunday are
<add> valid days. May be specified as a length-seven list or array, like
<add> [1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
<add> like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
<add> weekdays, optionally separated by white space. Valid abbreviations
<add> are: Mon Tue Wed Thu Fri Sat Sun
<ide> holidays : array_like of datetime64[D], optional
<del> An array of dates which should be blacked out from being considered
<del> as business days. They may be specified in any order, and NaT
<del> (not-a-time) dates are ignored. Internally, this list is normalized
<del> into a form suited for fast business day calculations.
<add> An array of dates to consider as invalid dates, no matter which
<add> weekday they fall upon. Holiday dates may be specified in any
<add> order, and NaT (not-a-time) dates are ignored. This list is
<add> saved in a normalized form that is suited for fast calculations
<add> of valid days.
<ide>
<ide> Returns
<ide> -------
<ide> out : busdaycalendar
<ide> A business day calendar object containing the specified
<del> weekmask and holidays.
<add> weekmask and holidays values.
<ide>
<ide> See Also
<ide> --------
<del> is_busday : Returns a boolean array indicating valid business days.
<del> busday_offset : Applies an offset counted in business days.
<del> busday_count : Counts how many business days are in a half-open date range.
<add> is_busday : Returns a boolean array indicating valid days.
<add> busday_offset : Applies an offset counted in valid days.
<add> busday_count : Counts how many valid days are in a half-open date range.
<ide>
<ide> Attributes
<ide> ----------
<del> weekmask : seven-element array of bool
<del> holidays : sorted array of datetime64[D]
<add> Note: once a busdaycalendar object is created, you cannot modify the
<add> weekmask or holidays. The attributes return copies of internal data.
<add> weekmask : (copy) seven-element array of bool
<add> holidays : (copy) sorted array of datetime64[D]
<ide>
<ide> Examples
<ide> --------
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> """)
<ide>
<ide> add_newdoc('numpy.core.multiarray', 'busdaycalendar', ('weekmask',
<del> """A copy of the seven-element boolean mask indicating valid business days."""))
<add> """A copy of the seven-element boolean mask indicating valid days."""))
<ide>
<ide> add_newdoc('numpy.core.multiarray', 'busdaycalendar', ('holidays',
<del> """A copy of the holiday array indicating blacked out business days."""))
<add> """A copy of the holiday array indicating additional invalid days."""))
<ide>
<ide> add_newdoc('numpy.core.multiarray', 'is_busday',
<ide> """
<ide> is_busday(dates, weekmask='1111100', holidays=None, busdaycal=None, out=None)
<ide>
<del> Calculates which of the given dates are valid business days, and
<del> which are not.
<add> Calculates which of the given dates are valid days, and which are not.
<ide>
<ide> .. versionadded:: 1.7.0
<ide>
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> dates : array_like of datetime64[D]
<ide> The array of dates to process.
<ide> weekmask : str or array_like of bool, optional
<del> A seven-element array indicating which of Monday through Sunday may
<del> be valid business days. May be specified as a list or array, like
<del> [1,1,1,1,1,0,0], a length-seven string like '1111100', or a string
<del> of three-letter weekday names, like 'MonTueWedThuFri'. The latter
<del> string representation is most useful when only one day of the
<del> week is important, like 'Mon' if you want to calculate the date
<del> of Easter.
<add> A seven-element array indicating which of Monday through Sunday are
<add> valid days. May be specified as a length-seven list or array, like
<add> [1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
<add> like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
<add> weekdays, optionally separated by white space. Valid abbreviations
<add> are: Mon Tue Wed Thu Fri Sat Sun
<ide> holidays : array_like of datetime64[D], optional
<del> An array of dates which should be blacked out from being considered
<del> as business days. They may be specified in any order, and NaT
<del> (not-a-time) dates are ignored. Internally, this list is normalized
<del> into a form suited for fast business day calculations.
<add> An array of dates to consider as invalid dates. They may be
<add> specified in any order, and NaT (not-a-time) dates are ignored.
<add> This list is saved in a normalized form that is suited for
<add> fast calculations of valid days.
<ide> busdaycal : busdaycalendar, optional
<del> A `busdaycalendar` object which specifies the business days. If this
<add> A `busdaycalendar` object which specifies the valid days. If this
<ide> parameter is provided, neither weekmask nor holidays may be
<ide> provided.
<ide> out : array of bool, optional
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> -------
<ide> out : array of bool
<ide> An array with the same shape as ``dates``, containing True for
<del> each valid business day, and False for the others.
<add> each valid day, and False for each invalid day.
<ide>
<ide> See Also
<ide> --------
<del> busdaycalendar: An object for efficiently specifying which are business days.
<del> busday_offset : Applies an offset counted in business days.
<del> busday_count : Counts how many business days are in a half-open date range.
<add> busdaycalendar: An object that specifies a custom set of valid days.
<add> busday_offset : Applies an offset counted in valid days.
<add> busday_count : Counts how many valid days are in a half-open date range.
<ide>
<ide> Examples
<ide> --------
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> """
<ide> busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None)
<ide>
<del> First adjusts the date to fall on a business day according to
<add> First adjusts the date to fall on a valid day according to
<ide> the ``roll`` rule, then applies offsets to the given dates
<del> counted in business days.
<add> counted in valid days.
<ide>
<ide> .. versionadded:: 1.7.0
<ide>
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> offsets : array_like of int
<ide> The array of offsets, which is broadcast with ``dates``.
<ide> roll : {'raise', 'nat', 'forward', 'following', 'backward', 'preceding', 'modifiedfollowing', 'modifiedpreceding'}, optional
<del> How to treat dates that do not fall on a business day. The default
<add> How to treat dates that do not fall on a valid day. The default
<ide> is 'raise'.
<ide>
<del> * 'raise' means to raise an exception for invalid business days.
<del> * 'nat' means to return a NaT (not-a-time) for invalid business days.
<del> * 'forward' and 'following' mean to take the first business day
<add> * 'raise' means to raise an exception for an invalid day.
<add> * 'nat' means to return a NaT (not-a-time) for an invalid day.
<add> * 'forward' and 'following' mean to take the first valid day
<ide> later in time.
<del> * 'backward' and 'preceding' mean to take the first business day
<add> * 'backward' and 'preceding' mean to take the first valid day
<ide> earlier in time.
<del> * 'modifiedfollowing' means to take the first business day
<add> * 'modifiedfollowing' means to take the first valid day
<ide> later in time unless it is across a Month boundary, in which
<del> case to take the first business day earlier in time.
<del> * 'modifiedpreceding' means to take the first business day
<add> case to take the first valid day earlier in time.
<add> * 'modifiedpreceding' means to take the first valid day
<ide> earlier in time unless it is across a Month boundary, in which
<del> case to take the first business day later in time.
<add> case to take the first valid day later in time.
<ide> weekmask : str or array_like of bool, optional
<del> A seven-element array indicating which of Monday through Sunday may
<del> be valid business days. May be specified as a list or array, like
<del> [1,1,1,1,1,0,0], a length-seven string like '1111100', or a string
<del> of three-letter weekday names, like 'MonTueWedThuFri'. The latter
<del> string representation is most useful when only one day of the
<del> week is important, like 'Mon' if you want to calculate the date
<del> of Easter.
<add> A seven-element array indicating which of Monday through Sunday are
<add> valid days. May be specified as a length-seven list or array, like
<add> [1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
<add> like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
<add> weekdays, optionally separated by white space. Valid abbreviations
<add> are: Mon Tue Wed Thu Fri Sat Sun
<ide> holidays : array_like of datetime64[D], optional
<del> An array of dates which should be blacked out from being considered
<del> as business days. They may be specified in any order, and NaT
<del> (not-a-time) dates are ignored. Internally, this list is normalized
<del> into a form suited for fast business day calculations.
<add> An array of dates to consider as invalid dates. They may be
<add> specified in any order, and NaT (not-a-time) dates are ignored.
<add> This list is saved in a normalized form that is suited for
<add> fast calculations of valid days.
<ide> busdaycal : busdaycalendar, optional
<del> A `busdaycalendar` object which specifies the business days. If this
<add> A `busdaycalendar` object which specifies the valid days. If this
<ide> parameter is provided, neither weekmask nor holidays may be
<ide> provided.
<ide> out : array of datetime64[D], optional
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide>
<ide> See Also
<ide> --------
<del> busdaycalendar: An object for efficiently specifying which are business days.
<del> is_busday : Returns a boolean array indicating valid business days.
<del> busday_count : Counts how many business days are in a half-open date range.
<add> busdaycalendar: An object that specifies a custom set of valid days.
<add> is_busday : Returns a boolean array indicating valid days.
<add> busday_count : Counts how many valid days are in a half-open date range.
<ide>
<ide> Examples
<ide> --------
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> """
<ide> busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None)
<ide>
<del> Counts the number of business days between `begindates` and
<add> Counts the number of valid days between `begindates` and
<ide> `enddates`, not including the day of `enddates`.
<ide>
<add> If ``enddates`` specifies a date value that is earlier than the
<add> corresponding ``begindates`` date value, the count will be 0.
<add> However, in future this may change to a negative count of valid
<add> days.
<add>
<ide> .. versionadded:: 1.7.0
<ide>
<ide> Parameters
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> The array of the end dates for counting, which are excluded
<ide> from the count themselves.
<ide> weekmask : str or array_like of bool, optional
<del> A seven-element array indicating which of Monday through Sunday may
<del> be valid business days. May be specified as a list or array, like
<del> [1,1,1,1,1,0,0], a length-seven string like '1111100', or a string
<del> of three-letter weekday names, like 'MonTueWedThuFri'. The latter
<del> string representation is most useful when only one day of the
<del> week is important, like 'Mon' if you want to calculate the date
<del> of Easter.
<add> A seven-element array indicating which of Monday through Sunday are
<add> valid days. May be specified as a length-seven list or array, like
<add> [1,1,1,1,1,0,0]; a length-seven string, like '1111100'; or a string
<add> like "Mon Tue Wed Thu Fri", made up of 3-character abbreviations for
<add> weekdays, optionally separated by white space. Valid abbreviations
<add> are: Mon Tue Wed Thu Fri Sat Sun
<ide> holidays : array_like of datetime64[D], optional
<del> An array of dates which should be blacked out from being considered
<del> as business days. They may be specified in any order, and NaT
<del> (not-a-time) dates are ignored. Internally, this list is normalized
<del> into a form suited for fast business day calculations.
<add> An array of dates to consider as invalid dates. They may be
<add> specified in any order, and NaT (not-a-time) dates are ignored.
<add> This list is saved in a normalized form that is suited for
<add> fast calculations of valid days.
<ide> busdaycal : busdaycalendar, optional
<del> A `busdaycalendar` object which specifies the business days. If this
<add> A `busdaycalendar` object which specifies the valid days. If this
<ide> parameter is provided, neither weekmask nor holidays may be
<ide> provided.
<ide> out : array of int, optional
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide> -------
<ide> out : array of int
<ide> An array with a shape from broadcasting ``begindates`` and ``enddates``
<del> together, containing the number of business days between
<add> together, containing the number of valid days between
<ide> the begin and end dates.
<ide>
<ide> See Also
<ide> --------
<del> busdaycalendar: An object for efficiently specifying which are business days.
<del> is_busday : Returns a boolean array indicating valid business days.
<del> busday_offset : Applies an offset counted in business days.
<add> busdaycalendar: An object that specifies a custom set of valid days.
<add> is_busday : Returns a boolean array indicating valid days.
<add> busday_offset : Applies an offset counted in valid days.
<ide>
<ide> Examples
<ide> -------- | 1 |
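The updated docstrings above describe the busday functions in terms of "valid days" rather than "business days". As a quick, hedged illustration of the documented interface (the weekmask, holiday, and dates below are made up for the example, and NumPy 1.7 or later is assumed), here is how `busdaycalendar`, `is_busday`, `busday_offset`, and `busday_count` fit together:

```python
import numpy as np

# Saturdays count as valid days here; Sundays do not, and 2011-07-01 is
# treated as a holiday no matter which weekday it falls on.
cal = np.busdaycalendar(weekmask="Mon Tue Wed Thu Fri Sat",
                        holidays=["2011-07-01"])

# 2011-07-01 is a holiday, 2011-07-02 a Saturday, 2011-07-03 a Sunday.
print(np.is_busday(["2011-07-01", "2011-07-02", "2011-07-03"], busdaycal=cal))
# [False  True False]

# Roll the Sunday forward to the first valid day, then move 2 valid days on.
print(np.busday_offset("2011-07-03", 2, roll="forward", busdaycal=cal))
# 2011-07-06

# Count valid days in the half-open range [2011-07-01, 2011-07-18).
print(np.busday_count("2011-07-01", "2011-07-18", busdaycal=cal))
# 13
```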
Python | Python | add support for chebyshev series and polynomials | 9211df98609ea0348ad51cab611387b8e898a974 | <ide><path>numpy/__init__.py
<ide> Core Linear Algebra Tools
<ide> fft
<ide> Core FFT routines
<add>polynomial
<add> Polynomial tools
<ide> testing
<ide> Numpy testing tools
<ide> f2py
<ide> def pkgload(*packages, **options):
<ide> from lib import *
<ide> import linalg
<ide> import fft
<add> import polynomial
<ide> import random
<ide> import ctypeslib
<ide> import ma
<ide><path>numpy/polynomial/__init__.py
<add>from polynomial import *
<add>from chebyshev import *
<add>from polyutils import *
<add>
<add>from numpy.testing import Tester
<add>test = Tester().test
<add>bench = Tester().bench
<ide><path>numpy/polynomial/chebyshev.py
<add>"""Functions for dealing with Chebyshev series.
<add>
<add>This module provide s a number of functions that are useful in dealing with
<add>Chebyshev series as well as a ``Chebyshev`` class that encapsuletes the usual
<add>arithmetic operations. All the Chebyshev series are assumed to be ordered
<add>from low to high, thus ``array([1,2,3])`` will be treated as the series
<add>``T_0 + 2*T_1 + 3*T_2``
<add>
<add>Constants
<add>---------
<add>- chebdomain -- Chebyshev series default domain
<add>- chebzero -- Chebyshev series that evaluates to 0.
<add>- chebone -- Chebyshev series that evaluates to 1.
<add>- chebx -- Chebyshev series of the identity map (x).
<add>
<add>Arithmetic
<add>----------
<add>- chebadd -- add a Chebyshev series to another.
<add>- chebsub -- subtract a Chebyshev series from another.
<add>- chebmul -- multiply a Chebyshev series by another
<add>- chebdiv -- divide one Chebyshev series by another.
<add>- chebval -- evaluate a Chebyshev series at given points.
<add>
<add>Calculus
<add>--------
<add>- chebder -- differentiate a Chebyshev series.
<add>- chebint -- integrate a Chebyshev series.
<add>
<add>Misc Functions
<add>--------------
<add>- chebfromroots -- create a Chebyshev series with specified roots.
<add>- chebroots -- find the roots of a Chebyshev series.
<add>- chebvander -- Vandermode like matrix for Chebyshev polynomials.
<add>- chebfit -- least squares fit returning a Chebyshev series.
<add>- chebtrim -- trim leading coefficients from a Chebyshev series.
<add>- chebline -- Chebyshev series of given straight line
<add>- cheb2poly -- convert a Chebyshev series to a polynomial.
<add>- poly2cheb -- convert a polynomial to a Chebyshev series.
<add>
<add>Classes
<add>-------
<add>- Chebyshev -- Chebyshev series class.
<add>
<add>Notes
<add>-----
<add>The implementations of multiplication, division, integration, and
<add>differentiation use the algebraic identities:
<add>
<add>.. math ::
<add> T_n(x) = \\frac{z^n + z^{-n}}{2} \\\\
<add> z\\frac{dx}{dz} = \\frac{z - z^{-1}}{2}.
<add>
<add>where
<add>
<add>.. math :: x = \\frac{z + z^{-1}}{2}.
<add>
<add>These identities allow a Chebyshev series to be expressed as a finite,
<add>symmetric Laurent series. These sorts of Laurent series are referred to as
<add>z-series in this module.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>__all__ = ['chebzero', 'chebone', 'chebx', 'chebdomain', 'chebline',
<add> 'chebadd', 'chebsub', 'chebmul', 'chebdiv', 'chebval', 'chebder',
<add> 'chebint', 'cheb2poly', 'poly2cheb', 'chebfromroots', 'chebvander',
<add> 'chebfit', 'chebtrim', 'chebroots', 'Chebyshev']
<add>
<add>import numpy as np
<add>import numpy.linalg as la
<add>import polyutils as pu
<add>from polytemplate import polytemplate
<add>from polyutils import RankWarning, PolyError, PolyDomainError
<add>
<add>chebtrim = pu.trimcoef
<add>
<add>#
<add># A collection of functions for manipulating z-series. These are private
<add># functions and do minimal error checking.
<add>#
<add>
<add>def _cseries_to_zseries(cs) :
<add> """Covert Chebyshev series to z-series.
<add>
<add> Covert a Chebyshev series to the equivalent z-series. The result is
<add> never an empty array. The dtype of the return is the same as that of
<add> the input. No checks are run on the arguments as this routine is for
<add> internal use.
<add>
<add> Parameters
<add> ----------
<add> cs : 1-d ndarray
<add> Chebyshev coefficients, ordered from low to high
<add>
<add> Returns
<add> -------
<add> zs : 1-d ndarray
<add> Odd length symmetric z-series, ordered from low to high.
<add>
<add> """
<add> n = cs.size
<add> zs = np.zeros(2*n-1, dtype=cs.dtype)
<add> zs[n-1:] = cs/2
<add> return zs + zs[::-1]
<add>
<add>def _zseries_to_cseries(zs) :
<add> """Covert z-series to a Chebyshev series.
<add>
<add> Covert a z series to the equivalent Chebyshev series. The result is
<add> never an empty array. The dtype of the return is the same as that of
<add> the input. No checks are run on the arguments as this routine is for
<add> internal use.
<add>
<add> Parameters
<add> ----------
<add> zs : 1-d ndarray
<add> Odd length symmetric z-series, ordered from low to high.
<add>
<add> Returns
<add> -------
<add> cs : 1-d ndarray
<add> Chebyshev coefficients, ordered from low to high.
<add>
<add> """
<add> n = (zs.size + 1)//2
<add> cs = zs[n-1:].copy()
<add> cs[1:n] *= 2
<add> return cs
<add>
<add>def _zseries_mul(z1, z2) :
<add> """Multiply two z-series.
<add>
<add> Multiply two z-series to produce a z-series.
<add>
<add> Parameters
<add> ----------
<add> z1, z2 : 1-d ndarray
<add> The arrays must be 1-d but this is not checked.
<add>
<add> Returns
<add> -------
<add> product : 1-d ndarray
<add> The product z-series.
<add>
<add> Notes
<add> -----
<add> This is simply convolution. If symmetic/anti-symmetric z-series are
<add> denoted by S/A then the following rules apply:
<add>
<add> S*S, A*A -> S
<add> S*A, A*S -> A
<add>
<add> """
<add> return np.convolve(z1, z2)
<add>
<add>def _zseries_div(z1, z2) :
<add> """Divide the first z-series by the second.
<add>
<add> Divide `z1` by `z2` and return the quotient and remainder as z-series.
<add> Warning: this implementation only applies when both z1 and z2 have the
<add> same symmetry, which is sufficient for present purposes.
<add>
<add> Parameters
<add> ----------
<add> z1, z2 : 1-d ndarray
<add> The arrays must be 1-d and have the same symmetry, but this is not
<add> checked.
<add>
<add> Returns
<add> -------
<add>
<add> (quotient, remainder) : 1-d ndarrays
<add> Quotient and remainder as z-series.
<add>
<add> Notes
<add> -----
<add> This is not the same as polynomial division on account of the desired form
<add> of the remainder. If symmetic/anti-symmetric z-series are denoted by S/A
<add> then the following rules apply:
<add>
<add> S/S -> S,S
<add> A/A -> S,A
<add>
<add> The restriction to types of the same symmetry could be fixed but seems like
<add> uneeded generality. There is no natural form for the remainder in the case
<add> where there is no symmetry.
<add>
<add> """
<add> z1 = z1.copy()
<add> z2 = z2.copy()
<add> len1 = len(z1)
<add> len2 = len(z2)
<add> if len2 == 1 :
<add> z1 /= z2
<add> return z1, z1[:1]*0
<add> elif len1 < len2 :
<add> return z1[:1]*0, z1
<add> else :
<add> dlen = len1 - len2
<add> scl = z2[0]
<add> z2 /= scl
<add> quo = np.empty(dlen + 1, dtype=z1.dtype)
<add> i = 0
<add> j = dlen
<add> while i < j :
<add> r = z1[i]
<add> quo[i] = z1[i]
<add> quo[dlen - i] = r
<add> tmp = r*z2
<add> z1[i:i+len2] -= tmp
<add> z1[j:j+len2] -= tmp
<add> i += 1
<add> j -= 1
<add> r = z1[i]
<add> quo[i] = r
<add> tmp = r*z2
<add> z1[i:i+len2] -= tmp
<add> quo /= scl
<add> rem = z1[i+1:i-1+len2].copy()
<add> return quo, rem
<add>
<add>def _zseries_der(zs) :
<add> """Differentiate a z-series.
<add>
<add> The derivative is with respect to x, not z. This is achieved using the
<add> chain rule and the value of dx/dz given in the module notes.
<add>
<add> Parameters
<add> ----------
<add> zs : z-series
<add> The z-series to differentiate.
<add>
<add> Returns
<add> -------
<add> derivative : z-series
<add> The derivative
<add>
<add> Notes
<add> -----
<add> The zseries for x (ns) has been multiplied by two in order to avoid
<add> using floats that are incompatible with Decimal and likely other
<add> specialized scalar types. This scaling has been compensated by
<add> multiplying the value of zs by two also so that the two cancels in the
<add> division.
<add>
<add> """
<add> n = len(zs)//2
<add> ns = np.array([-1, 0, 1], dtype=zs.dtype)
<add> zs *= np.arange(-n, n+1)*2
<add> d, r = _zseries_div(zs, ns)
<add> return d
<add>
<add>def _zseries_int(zs) :
<add> """Integrate a z-series.
<add>
<add> The integral is with respect to x, not z. This is achieved by a change
<add> of variable using dx/dz given in the module notes.
<add>
<add> Parameters
<add> ----------
<add> zs : z-series
<add> The z-series to integrate
<add>
<add> Returns
<add> -------
<add> integral : z-series
<add> The indefinite integral
<add>
<add> Notes
<add> -----
<add> The zseries for x (ns) has been multiplied by two in order to avoid
<add> using floats that are incompatible with Decimal and likely other
<add> specialized scalar types. This scaling has been compensated by
<add> dividing the resulting zs by two.
<add>
<add> """
<add> n = 1 + len(zs)//2
<add> ns = np.array([-1, 0, 1], dtype=zs.dtype)
<add> zs = _zseries_mul(zs, ns)
<add> zs /= np.arange(-n, n+1)*2
<add> zs[n] = 0
<add> return zs
<add>
<add>#
<add># Chebyshev series functions
<add>#
<add>
<add>
<add>def poly2cheb(pol) :
<add> """Convert a polynomial to a Chebyshev series.
<add>
<add> Convert a series containing polynomial coefficients ordered by degree
<add> from low to high to an equivalent Chebyshev series ordered from low to
<add> high.
<add>
<add> Inputs
<add> ------
<add> pol : array_like
<add> 1-d array containing the polynomial coeffients
<add>
<add> Returns
<add> -------
<add> cseries : ndarray
<add> 1-d array containing the coefficients of the equivalent Chebyshev
<add> series.
<add>
<add> See Also
<add> --------
<add> cheb2poly
<add>
<add> """
<add> [pol] = pu.as_series([pol])
<add> pol = pol[::-1]
<add> zs = pol[:1].copy()
<add> x = np.array([.5, 0, .5], dtype=pol.dtype)
<add> for i in range(1, len(pol)) :
<add> zs = _zseries_mul(zs, x)
<add> zs[i] += pol[i]
<add> return _zseries_to_cseries(zs)
<add>
<add>
<add>def cheb2poly(cs) :
<add> """Convert a Chebyshev series to a polynomial.
<add>
<add> Covert a series containing Chebyshev series coefficients orderd from
<add> low to high to an equivalent polynomial ordered from low to
<add> high by degree.
<add>
<add> Inputs
<add> ------
<add> cs : array_like
<add> 1-d array containing the Chebyshev series coeffients ordered from
<add> low to high.
<add>
<add> Returns
<add> -------
<add> pol : ndarray
<add> 1-d array containing the coefficients of the equivalent polynomial
<add> ordered from low to high by degree.
<add>
<add> See Also
<add> --------
<add> poly2cheb
<add>
<add> """
<add> [cs] = pu.as_series([cs])
<add> pol = np.zeros(len(cs), dtype=cs.dtype)
<add> quo = _cseries_to_zseries(cs)
<add> x = np.array([.5, 0, .5], dtype=pol.dtype)
<add> for i in range(0, len(cs) - 1) :
<add> quo, rem = _zseries_div(quo, x)
<add> pol[i] = rem[0]
<add> pol[-1] = quo[0]
<add> return pol
<add>
<add>#
<add># These are constant arrays are of integer type so as to be compatible
<add># with the widest range of other types, such as Decimal.
<add>#
<add>
<add># Chebyshev default domain.
<add>chebdomain = np.array([-1,1])
<add>
<add># Chebyshev coefficients representing zero.
<add>chebzero = np.array([0])
<add>
<add># Chebyshev coefficients representing one.
<add>chebone = np.array([1])
<add>
<add># Chebyshev coefficients representing the identity x.
<add>chebx = np.array([0,1])
<add>
<add>def chebline(off, scl) :
<add> """Chebyshev series whose graph is a straight line
<add>
<add> The line has the formula ``off + scl*x``
<add>
<add> Parameters:
<add> -----------
<add> off, scl : scalars
<add> The specified line is given by ``off + scl*x``.
<add>
<add> Returns:
<add> --------
<add> series : 1d ndarray
<add> The Chebyshev series representation of ``off + scl*x``.
<add>
<add> """
<add> if scl != 0 :
<add> return np.array([off,scl])
<add> else :
<add> return np.array([off])
<add>
<add>def chebfromroots(roots) :
<add> """Generate a Chebyschev series with given roots.
<add>
<add> Generate a Chebyshev series whose roots are given by `roots`. The
<add> resulting series is the produet `(x - roots[0])*(x - roots[1])*...`
<add>
<add> Inputs
<add> ------
<add> roots : array_like
<add> 1-d array containing the roots in sorted order.
<add>
<add> Returns
<add> -------
<add> series : ndarray
<add> 1-d array containing the coefficients of the Chebeshev series
<add> ordered from low to high.
<add>
<add> See Also
<add> --------
<add> chebroots
<add>
<add> """
<add> if len(roots) == 0 :
<add> return np.ones(1)
<add> else :
<add> [roots] = pu.as_series([roots], trim=False)
<add> prd = np.array([1], dtype=roots.dtype)
<add> for r in roots :
<add> fac = np.array([.5, -r, .5], dtype=roots.dtype)
<add> prd = _zseries_mul(fac, prd)
<add> return _zseries_to_cseries(prd)
<add>
<add>
<add>def chebadd(c1, c2):
<add> """Add one Chebyshev series to another.
<add>
<add> Returns the sum of two Chebyshev series `c1` + `c2`. The arguments are
<add> sequences of coefficients ordered from low to high, i.e., [1,2,3] is
<add> the series "T_0 + 2*T_1 + 3*T_2".
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of Chebyshev series coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> Chebyshev series of the sum.
<add>
<add> See Also
<add> --------
<add> chebsub, chebmul, chebdiv, chebpow
<add>
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if len(c1) > len(c2) :
<add> c1[:c2.size] += c2
<add> ret = c1
<add> else :
<add> c2[:c1.size] += c1
<add> ret = c2
<add> return pu.trimseq(ret)
<add>
<add>
<add>def chebsub(c1, c2):
<add> """Subtract one Chebyshev series from another.
<add>
<add> Returns the difference of two Chebyshev series `c1` - `c2`. The
<add> sequences of coefficients are ordered from low to high, i.e., [1,2,3]
<add> is the series ``T_0 + 2*T_1 + 3*T_2.``
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of Chebyshev series coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> Chebyshev series of the difference.
<add>
<add> See Also
<add> --------
<add> chebadd, chebmul, chebdiv, chebpow
<add>
<add> Examples
<add> --------
<add>
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if len(c1) > len(c2) :
<add> c1[:c2.size] -= c2
<add> ret = c1
<add> else :
<add> c2 = -c2
<add> c2[:c1.size] += c1
<add> ret = c2
<add> return pu.trimseq(ret)
<add>
<add>
<add>def chebmul(c1, c2):
<add> """Multiply one Chebyshev series by another.
<add>
<add> Returns the product of two Chebyshev series `c1` * `c2`. The arguments
<add> are sequences of coefficients ordered from low to high, i.e., [1,2,3]
<add> is the series ``T_0 + 2*T_1 + 3*T_2.``
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of chebyshev series coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> Chebyshev series of the product.
<add>
<add> See Also
<add> --------
<add> chebadd, chebsub, chebdiv, chebpow
<add>
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> z1 = _cseries_to_zseries(c1)
<add> z2 = _cseries_to_zseries(c2)
<add> prd = _zseries_mul(z1, z2)
<add> ret = _zseries_to_cseries(prd)
<add> return pu.trimseq(ret)
<add>
<add>
<add>def chebdiv(c1, c2):
<add> """Divide one Chebyshev series by another.
<add>
<add> Returns the quotient of two Chebyshev series `c1` / `c2`. The arguments
<add> are sequences of coefficients ordered from low to high, i.e., [1,2,3]
<add> is the series ``T_0 + 2*T_1 + 3*T_2.``
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of chebyshev series coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> [quo, rem] : ndarray
<add> Chebyshev series of the quotient and remainder.
<add>
<add> See Also
<add> --------
<add> chebadd, chebsub, chebmul, chebpow
<add>
<add> Examples
<add> --------
<add>
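<add> An illustrative sketch; the import path below is an assumption about
<add> how the module is installed.
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> num = C.chebmul([1, 2], [3, 1])  # -> array([ 4., 7., 1.])
<add> >>> C.chebdiv(num, [1, 2])           # -> (array([ 3., 1.]), array([ 0.]))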
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if c2[-1] == 0 :
<add> raise ZeroDivisionError()
<add>
<add> lc1 = len(c1)
<add> lc2 = len(c2)
<add> if lc1 < lc2 :
<add> return c1[:1]*0, c1
<add> elif lc2 == 1 :
<add> return c1/c2[-1], c1[:1]*0
<add> else :
<add> z1 = _cseries_to_zseries(c1)
<add> z2 = _cseries_to_zseries(c2)
<add> quo, rem = _zseries_div(z1, z2)
<add> quo = pu.trimseq(_zseries_to_cseries(quo))
<add> rem = pu.trimseq(_zseries_to_cseries(rem))
<add> return quo, rem
<add>
<add>def chebpow(cs, pow, maxpower=16) :
<add> """Raise a Chebyshev series to a power.
<add>
<add> Returns the Chebyshev series `cs` raised to the power `pow`. The
<add> argument `cs` is a sequence of coefficients ordered from low to high.
<add> i.e., [1,2,3] is the series ``T_0 + 2*T_1 + 3*T_2.``
<add>
<add> Parameters
<add> ----------
<add> cs : array_like
<add> 1d array of chebyshev series coefficients ordered from low to
<add> high.
<add> pow : integer
<add> Power to which the series will be raised
<add> maxpower : integer, optional
<add> Maximum power allowed. This is mainly to limit growth of the series
<add> to unmanageable size. Default is 16
<add>
<add> Returns
<add> -------
<add> coef : ndarray
<add> Chebyshev series of power.
<add>
<add> See Also
<add> --------
<add> chebadd, chebsub, chebmul, chebdiv
<add>
<add> Examples
<add> --------
<add>
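<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> C.chebpow([0, 1], 2)  # T_1**2 == (T_0 + T_2)/2 -> array([ 0.5, 0., 0.5])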
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> power = int(pow)
<add> if power != pow or power < 0 :
<add> raise ValueError("Power must be a non-negative integer.")
<add> elif maxpower is not None and power > maxpower :
<add> raise ValueError("Power is too large")
<add> elif power == 0 :
<add> return np.array([1], dtype=cs.dtype)
<add> elif power == 1 :
<add> return cs
<add> else :
<add> # This can be made more efficient by using powers of two
<add> # in the usual way.
<add> zs = _cseries_to_zseries(cs)
<add> prd = zs
<add> for i in range(2, power + 1) :
<add> prd = np.convolve(prd, zs)
<add> return _zseries_to_cseries(prd)
<add>
<add>def chebder(cs, m=1, scl=1) :
<add> """Differentiate a Chebyshev series.
<add>
<add> Returns the series `cs` differentiated `m` times. At each iteration the
<add> result is multiplied by `scl`. The scaling factor is for use in a
<add> linear change of variable. The argument `cs` is a sequence of
<add> coefficients ordered from low to high. i.e., [1,2,3] is the series
<add> ``T_0 + 2*T_1 + 3*T_2``.
<add>
<add> Parameters
<add> ----------
<add> cs: array_like
<add> 1d array of chebyshev series coefficients ordered from low to high.
<add> m : int, optional
<add> Order of differentiation, must be non-negative. (default: 1)
<add> scl : scalar, optional
<add> The result of each derivation is multiplied by `scl`. The end
<add> result is multiplication by `scl`**`m`. This is for use in a linear
<add> change of variable. (default: 1)
<add>
<add> Returns
<add> -------
<add> der : ndarray
<add> Chebyshev series of the derivative.
<add>
<add> See Also
<add> --------
<add> chebint
<add>
<add> Examples
<add> --------
<add>
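<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> C.chebder([1, 2, 3, 4])  # d/dx of T_0 + 2*T_1 + 3*T_2 + 4*T_3 -> array([ 14., 12., 24.])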
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if m < 0 :
<add> raise ValueError, "The order of derivation must be non-negative"
<add> if not np.isscalar(scl) :
<add> raise ValueError, "The scl parameter must be a scalar"
<add>
<add> if m == 0 :
<add> return cs
<add> elif m >= len(cs) :
<add> return cs[:1]*0
<add> else :
<add> zs = _cseries_to_zseries(cs)
<add> for i in range(m) :
<add> zs = _zseries_der(zs)*scl
<add> return _zseries_to_cseries(zs)
<add>
<add>
<add>def chebint(cs, m=1, k=[], lbnd=0, scl=1) :
<add> """Integrate a Chebyshev series.
<add>
<add> Returns the series integrated from `lbnd` to x `m` times. At each
<add> iteration the resulting series is multiplied by `scl` and an
<add> integration constant specified by `k` is added. The scaling factor is
<add> for use in a linear change of variable. The argument `cs` is a sequence
<add> of coefficients ordered from low to high. i.e., [1,2,3] is the series
<add> ``T_0 + 2*T_1 + 3*T_2``.
<add>
<add>
<add> Parameters
<add> ----------
<add> cs: array_like
<add> 1d array of chebyshev series coefficients ordered from low to high.
<add> m : int, optional
<add> Order of integration, must be positive. (default: 1)
<add> k : {[], list, scalar}, optional
<add> Integration constants. The value of the first integral at zero is
<add> the first value in the list, the value of the second integral at
<add> zero is the second value in the list, and so on. If ``[]``
<add> (default), all constants are set to zero. If `m = 1`, a single scalar
<add> can be given instead of a list.
<add> lbnd : scalar, optional
<add> The lower bound of the integral. (default: 0)
<add> scl : scalar, optional
<add> Following each integration the result is multiplied by `scl` before
<add> the integration constant is added. (default: 1)
<add>
<add> Returns
<add> -------
<add> der : ndarray
<add> Chebyshev series of the integral.
<add>
<add> Raises
<add> ------
<add> ValueError
<add>
<add> See Also
<add> --------
<add> chebder
<add>
<add> Examples
<add> --------
<add>
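<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> C.chebint([1])                            # integral of T_0 is x -> array([ 0., 1.])
<add> >>> C.chebint(C.chebder([1, 2, 3, 4]), k=-2)  # -> array([ 1., 2., 3., 4.])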
<add> """
<add> if np.isscalar(k) :
<add> k = [k]
<add> if m < 1 :
<add> raise ValueError, "The order of integration must be positive"
<add> if len(k) > m :
<add> raise ValueError, "Too many integration constants"
<add> if not np.isscalar(lbnd) :
<add> raise ValueError, "The lbnd parameter must be a scalar"
<add> if not np.isscalar(scl) :
<add> raise ValueError, "The scl parameter must be a scalar"
<add>
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> k = list(k) + [0]*(m - len(k))
<add> for i in range(m) :
<add> zs = _cseries_to_zseries(cs)*scl
<add> zs = _zseries_int(zs)
<add> cs = _zseries_to_cseries(zs)
<add> cs[0] += k[i] - chebval(lbnd, cs)
<add> return cs
<add>
<add>def chebval(x, cs):
<add> """Evaluate a Chebyshev series.
<add>
<add> If `cs` is of length `n`, this function returns :
<add>
<add> ``p(x) = cs[0]*T_0(x) + cs[1]*T_1(x) + ... + cs[n-1]*T_{n-1}(x)``
<add>
<add> If x is a sequence or array then p(x) will have the same shape as x.
<add> If `x` is a ring_like object that supports multiplication and addition
<add> by the values in `cs`, then an object of the same type is returned.
<add>
<add> Parameters
<add> ----------
<add> x : array_like, ring_like
<add> Array of numbers or objects that support multiplication and
<add> addition with themselves and with the elements of `cs`.
<add> cs : array_like
<add> 1-d array of Chebyshev coefficients ordered from low to high.
<add>
<add> Returns
<add> -------
<add> values : ndarray, ring_like
<add> If the return is an ndarray then it has the same shape as `x`.
<add>
<add> See Also
<add> --------
<add> chebfit
<add>
<add> Notes
<add> -----
<add> The evaluation uses Clenshaw recursion, aka synthetic division.
<add>
<add> Examples
<add> --------
<add>
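<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> C.chebval(0.5, [1, 2, 3])         # T_0 + 2*T_1 + 3*T_2 at x = 0.5 -> 0.5
<add> >>> C.chebval([-1, 0, 1], [1, 2, 3])  # -> array([ 2., -2., 6.])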
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if isinstance(x, tuple) or isinstance(x, list) :
<add> x = np.asarray(x)
<add>
<add> if len(cs) == 1 :
<add> c0 = cs[0]
<add> c1 = 0
<add> elif len(cs) == 2 :
<add> c0 = cs[0]
<add> c1 = cs[1]
<add> else :
<add> x2 = 2*x
<add> c0 = cs[-2]
<add> c1 = cs[-1]
<add> for i in range(3, len(cs) + 1) :
<add> tmp = c0
<add> c0 = cs[-i] - c1
<add> c1 = tmp + c1*x2
<add> return c0 + c1*x
<add>
<add>def chebvander(x, deg) :
<add> """Vandermonde matrix of given degree.
<add>
<add> Returns the Vandermonde matrix of degree `deg` and sample points `x`.
<add> This isn't a true Vandermonde matrix because `x` can be an arbitrary
<add> ndarray and the Chebyshev polynomials aren't powers. If ``V`` is the
<add> returned matrix and `x` is a 2d array, then the elements of ``V`` are
<add> ``V[i,j,k] = T_k(x[i,j])``, where ``T_k`` is the Chebyshev polynomial
<add> of degree ``k``.
<add>
<add> Parameters
<add> ----------
<add> x : array_like
<add> Array of points. The values are converted to double or complex doubles.
<add> deg : integer
<add> Degree of the resulting matrix.
<add>
<add> Returns
<add> -------
<add> vander : Vandermonde matrix.
<add> The shape of the returned matrix is ``x.shape + (deg+1,)``. The last
<add> index is the degree.
<add>
<add> """
<add> x = np.asarray(x) + 0.0
<add> order = int(deg) + 1
<add> v = np.ones(x.shape + (order,), dtype=x.dtype)
<add> if order > 1 :
<add> x2 = 2*x
<add> v[...,1] = x
<add> for i in range(2, order) :
<add> v[...,i] = x2*v[...,i-1] - v[...,i-2]
<add> return v
<add>
<add>def chebfit(x, y, deg, rcond=None, full=False):
<add> """Least squares fit of Chebyshev series to data.
<add>
<add> Fit a Chebyshev series ``p(x) = p[0]*T_0(x) + p[1]*T_1(x) + ... +
<add> p[deg]*T_{deg}(x)`` of degree `deg` to points `(x, y)`. Returns a vector
<add> of coefficients `p` that minimizes the squared error.
<add>
<add> Parameters
<add> ----------
<add> x : array_like, shape (M,)
<add> x-coordinates of the M sample points ``(x[i], y[i])``.
<add> y : array_like, shape (M,) or (M, K)
<add> y-coordinates of the sample points. Several data sets of sample
<add> points sharing the same x-coordinates can be fitted at once by
<add> passing in a 2D-array that contains one dataset per column.
<add> deg : int
<add> Degree of the fitting polynomial
<add> rcond : float, optional
<add> Relative condition number of the fit. Singular values smaller than
<add> this relative to the largest singular value will be ignored. The
<add> default value is len(x)*eps, where eps is the relative precision of
<add> the float type, about 2e-16 in most cases.
<add> full : bool, optional
<add> Switch determining nature of return value. When it is False (the
<add> default) just the coefficients are returned, when True diagnostic
<add> information from the singular value decomposition is also returned.
<add>
<add> Returns
<add> -------
<add> coef : ndarray, shape (deg + 1,) or (deg + 1, K)
<add> Chebyshev coefficients ordered from low to high. If `y` was 2-D,
<add> the coefficients for the data in column k of `y` are in column
<add> `k`.
<add>
<add> [residuals, rank, singular_values, rcond] : present when `full` = True
<add> Residuals of the least-squares fit, the effective rank of the
<add> scaled Vandermonde matrix and its singular values, and the
<add> specified value of `rcond`. For more details, see `linalg.lstsq`.
<add>
<add> Warns
<add> -----
<add> RankWarning
<add> The rank of the coefficient matrix in the least-squares fit is
<add> deficient. The warning is only raised if `full` = False. The
<add> warnings can be turned off by
<add>
<add> >>> import warnings
<add> >>> warnings.simplefilter('ignore', RankWarning)
<add>
<add> See Also
<add> --------
<add> chebval : Evaluates a Chebyshev series.
<add> chebvander : Vandermonde matrix of Chebyshev series.
<add> polyfit : least squares fit using polynomials.
<add> linalg.lstsq : Computes a least-squares fit from the matrix.
<add> scipy.interpolate.UnivariateSpline : Computes spline fits.
<add>
<add> Notes
<add> -----
<add> The solution is the coefficients ``c[i]`` of the Chebyshev series
<add> ``T(x)`` that minimizes the squared error
<add>
<add> ``E = \sum_j |y_j - T(x_j)|^2``.
<add>
<add> This problem is solved by setting up the overdetermined matrix
<add> equation
<add>
<add> ``V(x)*c = y``,
<add>
<add> where ``V`` is the Vandermonde matrix of `x`, the elements of ``c`` are
<add> the coefficients to be solved for, and the elements of `y` are the
<add> observed values. This equation is then solved using the singular value
<add> decomposition of ``V``.
<add>
<add> If some of the singular values of ``V`` are so small that they are
<add> neglected, then a `RankWarning` will be issued. This means that the
<add> coefficient values may be poorly determined. Using a lower order fit
<add> will usually get rid of the warning. The `rcond` parameter can also be
<add> set to a value smaller than its default, but the resulting fit may be
<add> spurious and have large contributions from roundoff error.
<add>
<add> Fits using Chebyshev series are usually better conditioned than fits
<add> using power series, but much can depend on the distribution of the
<add> sample points and the smoothness of the data. If the quality of the fit
<add> is inadequate splines may be a good alternative.
<add>
<add> References
<add> ----------
<add> .. [1] Wikipedia, "Curve fitting",
<add> http://en.wikipedia.org/wiki/Curve_fitting
<add>
<add> Examples
<add> --------
<add>
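<add> A small sketch, assuming numpy and this module can be imported as shown:
<add>
<add> >>> import numpy as np
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> x = np.linspace(-1, 1, 9)
<add> >>> C.chebfit(x, x**2, 2)  # x**2 == (T_0 + T_2)/2 -> approximately [0.5, 0., 0.5]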
<add> """
<add> order = int(deg) + 1
<add> x = np.asarray(x) + 0.0
<add> y = np.asarray(y) + 0.0
<add>
<add> # check arguments.
<add> if deg < 0 :
<add> raise ValueError, "expected deg >= 0"
<add> if x.ndim != 1:
<add> raise TypeError, "expected 1D vector for x"
<add> if x.size == 0:
<add> raise TypeError, "expected non-empty vector for x"
<add> if y.ndim < 1 or y.ndim > 2 :
<add> raise TypeError, "expected 1D or 2D array for y"
<add> if x.shape[0] != y.shape[0] :
<add> raise TypeError, "expected x and y to have same length"
<add>
<add> # set rcond
<add> if rcond is None :
<add> rcond = len(x)*np.finfo(x.dtype).eps
<add>
<add> # set up the design matrix and solve the least squares equation
<add> A = chebvander(x, deg)
<add> scl = np.sqrt((A*A).sum(0))
<add> c, resids, rank, s = la.lstsq(A/scl, y, rcond)
<add> c = (c.T/scl).T
<add>
<add> # warn on rank reduction
<add> if rank != order and not full:
<add> msg = "The fit may be poorly conditioned"
<add> warnings.warn(msg, pu.RankWarning)
<add>
<add> if full :
<add> return c, [resids, rank, s, rcond]
<add> else :
<add> return c
<add>
<add>
<add>def chebroots(cs):
<add> """Roots of a Chebyshev series.
<add>
<add> Compute the roots of the Chebyshev series `cs`. The argument `cs` is a
<add> sequence of coefficients ordered from low to high. i.e., [1,2,3] is the
<add> series ``T_0 + 2*T_1 + 3*T_2``.
<add>
<add> Parameters
<add> ----------
<add> cs : array_like
<add> 1D array of Chebyshev coefficients ordered from low to high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> An array containing the complex roots of the chebyshev series.
<add>
<add> Examples
<add> --------
<add>
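<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.chebyshev as C
<add> >>> cs = C.chebfromroots([-1, 0, 1])
<add> >>> C.chebroots(cs)  # -> approximately array([-1., 0., 1.])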
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if len(cs) <= 1 :
<add> return np.array([], dtype=cs.dtype)
<add> if len(cs) == 2 :
<add> return np.array([-cs[0]/cs[1]])
<add> n = len(cs) - 1
<add> cmat = np.zeros((n,n), dtype=cs.dtype)
<add> cmat.flat[1::n+1] = .5
<add> cmat.flat[n::n+1] = .5
<add> cmat[1, 0] = 1
<add> cmat[:,-1] -= cs[:-1]*(.5/cs[-1])
<add> roots = la.eigvals(cmat)
<add> roots.sort()
<add> return roots
<add>
<add>
<add>#
<add># Chebyshev series class
<add>#
<add>
<add>exec polytemplate.substitute(name='Chebyshev', nick='cheb', domain='[-1,1]')
<add>
<ide><path>numpy/polynomial/polynomial.py
<add>"""Functions for dealing with polynomials.
<add>
<add>This module provides a number of functions that are useful in dealing with
<add>polynomials as well as a ``Polynomial`` class that encapsulates the usual
<add>arithmetic operations. All arrays of polynomial coefficients are assumed to
<add>be ordered from low to high degree, thus `array([1,2,3])` will be treated
<add>as the polynomial ``1 + 2*x + 3*x**2``.
<add>
<add>Constants
<add>---------
<add>- polydomain -- Polynomial default domain
<add>- polyzero -- Polynomial that evaluates to 0.
<add>- polyone -- Polynomial that evaluates to 1.
<add>- polyx -- Polynomial of the identity map (x).
<add>
<add>Arithmetic
<add>----------
<add>- polyadd -- add a polynomial to another.
<add>- polysub -- subtract a polynomial from another.
<add>- polymul -- multiply a polynomial by another
<add>- polydiv -- divide one polynomial by another.
<add>- polypow -- raise a polynomial to a positive integer power.
<add>- polyval -- evaluate a polynomial at given points.
<add>
<add>Calculus
<add>--------
<add>- polyder -- differentiate a polynomial.
<add>- polyint -- integrate a polynomial.
<add>
<add>Misc Functions
<add>--------------
<add>- polyfromroots -- create a polynomial with specified roots.
<add>- polyroots -- find the roots of a polynomial.
<add>- polyvander -- Vandermonde-like matrix for powers.
<add>- polyfit -- least squares fit returning a polynomial.
<add>- polytrim -- trim trailing coefficients from a polynomial.
<add>- polyline -- polynomial representing a given straight line.
<add>
<add>Classes
<add>-------
<add>- Polynomial -- polynomial class.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>__all__ = ['polyzero', 'polyone', 'polyx', 'polydomain',
<add> 'polyline','polyadd', 'polysub', 'polymul', 'polydiv', 'polypow', 'polyval',
<add> 'polyder', 'polyint', 'polyfromroots', 'polyvander', 'polyfit',
<add> 'polytrim', 'polyroots', 'Polynomial']
<add>
<add>import warnings
<add>import numpy as np
<add>import numpy.linalg as la
<add>import polyutils as pu
<add>from polytemplate import polytemplate
<add>
<add>polytrim = pu.trimcoef
<add>
<add>#
<add># These constant arrays are of integer type so as to be compatible
<add># with the widest range of other types, such as Decimal.
<add>#
<add>
<add># Polynomial default domain.
<add>polydomain = np.array([-1,1])
<add>
<add># Polynomial coefficients representing zero.
<add>polyzero = np.array([0])
<add>
<add># Polynomial coefficients representing one.
<add>polyone = np.array([1])
<add>
<add># Polynomial coefficients representing the identity x.
<add>polyx = np.array([0,1])
<add>
<add>#
<add># Polynomial series functions
<add>#
<add>
<add>def polyline(off, scl) :
<add> """Polynomial whose graph is a straight line.
<add>
<add> The line has the formula ``off + scl*x``
<add>
<add> Parameters:
<add> -----------
<add> off, scl : scalars
<add> The specified line is given by ``off + scl*x``.
<add>
<add> Returns:
<add> --------
<add> series : 1d ndarray
<add> The polynomial equal to ``off + scl*x``.
<add>
<add> """
<add> if scl != 0 :
<add> return np.array([off,scl])
<add> else :
<add> return np.array([off])
<add>
<add>def polyfromroots(roots) :
<add> """Generate a polynomial with given roots.
<add>
<add> Generate a polynomial whose roots are given by `roots`. The resulting
<add> series is the product `(x - roots[0])*(x - roots[1])*...`
<add>
<add> Parameters
<add> ----------
<add> roots : array_like
<add> 1-d array containing the roots in sorted order.
<add>
<add> Returns
<add> -------
<add> series : ndarray
<add> 1-d array containing the coefficients of the polynomial
<add> ordered from low to high.
<add>
<add> See Also
<add> --------
<add> polyroots
<add>
<add> """
<add> if len(roots) == 0 :
<add> return np.ones(1)
<add> else :
<add> [roots] = pu.as_series([roots], trim=False)
<add> prd = np.zeros(len(roots) + 1, dtype=roots.dtype)
<add> prd[-1] = 1
<add> for i in range(len(roots)) :
<add> prd[-(i+2):-1] -= roots[i]*prd[-(i+1):]
<add> return prd
<add>
<add>
<add>def polyadd(c1, c2):
<add> """Add one polynomial to another.
<add>
<add> Returns the sum of two polynomials `c1` + `c2`. The arguments are
<add> sequences of coefficients ordered from low to high, i.e., [1,2,3] is
<add> the polynomial ``1 + 2*x + 3*x**2``.
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of polynomial coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> polynomial of the sum.
<add>
<add> See Also
<add> --------
<add> polysub, polymul, polydiv, polypow
<add>
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if len(c1) > len(c2) :
<add> c1[:c2.size] += c2
<add> ret = c1
<add> else :
<add> c2[:c1.size] += c1
<add> ret = c2
<add> return pu.trimseq(ret)
<add>
<add>
<add>def polysub(c1, c2):
<add> """Subtract one polynomial from another.
<add>
<add> Returns the difference of two polynomials `c1` - `c2`. The arguments
<add> are sequences of coefficients ordered from low to high, i.e., [1,2,3]
<add> is the polynomial ``1 + 2*x + 3*x**2``.
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of polynomial coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> polynomial of the difference.
<add>
<add> See Also
<add> --------
<add> polyadd, polymul, polydiv, polypow
<add>
<add> Examples
<add> --------
<add>
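<add> A rough sketch of typical use; the import path below assumes this
<add> module is installed as ``numpy.polynomial.polynomial``.
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polysub([1, 2, 3], [3, 2, 1])  # -> array([-2., 0., 2.])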
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if len(c1) > len(c2) :
<add> c1[:c2.size] -= c2
<add> ret = c1
<add> else :
<add> c2 = -c2
<add> c2[:c1.size] += c1
<add> ret = c2
<add> return pu.trimseq(ret)
<add>
<add>
<add>def polymul(c1, c2):
<add> """Multiply one polynomial by another.
<add>
<add> Returns the product of two polynomials `c1` * `c2`. The arguments
<add> are sequences of coefficients ordered from low to high, i.e., [1,2,3]
<add> is the polynomial ``1 + 2*x + 3*x**2.``
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of polynomial coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> polynomial of the product.
<add>
<add> See Also
<add> --------
<add> polyadd, polysub, polydiv, polypow
<add>
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> ret = np.convolve(c1, c2)
<add> return pu.trimseq(ret)
<add>
<add>
<add>def polydiv(c1, c2):
<add> """Divide one polynomial by another.
<add>
<add> Returns the quotient of two polynomials `c1` / `c2`. The arguments are
<add> sequences of coefficients ordered from low to high, i.e., [1,2,3] is
<add> the series ``1 + 2*x + 3*x**2.``
<add>
<add> Parameters
<add> ----------
<add> c1, c2 : array_like
<add> 1d arrays of polynomial coefficients ordered from low to
<add> high.
<add>
<add> Returns
<add> -------
<add> [quo, rem] : ndarray
<add> polynomial of the quotient and remainder.
<add>
<add> See Also
<add> --------
<add> polyadd, polysub, polymul, polypow
<add>
<add> Examples
<add> --------
<add>
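<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> quo, rem = P.polydiv([-6, 11, -6, 1], [-1, 1])  # (x-1)(x-2)(x-3) / (x-1)
<add> >>> quo, rem  # -> (array([ 6., -5., 1.]), array([ 0.]))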
<add> """
<add> # c1, c2 are trimmed copies
<add> [c1, c2] = pu.as_series([c1, c2])
<add> if c2[-1] == 0 :
<add> raise ZeroDivisionError()
<add>
<add> len1 = len(c1)
<add> len2 = len(c2)
<add> if len2 == 1 :
<add> return c1/c2[-1], c1[:1]*0
<add> elif len1 < len2 :
<add> return c1[:1]*0, c1
<add> else :
<add> dlen = len1 - len2
<add> scl = c2[-1]
<add> c2 = c2[:-1]/scl
<add> i = dlen
<add> j = len1 - 1
<add> while i >= 0 :
<add> c1[i:j] -= c2*c1[j]
<add> i -= 1
<add> j -= 1
<add> return c1[j+1:]/scl, pu.trimseq(c1[:j+1])
<add>
<add>def polypow(cs, pow, maxpower=None) :
<add> """Raise a polynomial to a power.
<add>
<add> Returns the polynomial `cs` raised to the power `pow`. The argument
<add> `cs` is a sequence of coefficients ordered from low to high. i.e.,
<add> [1,2,3] is the series ``1 + 2*x + 3*x**2.``
<add>
<add> Parameters
<add> ----------
<add> cs : array_like
<add> 1d array of polynomial coefficients ordered from low to
<add> high.
<add> pow : integer
<add> Power to which the series will be raised
<add> maxpower : integer, optional
<add> Maximum power allowed. This is mainly to limit growth of the series
<add> to unmanageable size. Default is None, i.e. no limit.
<add>
<add> Returns
<add> -------
<add> coef : ndarray
<add> Polynomial of the power.
<add>
<add> See Also
<add> --------
<add> polyadd, polysub, polymul, polydiv
<add>
<add> Examples
<add> --------
<add>
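<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polypow([1, 1], 2)  # (1 + x)**2 -> array([ 1., 2., 1.])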
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> power = int(pow)
<add> if power != pow or power < 0 :
<add> raise ValueError("Power must be a non-negative integer.")
<add> elif maxpower is not None and power > maxpower :
<add> raise ValueError("Power is too large")
<add> elif power == 0 :
<add> return np.array([1], dtype=cs.dtype)
<add> elif power == 1 :
<add> return cs
<add> else :
<add> # This can be made more efficient by using powers of two
<add> # in the usual way.
<add> prd = cs
<add> for i in range(2, power + 1) :
<add> prd = np.convolve(prd, cs)
<add> return prd
<add>
<add>def polyder(cs, m=1, scl=1) :
<add> """Differentiate a polynomial.
<add>
<add> Returns the polynomial `cs` differentiated `m` times. The argument `cs`
<add> is a sequence of coefficients ordered from low to high. i.e., [1,2,3]
<add> is the series ``1 + 2*x + 3*x**2.``
<add>
<add> Parameters
<add> ----------
<add> cs: array_like
<add> 1d array of polynomial coefficients ordered from low to high.
<add> m : int, optional
<add> Order of differentiation, must be non-negative. (default: 1)
<add> scl : scalar, optional
<add> The result of each derivation is multiplied by `scl`. The end
<add> result is multiplication by `scl`**`m`. This is for use in a linear
<add> change of variable. (default: 1)
<add>
<add> Returns
<add> -------
<add> der : ndarray
<add> polynomial of the derivative.
<add>
<add> See Also
<add> --------
<add> polyint
<add>
<add> Examples
<add> --------
<add>
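<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polyder([1, 2, 3, 4])     # d/dx of 1 + 2x + 3x**2 + 4x**3 -> array([ 2., 6., 12.])
<add> >>> P.polyder([1, 2, 3, 4], 2)  # second derivative -> array([ 6., 24.])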
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if m < 0 :
<add> raise ValueError, "The order of derivation must be non-negative"
<add> if not np.isscalar(scl) :
<add> raise ValueError, "The scl parameter must be a scalar"
<add>
<add> if m == 0 :
<add> return cs
<add> elif m >= len(cs) :
<add> return cs[:1]*0
<add> else :
<add> n = len(cs)
<add> d = np.arange(n)*scl
<add> for i in range(m) :
<add> cs[i:] *= d[:n-i]
<add> return cs[i+1:].copy()
<add>
<add>def polyint(cs, m=1, k=[], lbnd=0, scl=1) :
<add> """Integrate a polynomial.
<add>
<add> Returns the polynomial `cs` integrated from `lbnd` to x `m` times. At
<add> each iteration the resulting series is multiplied by `scl` and an
<add> integration constant specified by `k` is added. The scaling factor is
<add> for use in a linear change of variable. The argument `cs` is a sequence
<add> of coefficients ordered from low to high. i.e., [1,2,3] is the
<add> polynomial ``1 + 2*x + 3*x**2``.
<add>
<add>
<add> Parameters
<add> ----------
<add> cs : array_like
<add> 1d array of polynomial coefficients ordered from low to high.
<add> m : int, optional
<add> Order of integration, must be positive. (default: 1)
<add> k : {[], list, scalar}, optional
<add> Integration constants. The value of the first integral at zero is
<add> the first value in the list, the value of the second integral at
<add> zero is the second value in the list, and so on. If ``[]``
<add> (default), all constants are set to zero. If `m = 1`, a single scalar
<add> can be given instead of a list.
<add> lbnd : scalar, optional
<add> The lower bound of the integral. (default: 0)
<add> scl : scalar, optional
<add> Following each integration the result is multiplied by `scl` before
<add> the integration constant is added. (default: 1)
<add>
<add> Returns
<add> -------
<add> der : ndarray
<add> polynomial of the integral.
<add>
<add> Raises
<add> ------
<add> ValueError
<add>
<add> See Also
<add> --------
<add> polyder
<add>
<add> Examples
<add> --------
<add>
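<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polyint([2, 6, 12])       # -> array([ 0., 2., 3., 4.])
<add> >>> P.polyint([2, 6, 12], k=1)  # value 1 at lbnd=0 -> array([ 1., 2., 3., 4.])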
<add> """
<add> if np.isscalar(k) :
<add> k = [k]
<add> if m < 1 :
<add> raise ValueError, "The order of integration must be positive"
<add> if len(k) > m :
<add> raise ValueError, "Too many integration constants"
<add> if not np.isscalar(lbnd) :
<add> raise ValueError, "The lbnd parameter must be a scalar"
<add> if not np.isscalar(scl) :
<add> raise ValueError, "The scl parameter must be a scalar"
<add>
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> k = list(k) + [0]*(m - len(k))
<add> fac = np.arange(1, len(cs) + m)/scl
<add> ret = np.zeros(len(cs) + m, dtype=cs.dtype)
<add> ret[m:] = cs
<add> for i in range(m) :
<add> ret[m - i:] /= fac[:len(cs) + i]
<add> ret[m - i - 1] += k[i] - polyval(lbnd, ret[m - i - 1:])
<add> return ret
<add>
<add>def polyval(x, cs):
<add> """Evaluate a polynomial.
<add>
<add> If `cs` is of length `n`, this function returns :
<add>
<add> ``p(x) = cs[0] + cs[1]*x + ... + cs[n-1]*x**(n-1)``
<add>
<add> If x is a sequence or array then p(x) will have the same shape as x.
<add> If `x` is a ring_like object that supports multiplication and addition
<add> by the values in `cs`, then an object of the same type is returned.
<add>
<add> Parameters
<add> ----------
<add> x : array_like, ring_like
<add> If x is a list or tuple, it is converted to an ndarray. Otherwise
<add> it must support addition and multiplication with itself and the
<add> elements of `cs`.
<add> cs : array_like
<add> 1-d array of polynomial coefficients ordered from low to high.
<add>
<add> Returns
<add> -------
<add> values : ndarray
<add> The return array has the same shape as `x`.
<add>
<add> See Also
<add> --------
<add> polyfit
<add>
<add> Notes
<add> -----
<add> The evaluation uses Horner's method.
<add>
<add> Examples
<add> --------
<add>
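<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polyval(2, [1, 2, 3])       # 1 + 2*2 + 3*2**2 -> 17.0
<add> >>> P.polyval([1, 2], [1, 2, 3])  # -> array([ 6., 17.])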
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if isinstance(x, tuple) or isinstance(x, list) :
<add> x = np.asarray(x)
<add>
<add> c0 = cs[-1] + x*0
<add> for i in range(2, len(cs) + 1) :
<add> c0 = cs[-i] + c0*x
<add> return c0
<add>
<add>def polyvander(x, deg) :
<add> """Vandermonde matrix of given degree.
<add>
<add> Returns the Vandermonde matrix of degree `deg` and sample points `x`.
<add> This isn't a true Vandermonde matrix because `x` can be an arbitrary
<add> ndarray. If ``V`` is the returned matrix and `x` is a 2d array, then
<add> the elements of ``V`` are ``V[i,j,k] = x[i,j]**k``
<add>
<add> Parameters
<add> ----------
<add> x : array_like
<add> Array of points. The values are converted to double or complex doubles.
<add> deg : integer
<add> Degree of the resulting matrix.
<add>
<add> Returns
<add> -------
<add> vander : Vandermonde matrix.
<add> The shape of the returned matrix is ``x.shape + (deg+1,)``. The last
<add> index is the degree.
<add>
<add> """
<add> x = np.asarray(x) + 0.0
<add> order = int(deg) + 1
<add> v = np.ones(x.shape + (order,), dtype=x.dtype)
<add> if order > 1 :
<add> v[...,1] = x
<add> for i in range(2, order) :
<add> v[...,i] = x*v[...,i-1]
<add> return v
<add>
<add>def polyfit(x, y, deg, rcond=None, full=False):
<add> """Least squares fit of polynomial to data.
<add>
<add> Fit a polynomial ``p(x) = p[0] + p[1]*x + ... + p[deg]*x**deg`` of
<add> degree `deg` to points `(x, y)`. Returns a vector of coefficients `p`
<add> that minimizes the squared error.
<add>
<add> Parameters
<add> ----------
<add> x : array_like, shape (M,)
<add> x-coordinates of the M sample points ``(x[i], y[i])``.
<add> y : array_like, shape (M,) or (M, K)
<add> y-coordinates of the sample points. Several data sets of sample
<add> points sharing the same x-coordinates can be fitted at once by
<add> passing in a 2D-array that contains one dataset per column.
<add> deg : int
<add> Degree of the fitting polynomial
<add> rcond : float, optional
<add> Relative condition number of the fit. Singular values smaller than
<add> this relative to the largest singular value will be ignored. The
<add> default value is len(x)*eps, where eps is the relative precision of
<add> the float type, about 2e-16 in most cases.
<add> full : bool, optional
<add> Switch determining nature of return value. When it is False (the
<add> default) just the coefficients are returned, when True diagnostic
<add> information from the singular value decomposition is also returned.
<add>
<add> Returns
<add> -------
<add> coef : ndarray, shape (deg + 1,) or (deg + 1, K)
<add> Polynomial coefficients ordered from low to high. If `y` was 2-D,
<add> the coefficients for the data in column k of `y` are in column
<add> `k`.
<add>
<add> [residuals, rank, singular_values, rcond] : present when `full` = True
<add> Residuals of the least-squares fit, the effective rank of the
<add> scaled Vandermonde matrix and its singular values, and the
<add> specified value of `rcond`. For more details, see `linalg.lstsq`.
<add>
<add> Warns
<add> -----
<add> RankWarning
<add> The rank of the coefficient matrix in the least-squares fit is
<add> deficient. The warning is only raised if `full` = False. The
<add> warnings can be turned off by
<add>
<add> >>> import warnings
<add> >>> warnings.simplefilter('ignore', RankWarning)
<add>
<add> See Also
<add> --------
<add> polyval : Evaluates a polynomial.
<add> polyvander : Vandermonde matrix for powers.
<add> chebfit : least squares fit using Chebyshev series.
<add> linalg.lstsq : Computes a least-squares fit from the matrix.
<add> scipy.interpolate.UnivariateSpline : Computes spline fits.
<add>
<add> Notes
<add> -----
<add> The solution is the coefficients ``c[i]`` of the polynomial ``P(x)``
<add> that minimizes the squared error
<add>
<add> ``E = \sum_j |y_j - P(x_j)|^2``.
<add>
<add> This problem is solved by setting up the overdetermined matrix
<add> equation
<add>
<add> ``V(x)*c = y``,
<add>
<add> where ``V`` is the Vandermonde matrix of `x`, the elements of ``c`` are
<add> the coefficients to be solved for, and the elements of `y` are the
<add> observed values. This equation is then solved using the singular value
<add> decomposition of ``V``.
<add>
<add> If some of the singular values of ``V`` are so small that they are
<add> neglected, then a `RankWarning` will be issued. This means that the
<add> coefficient values may be poorly determined. Using a lower order fit
<add> will usually get rid of the warning. The `rcond` parameter can also be
<add> set to a value smaller than its default, but the resulting fit may be
<add> spurious and have large contributions from roundoff error.
<add>
<add> Fits using double precision and polynomials tend to fail at about
<add> degree 20. Fits using Chebyshev series are generally better
<add> conditioned, but much can still depend on the distribution of the
<add> sample points and the smoothness of the data. If the quality of the fit
<add> is inadequate splines may be a good alternative.
<add>
<add> References
<add> ----------
<add> .. [1] Wikipedia, "Curve fitting",
<add> http://en.wikipedia.org/wiki/Curve_fitting
<add>
<add> Examples
<add> --------
<add>
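<add> A small sketch, assuming numpy and this module can be imported as shown:
<add>
<add> >>> import numpy as np
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> x = np.linspace(-1, 1, 11)
<add> >>> P.polyfit(x, 1 + 2*x + 3*x**2, 2)  # -> approximately array([ 1., 2., 3.])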
<add> """
<add> order = int(deg) + 1
<add> x = np.asarray(x) + 0.0
<add> y = np.asarray(y) + 0.0
<add>
<add> # check arguments.
<add> if deg < 0 :
<add> raise ValueError, "expected deg >= 0"
<add> if x.ndim != 1:
<add> raise TypeError, "expected 1D vector for x"
<add> if x.size == 0:
<add> raise TypeError, "expected non-empty vector for x"
<add> if y.ndim < 1 or y.ndim > 2 :
<add> raise TypeError, "expected 1D or 2D array for y"
<add> if x.shape[0] != y.shape[0] :
<add> raise TypeError, "expected x and y to have same length"
<add>
<add> # set rcond
<add> if rcond is None :
<add> rcond = len(x)*np.finfo(x.dtype).eps
<add>
<add> # set up the design matrix and solve the least squares equation
<add> A = polyvander(x, deg)
<add> scl = np.sqrt((A*A).sum(0))
<add> c, resids, rank, s = la.lstsq(A/scl, y, rcond)
<add> c = (c.T/scl).T
<add>
<add> # warn on rank reduction
<add> if rank != order and not full:
<add> msg = "The fit may be poorly conditioned"
<add> warnings.warn(msg, pu.RankWarning)
<add>
<add> if full :
<add> return c, [resids, rank, s, rcond]
<add> else :
<add> return c
<add>
<add>
<add>def polyroots(cs):
<add> """Roots of a polynomial.
<add>
<add> Compute the roots of the polynomial `cs`. The argument `cs` is a
<add> sequence of coefficients ordered from low to high. i.e., [1,2,3] is the
<add> polynomial ``1 + 2*x + 3*x**2``.
<add>
<add> Parameters
<add> ----------
<add> cs : array_like of shape(M,)
<add> 1D array of polynomial coefficients ordered from low to high.
<add>
<add> Returns
<add> -------
<add> out : ndarray
<add> An array containing the complex roots of the polynomial.
<add>
<add> Examples
<add> --------
<add>
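<add> A small sketch, assuming the module can be imported as shown:
<add>
<add> >>> import numpy.polynomial.polynomial as P
<add> >>> P.polyfromroots([1, 2, 3])    # -> array([ -6., 11., -6., 1.])
<add> >>> P.polyroots([-6, 11, -6, 1])  # -> approximately array([ 1., 2., 3.])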
<add> """
<add> # cs is a trimmed copy
<add> [cs] = pu.as_series([cs])
<add> if len(cs) <= 1 :
<add> return np.array([], dtype=cs.dtype)
<add> if len(cs) == 2 :
<add> return np.array([-cs[0]/cs[1]])
<add> n = len(cs) - 1
<add> cmat = np.zeros((n,n), dtype=cs.dtype)
<add> cmat.flat[n::n+1] = 1
<add> cmat[:,-1] -= cs[:-1]/cs[-1]
<add> roots = la.eigvals(cmat)
<add> roots.sort()
<add> return roots
<add>
<add>
<add>#
<add># polynomial class
<add>#
<add>
<add>exec polytemplate.substitute(name='Polynomial', nick='poly', domain='[-1,1]')
<add>
<ide><path>numpy/polynomial/polytemplate.py
<add>"""Template for the Chebyshev and Polynomial classes.
<add>
<add>"""
<add>import string
<add>
<add>polytemplate = string.Template('''
<add>from __future__ import division
<add>import polyutils as pu
<add>import numpy as np
<add>
<add>class $name(pu.PolyBase) :
<add> """A $name series class.
<add>
<add> Parameters
<add> ----------
<add> coef : array_like
<add> $name coefficients, in increasing order. For example,
<add> ``(1, 2, 3)`` implies ``P_0 + 2P_1 + 3P_2`` where the
<add> ``P_i`` are a graded polynomial basis.
<add> domain : (2,) array_like
<add> Domain to use. The interval ``[domain[0], domain[1]]`` is mapped to
<add> the interval ``$domain`` by shifting and scaling.
<add>
<add> Attributes
<add> ----------
<add> coef : (N,) array
<add> $name coefficients, from low to high.
<add> domain : (2,) array_like
<add> Domain that is mapped to ``$domain``.
<add>
<add> Class Attributes
<add> ----------------
<add> maxpower : int
<add> Maximum power allowed, i.e., the largest number ``n`` such that
<add> ``p(x)**n`` is allowed. This is to limit runaway polynomial size.
<add> domain : (2,) ndarray
<add> Default domain of the class.
<add>
<add> Notes
<add> -----
<add> It is important to specify the domain for many uses of graded polynomials,
<add> for instance in fitting data. This is because many of the important
<add> properties of the polynomial basis only hold in a specified interval and
<add> thus the data must be mapped into that domain in order to benefit.
<add>
<add> Examples
<add> --------
<add>
<add> """
<add> # Limit runaway size. T_n^m has degree n*m
<add> maxpower = 16
<add> domain = np.array($domain)
<add>
<add> def __init__(self, coef, domain=$domain) :
<add> [coef, domain] = pu.as_series([coef, domain], trim=False)
<add> if len(domain) != 2 :
<add> raise ValueError("Domain has wrong number of elements.")
<add> self.coef = coef
<add> self.domain = domain
<add>
<add> def __repr__(self):
<add> format = "%s(%s, %s)"
<add> coef = repr(self.coef)[6:-1]
<add> domain = repr(self.domain)[6:-1]
<add> return format % ('$name', coef, domain)
<add>
<add> def __str__(self) :
<add> format = "%s(%s, %s)"
<add> return format % ('$nick', str(self.coef), str(self.domain))
<add>
<add> # Pickle and copy
<add>
<add> def __getstate__(self) :
<add> ret = self.__dict__.copy()
<add> ret['coef'] = self.coef.copy()
<add> ret['domain'] = self.domain.copy()
<add> return ret
<add>
<add> def __setstate__(self, dict) :
<add> self.__dict__ = dict
<add>
<add> # Call
<add>
<add> def __call__(self, arg) :
<add> off, scl = pu.mapparms(self.domain, $domain)
<add> arg = off + scl*arg
<add> return ${nick}val(arg, self.coef)
<add>
<add>
<add> def __iter__(self) :
<add> return iter(self.coef)
<add>
<add> def __len__(self) :
<add> return len(self.coef)
<add>
<add> # Numeric properties.
<add>
<add>
<add> def __neg__(self) :
<add> return self.__class__(-self.coef, self.domain)
<add>
<add> def __pos__(self) :
<add> return self
<add>
<add> def __add__(self, other) :
<add> """Returns sum"""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> coef = ${nick}add(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> coef = ${nick}add(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __sub__(self, other) :
<add> """Returns difference"""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> coef = ${nick}sub(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> coef = ${nick}sub(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __mul__(self, other) :
<add> """Returns product"""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> coef = ${nick}mul(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> coef = ${nick}mul(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __div__(self, other):
<add> # set to __floordiv__ /.
<add> return self.__floordiv__(other)
<add>
<add> def __truediv__(self, other) :
<add> # there is no true divide if the rhs is not a scalar, although it
<add> # could return the first n elements of an infinite series.
<add> # It is hard to see where n would come from, though.
<add> if isinstance(other, self.__class__) :
<add> if len(other.coef) == 1 :
<add> coef = self.coef/other.coef[0]
<add> else :
<add> return NotImplemented
<add> elif np.isscalar(other) :
<add> coef = self.coef/other
<add> else :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __floordiv__(self, other) :
<add> """Returns the quotient."""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> quo, rem = ${nick}div(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> quo, rem = ${nick}div(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(quo, self.domain)
<add>
<add> def __mod__(self, other) :
<add> """Returns the remainder."""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> quo, rem = ${nick}div(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> quo, rem = ${nick}div(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(rem, self.domain)
<add>
<add> def __divmod__(self, other) :
<add> """Returns quo, remainder"""
<add> if isinstance(other, self.__class__) :
<add> if np.all(self.domain == other.domain) :
<add> quo, rem = ${nick}div(self.coef, other.coef)
<add> else :
<add> raise pu.PolyDomainError()
<add> else :
<add> try :
<add> quo, rem = ${nick}div(self.coef, other)
<add> except :
<add> return NotImplemented
<add> return self.__class__(quo, self.domain), self.__class__(rem, self.domain)
<add>
<add> def __pow__(self, other) :
<add> try :
<add> coef = ${nick}pow(self.coef, other, maxpower = self.maxpower)
<add> except :
<add> raise
<add> return self.__class__(coef, self.domain)
<add>
<add> def __radd__(self, other) :
<add> try :
<add> coef = ${nick}add(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __rsub__(self, other):
<add> try :
<add> coef = ${nick}sub(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __rmul__(self, other) :
<add> try :
<add> coef = ${nick}mul(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(coef, self.domain)
<add>
<add> def __rdiv__(self, other):
<add> # set to __floordiv__ /.
<add> return self.__rfloordiv__(other)
<add>
<add> def __rtruediv__(self, other) :
<add> # there is no true divide if the rhs is not a scalar, although it
<add> # could return the first n elements of an infinite series.
<add> # It is hard to see where n would come from, though.
<add> if len(self.coef) != 1 :
<add> return NotImplemented
<add> try :
<add> quo, rem = ${nick}div(other, self.coef[0])
<add> except :
<add> return NotImplemented
<add> return self.__class__(quo, self.domain)
<add>
<add> def __rfloordiv__(self, other) :
<add> try :
<add> quo, rem = ${nick}div(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(quo, self.domain)
<add>
<add> def __rmod__(self, other) :
<add> try :
<add> quo, rem = ${nick}div(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(rem, self.domain)
<add>
<add> def __rdivmod__(self, other) :
<add> try :
<add> quo, rem = ${nick}div(other, self.coef)
<add> except :
<add> return NotImplemented
<add> return self.__class__(quo, self.domain), self.__class__(rem, self.domain)
<add>
<add> # Enhance me
<add> # some augmented arithmetic operations could be added here
<add>
<add> def __eq__(self, other) :
<add> res = isinstance(other, self.__class__) \
<add> and len(self.coef) == len(other.coef) \
<add> and np.all(self.domain == other.domain) \
<add> and np.all(self.coef == other.coef)
<add> return res
<add>
<add> def __ne__(self, other) :
<add> return not self.__eq__(other)
<add>
<add> #
<add> # Extra numeric functions.
<add> #
<add>
<add> def convert(self, domain=None, kind=None) :
<add> """Convert to different class and/or domain.
<add>
<add> Parameters:
<add> -----------
<add> domain : {None, array_like}
<add> The domain of the new series type instance. If the value is
<add> ``None``, then the default domain of `kind` is used.
<add> kind : {None, class}
<add> The polynomial series type class to which the current instance
<add> should be converted. If kind is ``None``, then the class of the
<add> current instance is used.
<add>
<add> Returns:
<add> --------
<add> new_series_instance : `kind`
<add> The returned class can be of different type than the current
<add> instance and/or have a different domain.
<add>
<add> Examples:
<add> ---------
<add>
<add> Notes:
<add> ------
<add> Conversion between domains and class types can result in
<add> numerically ill defined series.
<add>
<add> """
<add> if kind is None :
<add> kind = $name
<add> if domain is None :
<add> domain = kind.domain
<add> return self(kind.identity(domain))
<add>
<add> def mapparms(self) :
<add> """Return the mapping parameters.
<add>
<add> The returned values define a linear map ``off + scl*x`` that is
<add> applied to the input arguments before the series is evaluated. The
<add> parameters of the map depend on the domain; if the current domain is equal to
<add> the default domain ``$domain`` the resulting map is the identity.
<add> If the coefficients of the ``$name`` instance are to be used
<add> separately, then the linear function must be substituted for the
<add> ``x`` in the standard representation of the base polynomials.
<add>
<add> Returns:
<add> --------
<add> off, scl : floats or complex
<add> The mapping function is defined by ``off + scl*x``.
<add>
<add> Notes:
<add> ------
<add> If the current domain is the interval ``[l_1, r_1]`` and the default
<add> interval is ``[l_2, r_2]``, then the linear mapping function ``L`` is
<add> defined by the equations:
<add>
<add> L(l_1) = l_2
<add> L(r_1) = r_2
<add>
<add> """
<add> return pu.mapparms(self.domain, $domain)
<add>
<add> def trim(self, tol=0) :
<add> """Remove small trailing coefficients
<add>
<add> Remove trailing coefficients until a coefficient is reached whose
<add> absolute value is greater than `tol` or the beginning of the series is
<add> reached. If all the coefficients would be removed the series is set to
<add> ``[0]``. A new $name instance is returned with the new coefficients.
<add> The current instance remains unchanged.
<add>
<add> Parameters:
<add> -----------
<add> tol : non-negative number.
<add> All trailing coefficients less than `tol` will be removed.
<add>
<add> Returns:
<add> -------
<add> new_instance : $name
<add> Contains the new set of coefficients.
<add>
<add> """
<add> return self.__class__(pu.trimcoef(self.coef, tol), self.domain)
<add>
<add> def truncate(self, size) :
<add> """Truncate series by discarding trailing coefficients.
<add>
<add> Reduce the $name series to length `size` by removing trailing
<add> coefficients. The value of `size` must be greater than zero. This
<add> is most likely to be useful in least squares fits when the high
<add> order coefficients are very small.
<add>
<add> Parameters:
<add> -----------
<add> size : int
<add> The series is reduced to length `size` by discarding trailing
<add> coefficients. The value of `size` must be greater than zero.
<add>
<add> Returns:
<add> -------
<add> new_instance : $name
<add> New instance of $name with truncated coefficients.
<add>
<add> """
<add> if size < 1 :
<add> raise ValueError("size must be > 0")
<add> if size >= len(self.coef) :
<add> return self.__class__(self.coef, self.domain)
<add> else :
<add> return self.__class__(self.coef[:size], self.domain)
<add>
<add> def copy(self) :
<add> """Return a copy.
<add>
<add> A new instance of $name is returned that has the same
<add> coefficients and domain as the current instance.
<add>
<add> Returns:
<add> --------
<add> new_instance : $name
<add> New instance of $name with the same coefficients and domain.
<add>
<add> """
<add> return self.__class__(self.coef, self.domain)
<add>
<add> def integ(self, m=1, k=[], lbnd=None) :
<add> """Integrate.
<add>
<add> Return an instance of $name that is the definite integral of the
<add> current series. Refer to `${nick}int` for full documentation.
<add>
<add> Parameters:
<add> -----------
<add> m : positive integer
<add> The number of integrations to perform.
<add> k : array_like
<add> Integration constants. The first constant is applied to the
<add> first integration, the second to the second, and so on. The
<add> list of values must be less than or equal to `m` in length and any
<add> missing values are set to zero.
<add> lbnd : Scalar
<add> The lower bound of the definite integral.
<add>
<add> Returns:
<add> --------
<add> integral : $name
<add> The integral of the original series defined with the same
<add> domain.
<add>
<add> See Also
<add> --------
<add> `${nick}int` : similar function.
<add> `${nick}der` : similar function for derivative.
<add>
<add> """
<add> off, scl = pu.mapparms($domain, self.domain)
<add> if lbnd is None :
<add> lbnd = 0
<add> else :
<add> lbnd = off + scl*lbnd
<add> coef = ${nick}int(self.coef, m, k, lbnd, scl)
<add> return self.__class__(coef, self.domain)
<add>
<add> def deriv(self, m=1):
<add> """Differentiate.
<add>
<add> Return an instance of $name that is the derivative of the current
<add> series. Refer to `${nick}der` for full documentation.
<add>
<add> Parameters:
<add> -----------
<add> m : positive integer
<add> The number of integrations to perform.
<add>
<add> Returns:
<add> --------
<add> derivative : $name
<add> The derivative of the original series defined with the same
<add> domain.
<add>
<add> See Also
<add> --------
<add> `${nick}der` : similar function.
<add> `${nick}int` : similar function for integration.
<add>
<add> """
<add> off, scl = pu.mapparms(self.domain, $domain)
<add> coef = ${nick}der(self.coef, m, scl)
<add> return self.__class__(coef, self.domain)
<add>
<add> def roots(self) :
<add> """Return list of roots.
<add>
<add> Return ndarray of roots for this series. See `${nick}roots` for
<add> full documentation. Note that the accuracy of the roots is likely to
<add> decrease the further outside the domain they lie.
<add>
<add> See Also
<add> --------
<add> `${nick}roots` : similar function
<add> `${nick}fromroots` : function to generate series from roots.
<add>
<add> """
<add> roots = ${nick}roots(self.coef)
<add> return pu.mapdomain(roots, $domain, self.domain)
<add>
<add> @staticmethod
<add> def fit(x, y, deg, domain=$domain, rcond=None, full=False) :
<add> """Least squares fit to data.
<add>
<add> Return the least squares fit to the data `y` sampled at `x` as a
<add> $name object. See ${nick}fit for full documentation.
<add>
<add> See Also
<add> --------
<add> ${nick}fit : similar function
<add>
<add> """
<add> if domain is None :
<add> domain = pu.getdomain(x)
<add> xnew = pu.mapdomain(x, domain, $domain)
<add> res = ${nick}fit(xnew, y, deg, rcond=rcond, full=full)
<add> if full :
<add> [coef, status] = res
<add> return $name(coef, domain=domain), status
<add> else :
<add> coef = res
<add> return $name(coef, domain=domain)
<add>
<add> @staticmethod
<add> def fromroots(roots, domain=$domain) :
<add> """Return $name object with specified roots.
<add>
<add> See ${nick}fromroots for full documentation.
<add>
<add> See Also
<add> --------
<add> ${nick}fromroots : equivalent function
<add>
<add> """
<add> if domain is None :
<add> domain = pu.getdomain(roots)
<add> rnew = pu.mapdomain(roots, domain, $domain)
<add> coef = ${nick}fromroots(rnew)
<add> return $name(coef, domain=domain)
<add>
<add> @staticmethod
<add> def identity(domain=$domain) :
<add> """Identity function.
<add>
<add> If ``p`` is the returned $name object, then ``p(x) == x`` for all
<add> values of x.
<add>
<add> Parameters:
<add> -----------
<add> domain : array_like
<add> The resulting array must be of the form ``[beg, end]``, where
<add> ``beg`` and ``end`` are the endpoints of the domain.
<add>
<add> Returns:
<add> --------
<add> identity : $name object
<add>
<add> """
<add> off, scl = pu.mapparms($domain, domain)
<add> coef = ${nick}line(off, scl)
<add> return $name(coef, domain)
<add>''')
<ide><path>numpy/polynomial/polyutils.py
<add>"""Utility functions for polynomial modules.
<add>
<add>This module provides errors, warnings, and a polynomial base class along
<add>with some common routines that are used in both the polynomial and
<add>chebyshev modules.
<add>
<add>Errors
<add>------
<add>- PolyError -- base class for errors
<add>- PolyDomainError -- mismatched domains
<add>
<add>Warnings
<add>--------
<add>- RankWarning -- issued by least squares fits to warn of deficient rank
<add>
<add>Base Class
<add>----------
<add>- PolyBase -- Base class for the Polynomial and Chebyshev classes.
<add>
<add>Functions
<add>---------
<add>- as_series -- turns list of array_like into 1d arrays of common type
<add>- trimseq -- removes trailing zeros
<add>- trimcoef -- removes trailing coefficients less than given magnitude
<add>- getdomain -- finds appropriate domain for collection of points
<add>- mapdomain -- maps points between domains
<add>- mapparms -- parameters of the linear map between domains
<add>
<add>"""
<add>from __future__ import division
<add>
<add>__all__ = ['RankWarning', 'PolyError', 'PolyDomainError', 'PolyBase',
<add> 'as_series', 'trimseq', 'trimcoef', 'getdomain', 'mapdomain',
<add> 'mapparms']
<add>
<add>import warnings, exceptions
<add>import numpy as np
<add>
<add>#
<add># Warnings and Exceptions
<add>#
<add>
<add>class RankWarning(UserWarning) :
<add> """Issued by chebfit when the design matrix is rank deficient."""
<add> pass
<add>
<add>class PolyError(Exception) :
<add> """Base class for errors in this module."""
<add> pass
<add>
<add>class PolyDomainError(PolyError) :
<add> """Issued by the generic Poly class when two domains don't match.
<add>
<add> This is raised when a binary operation is passed Poly objects with
<add> different domains.
<add>
<add> """
<add> pass
<add>
<add>#
<add># Base class for all polynomial types
<add>#
<add>
<add>class PolyBase(object) :
<add> pass
<add>
<add>#
<add># Helper functions to convert inputs to 1d arrays
<add>#
<add>def trimseq(seq) :
<add> """Remove trailing zeros from a sequence of Poly series coefficients.
<add>
<add> Parameters
<add> ----------
<add> seq : sequence
<add> Sequence of Poly series coefficients. This routine fails for
<add> empty sequences.
<add>
<add> Returns
<add> -------
<add> series : sequence
<add> Subsequence with trailing zeros removed. If the resulting sequence
<add> would be empty, return the first element. The returned sequence may
<add> or may not be a view.
<add>
<add> Notes
<add> -----
<add> Do not lose the type info if the sequence contains unknown objects.
<add>
<add> """
<add> if len(seq) == 0 :
<add> return seq
<add> else :
<add> for i in range(len(seq) - 1, -1, -1) :
<add> if seq[i] != 0 :
<add> break
<add> return seq[:i+1]
<add>
<add>
<add>def as_series(alist, trim=True) :
<add> """Return arguments as a list of 1d arrays.
<add>
<add> The return type will always be an array of double, complex double, or
<add> object.
<add>
<add> Parameters
<add> ----------
<add> [a1, a2,...] : list of array_like.
<add> The arrays must have no more than one dimension when converted.
<add> trim : boolean
<add> When True, trailing zeros are removed from the inputs.
<add> When False, the inputs are passed through intact.
<add>
<add> Returns
<add> -------
<add> [a1, a2,...] : list of 1d-arrays
<add> A copy of the input data as 1-d arrays.
<add>
<add> Raises
<add> ------
<add> ValueError :
<add> Raised when an input cannot be converted to a 1-d array or the
<add> resulting array is empty.
<add>
<add> """
<add> arrays = [np.array(a, ndmin=1, copy=0) for a in alist]
<add> if min([a.size for a in arrays]) == 0 :
<add> raise ValueError("Coefficient array is empty")
<add> if max([a.ndim for a in arrays]) > 1 :
<add> raise ValueError("Coefficient array is not 1-d")
<add> if trim :
<add> arrays = [trimseq(a) for a in arrays]
<add>
<add> if any([a.dtype == np.dtype(object) for a in arrays]) :
<add> ret = []
<add> for a in arrays :
<add> if a.dtype != np.dtype(object) :
<add> tmp = np.empty(len(a), dtype=np.dtype(object))
<add> tmp[:] = a[:]
<add> ret.append(tmp)
<add> else :
<add> ret.append(a.copy())
<add> else :
<add> try :
<add> dtype = np.common_type(*arrays)
<add> except :
<add> raise ValueError("Coefficient arrays have no common type")
<add> ret = [np.array(a, copy=1, dtype=dtype) for a in arrays]
<add> return ret
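A short illustration of what `as_series` returns (a sketch only, assuming the package layout added in `setup.py` below, i.e. that `numpy.polynomial.polyutils` is importable); the output dtypes follow from `np.common_type`:

```python
import numpy as np
from numpy.polynomial import polyutils as pu

a, b = pu.as_series([[1, 2, 3, 0], np.array([1, 0], dtype=np.int32)])
# a -> array([ 1.,  2.,  3.])   trailing zero trimmed, promoted to double
# b -> array([ 1.])             both outputs share a common inexact dtype
```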
<add>
<add>
<add>def trimcoef(c, tol=0) :
<add> """Remove small trailing coefficients from a polynomial series.
<add>
<add> Parameters
<add> ----------
<add> c : array_like
<add> 1-d array of coefficients, ordered from low to high.
<add> tol : number
<add> Trailing elements with absolute value less than or equal to `tol` are removed.
<add>
<add> Returns
<add> -------
<add> trimmed : ndarray
<add> 1-d array with trailing zeros removed. If the resulting series would
<add> be empty, a series containing a single zero is returned.
<add>
<add> Raises
<add> ------
<add> ValueError : if tol < 0
<add>
<add> """
<add> if tol < 0 :
<add> raise ValueError("tol must be non-negative")
<add>
<add> [c] = as_series([c])
<add> [ind] = np.where(np.abs(c) > tol)
<add> if len(ind) == 0 :
<add> return c[:1]*0
<add> else :
<add> return c[:ind[-1] + 1].copy()
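For quick reference, the behaviour of `trimcoef` for the three `tol` values exercised by `test_trimcoef` later in this patch (sketch only; assumes `numpy.polynomial.polyutils` is importable as `pu`):

```python
from numpy.polynomial import polyutils as pu

pu.trimcoef([2, -1, 1, 0])       # -> array([ 2., -1.,  1.])  drop the trailing zero
pu.trimcoef([2, -1, 1, 0], 1)    # -> array([ 2.])  |-1| and |1| do not exceed tol
pu.trimcoef([2, -1, 1, 0], 2)    # -> array([ 0.])  nothing exceeds tol, keep one zero
```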
<add>
<add>def getdomain(x) :
<add> """Determine suitable domain for given points.
<add>
<add> Find a suitable domain in which to fit a function defined at the points
<add> `x` with a polynomial or Chebyshev series.
<add>
<add> Parameters
<add> ----------
<add> x : array_like
<add> 1D array of points whose domain will be determined.
<add>
<add> Returns
<add> -------
<add> domain : ndarray
<add> 1D ndarray containing two values. If the inputs are complex, then
<add> the two points are the corners of the smallest rectangle aligned
<add> with the axes in the complex plane containing the points `x`. If
<add> the inputs are real, then the two points are the ends of the
<add> smallest interval containing the points `x`.
<add>
<add> See Also
<add> --------
<add> mapparms, mapdomain
<add>
<add> """
<add> [x] = as_series([x], trim=False)
<add> if x.dtype.char in np.typecodes['Complex'] :
<add> rmin, rmax = x.real.min(), x.real.max()
<add> imin, imax = x.imag.min(), x.imag.max()
<add> return np.array((complex(rmin, imin), complex(rmax, imax)))
<add> else :
<add> return np.array((x.min(), x.max()))
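The two branches above (real vs. complex input) in one short sketch, matching the values used in `test_getdomain` further down (import path assumed as in `setup.py` below):

```python
from numpy.polynomial import polyutils as pu

pu.getdomain([1, 10, 3, -1])          # -> array([ -1.,  10.])        smallest interval
pu.getdomain([1 + 1j, 1 - 1j, 0, 2])  # -> array([ 0.-1.j,  2.+1.j])  bounding rectangle
```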
<add>
<add>def mapparms(old, new) :
<add> """Linear map between domains.
<add>
<add> Return the parameters of the linear map ``off + scl*x`` that maps the
<add> `old` domain to the `new` domain. The map is defined by the requirement
<add> that the left end of the old domain map to the left end of the new
<add> domain, and similarly for the right ends.
<add>
<add> Parameters
<add> ----------
<add> old, new : array_like
<add> The two domains. They should both convert to 1-d arrays containing two values.
<add>
<add> Returns
<add> -------
<add> off, scl : scalars
<add> The map ``off + scl*x`` maps the first domain to the second.
<add>
<add> See Also
<add> --------
<add> getdomain, mapdomain
<add>
<add> """
<add> oldlen = old[1] - old[0]
<add> newlen = new[1] - new[0]
<add> off = (old[1]*new[0] - old[0]*new[1])/oldlen
<add> scl = newlen/oldlen
<add> return off, scl
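A worked example of the two formulas above, using the same numbers as `test_mapparms` below: for `old = [0, 4]` and `new = [1, 3]`, `oldlen = 4` and `newlen = 2`, so `off = (4*1 - 0*3)/4 = 1` and `scl = 2/4 = 0.5`; the map `1 + 0.5*x` sends 0 to 1 and 4 to 3. As a sketch (import path assumed):

```python
from numpy.polynomial import polyutils as pu

off, scl = pu.mapparms([0, 4], [1, 3])
# off -> 1.0, scl -> 0.5
# endpoint check: off + scl*0 == 1 and off + scl*4 == 3
```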
<add>
<add>def mapdomain(x, old, new) :
<add> """Apply linear map to input points.
<add>
<add> The linear map of the form ``off + scl*x`` that takes the `old` domain
<add> to the `new` domain is applied to the points `x`.
<add>
<add> Parameters
<add> ----------
<add> x : array_like
<add> Points to be mapped
<add> old, new : array_like
<add> The two domains that determine the map. They should both convert
<add> to 1-d arrays containing two values.
<add>
<add> Returns
<add> -------
<add> new_x : ndarray
<add> Array of points of the same shape as the input `x` after the linear
<add> map defined by the two domains is applied.
<add>
<add> See Also
<add> --------
<add> getdomain, mapparms
<add>
<add> """
<add> [x] = as_series([x], trim=False)
<add> off, scl = mapparms(old, new)
<add> return off + scl*x
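And `mapdomain` simply composes `mapparms` with the affine map. A minimal sketch, again assuming `numpy.polynomial.polyutils` is importable:

```python
import numpy as np
from numpy.polynomial import polyutils as pu

x = np.array([0., 1., 2., 3., 4.])
pu.mapdomain(x, [0, 4], [-1, 1])   # -> array([-1. , -0.5,  0. ,  0.5,  1. ])
```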
<ide><path>numpy/polynomial/setup.py
<add>
<add>
<add>def configuration(parent_package='',top_path=None):
<add> from numpy.distutils.misc_util import Configuration
<add> config = Configuration('polynomial',parent_package,top_path)
<add> config.add_data_dir('tests')
<add> return config
<add>
<add>if __name__ == '__main__':
<add> from numpy.distutils.core import setup
<add> setup(configuration=configuration)
<ide><path>numpy/polynomial/tests/test_chebyshev.py
<add>"""Tests for chebyshev module.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>import numpy as np
<add>import numpy.polynomial.chebyshev as ch
<add>from numpy.testing import *
<add>from exceptions import TypeError, ValueError
<add>
<add>def trim(x) :
<add> return ch.chebtrim(x, tol=1e-6)
<add>
<add>T0 = [ 1]
<add>T1 = [ 0, 1]
<add>T2 = [-1, 0, 2]
<add>T3 = [ 0, -3, 0, 4]
<add>T4 = [ 1, 0, -8, 0, 8]
<add>T5 = [ 0, 5, 0, -20, 0, 16]
<add>T6 = [-1, 0, 18, 0, -48, 0, 32]
<add>T7 = [ 0, -7, 0, 56, 0, -112, 0, 64]
<add>T8 = [ 1, 0, -32, 0, 160, 0, -256, 0, 128]
<add>T9 = [ 0, 9, 0, -120, 0, 432, 0, -576, 0, 256]
<add>
<add>Tlist = [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9]
<add>
<add>
<add>class TestPrivate(TestCase) :
<add>
<add> def test__cseries_to_zseries(self) :
<add> for i in range(5) :
<add> inp = np.array([2] + [1]*i, np.double)
<add> tgt = np.array([.5]*i + [2] + [.5]*i, np.double)
<add> res = ch._cseries_to_zseries(inp)
<add> assert_equal(res, tgt)
<add>
<add> def test__zseries_to_cseries(self) :
<add> for i in range(5) :
<add> inp = np.array([.5]*i + [2] + [.5]*i, np.double)
<add> tgt = np.array([2] + [1]*i, np.double)
<add> res = ch._zseries_to_cseries(inp)
<add> assert_equal(res, tgt)
<add>
<add>
<add>class TestConstants(TestCase) :
<add>
<add> def test_chebdomain(self) :
<add> assert_equal(ch.chebdomain, [-1, 1])
<add>
<add> def test_chebzero(self) :
<add> assert_equal(ch.chebzero, [0])
<add>
<add> def test_chebone(self) :
<add> assert_equal(ch.chebone, [1])
<add>
<add> def test_chebx(self) :
<add> assert_equal(ch.chebx, [0, 1])
<add>
<add>
<add>class TestArithmetic(TestCase) :
<add>
<add> def test_chebadd(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(max(i,j) + 1)
<add> tgt[i] += 1
<add> tgt[j] += 1
<add> res = ch.chebadd([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_chebsub(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(max(i,j) + 1)
<add> tgt[i] += 1
<add> tgt[j] -= 1
<add> res = ch.chebsub([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_chebmul(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(i + j + 1)
<add> tgt[i + j] += .5
<add> tgt[abs(i - j)] += .5
<add> res = ch.chebmul([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_chebdiv(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> ci = [0]*i + [1]
<add> cj = [0]*j + [1]
<add> tgt = ch.chebadd(ci, cj)
<add> quo, rem = ch.chebdiv(tgt, ci)
<add> res = ch.chebadd(ch.chebmul(quo, ci), rem)
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_chebval(self) :
<add> def f(x) :
<add> return x*(x**2 - 1)
<add>
<add> #check empty input
<add> assert_equal(ch.chebval([], 1).size, 0)
<add>
<add> #check normal input
<add> for i in range(5) :
<add> tgt = 1
<add> res = ch.chebval(1, [0]*i + [1])
<add> assert_almost_equal(res, tgt)
<add> tgt = (-1)**i
<add> res = ch.chebval(-1, [0]*i + [1])
<add> assert_almost_equal(res, tgt)
<add> zeros = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
<add> tgt = 0
<add> res = ch.chebval(zeros, [0]*i + [1])
<add> assert_almost_equal(res, tgt)
<add> x = np.linspace(-1,1)
<add> tgt = f(x)
<add> res = ch.chebval(x, [0, -.25, 0, .25])
<add> assert_almost_equal(res, tgt)
<add>
<add> #check that shape is preserved
<add> for i in range(3) :
<add> dims = [2]*i
<add> x = np.zeros(dims)
<add> assert_equal(ch.chebval(x, [1]).shape, dims)
<add> assert_equal(ch.chebval(x, [1,0]).shape, dims)
<add> assert_equal(ch.chebval(x, [1,0,0]).shape, dims)
<add>
<add>
<add>class TestCalculus(TestCase) :
<add>
<add> def test_chebint(self) :
<add> # check exceptions
<add> assert_raises(ValueError, ch.chebint, [0], -1)
<add> assert_raises(ValueError, ch.chebint, [0], 1, [0,0])
<add> assert_raises(ValueError, ch.chebint, [0], 1, lbnd=[0,0])
<add> assert_raises(ValueError, ch.chebint, [0], 1, scl=[0,0])
<add>
<add> # check single integration with integration constant
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> tgt = [i] + [0]*i + [1/scl]
<add> chebpol = ch.poly2cheb(pol)
<add> chebint = ch.chebint(chebpol, m=1, k=[i])
<add> res = ch.cheb2poly(chebint)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check single integration with integration constant and lbnd
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> chebpol = ch.poly2cheb(pol)
<add> chebint = ch.chebint(chebpol, m=1, k=[i], lbnd=-1)
<add> assert_almost_equal(ch.chebval(-1, chebint), i)
<add>
<add> # check single integration with integration constant and scaling
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> tgt = [i] + [0]*i + [2/scl]
<add> chebpol = ch.poly2cheb(pol)
<add> chebint = ch.chebint(chebpol, m=1, k=[i], scl=2)
<add> res = ch.cheb2poly(chebint)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with default k
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = ch.chebint(tgt, m=1)
<add> res = ch.chebint(pol, m=j)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with defined k
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = ch.chebint(tgt, m=1, k=[k])
<add> res = ch.chebint(pol, m=j, k=range(j))
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with lbnd
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = ch.chebint(tgt, m=1, k=[k], lbnd=-1)
<add> res = ch.chebint(pol, m=j, k=range(j), lbnd=-1)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with scaling
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = ch.chebint(tgt, m=1, k=[k], scl=2)
<add> res = ch.chebint(pol, m=j, k=range(j), scl=2)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> def test_chebder(self) :
<add> # check exceptions
<add> assert_raises(ValueError, ch.chebder, [0], -1)
<add> # check that zeroth derivative does nothing
<add> for i in range(5) :
<add> tgt = [1] + [0]*i
<add> res = ch.chebder(tgt, m=0)
<add> assert_equal(trim(res), trim(tgt))
<add> # check that derivation is the inverse of integration
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> tgt = [1] + [0]*i
<add> res = ch.chebder(ch.chebint(tgt, m=j), m=j)
<add> assert_almost_equal(trim(res), trim(tgt))
<add> # check derivation with scaling
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> tgt = [1] + [0]*i
<add> res = ch.chebder(ch.chebint(tgt, m=j, scl=2), m=j, scl=.5)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add>
<add>class TestMisc(TestCase) :
<add>
<add> def test_chebfromroots(self) :
<add> res = ch.chebfromroots([])
<add> assert_almost_equal(trim(res), [1])
<add> for i in range(1,5) :
<add> roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
<add> tgt = [0]*i + [1]
<add> res = ch.chebfromroots(roots)*2**(i-1)
<add> assert_almost_equal(trim(res),trim(tgt))
<add>
<add> def test_chebroots(self) :
<add> assert_almost_equal(ch.chebroots([1]), [])
<add> assert_almost_equal(ch.chebroots([1, 2]), [-.5])
<add> for i in range(2,5) :
<add> tgt = np.linspace(-1, 1, i)
<add> res = ch.chebroots(ch.chebfromroots(tgt))
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> def test_chebvander(self) :
<add> # check for 1d x
<add> x = np.arange(3)
<add> v = ch.chebvander(x, 3)
<add> assert_(v.shape == (3,4))
<add> for i in range(4) :
<add> coef = [0]*i + [1]
<add> assert_almost_equal(v[...,i], ch.chebval(x, coef))
<add> # check for 2d x
<add> x = np.array([[1,2],[3,4],[5,6]])
<add> v = ch.chebvander(x, 3)
<add> assert_(v.shape == (3,2,4))
<add> for i in range(4) :
<add> coef = [0]*i + [1]
<add> assert_almost_equal(v[...,i], ch.chebval(x, coef))
<add>
<add> def test_chebfit(self) :
<add> def f(x) :
<add> return x*(x - 1)*(x - 2)
<add> # Test exceptions
<add> assert_raises(ValueError, ch.chebfit, [1], [1], -1)
<add> assert_raises(TypeError, ch.chebfit, [[1]], [1], 0)
<add> assert_raises(TypeError, ch.chebfit, [], [1], 0)
<add> assert_raises(TypeError, ch.chebfit, [1], [[[1]]], 0)
<add> assert_raises(TypeError, ch.chebfit, [1, 2], [1], 0)
<add> assert_raises(TypeError, ch.chebfit, [1], [1, 2], 0)
<add> # Test fit
<add> x = np.linspace(0,2)
<add> y = f(x)
<add> coef = ch.chebfit(x, y, 3)
<add> assert_equal(len(coef), 4)
<add> assert_almost_equal(ch.chebval(x, coef), y)
<add> coef = ch.chebfit(x, y, 4)
<add> assert_equal(len(coef), 5)
<add> assert_almost_equal(ch.chebval(x, coef), y)
<add> coef2d = ch.chebfit(x, np.array([y,y]).T, 4)
<add> assert_almost_equal(coef2d, np.array([coef,coef]).T)
<add>
<add> def test_chebtrim(self) :
<add> coef = [2, -1, 1, 0]
<add> # Test exceptions
<add> assert_raises(ValueError, ch.chebtrim, coef, -1)
<add> # Test results
<add> assert_equal(ch.chebtrim(coef), coef[:-1])
<add> assert_equal(ch.chebtrim(coef, 1), coef[:-3])
<add> assert_equal(ch.chebtrim(coef, 2), [0])
<add>
<add> def test_chebline(self) :
<add> assert_equal(ch.chebline(3,4), [3, 4])
<add>
<add> def test_cheb2poly(self) :
<add> for i in range(10) :
<add> assert_equal(ch.cheb2poly([0]*i + [1]), Tlist[i])
<add>
<add> def test_poly2cheb(self) :
<add> for i in range(10) :
<add> assert_equal(ch.poly2cheb(Tlist[i]), [0]*i + [1])
<add>
<add>
<add>class TestChebyshevClass(TestCase) :
<add>
<add> p1 = ch.Chebyshev([1,2,3])
<add> p2 = ch.Chebyshev([1,2,3], [0,1])
<add> p3 = ch.Chebyshev([1,2])
<add> p4 = ch.Chebyshev([2,2,3])
<add> p5 = ch.Chebyshev([3,2,3])
<add>
<add> def test_equal(self) :
<add> assert_(self.p1 == self.p1)
<add> assert_(self.p2 == self.p2)
<add> assert_(not self.p1 == self.p2)
<add> assert_(not self.p1 == self.p3)
<add> assert_(not self.p1 == [1,2,3])
<add>
<add> def test_not_equal(self) :
<add> assert_(not self.p1 != self.p1)
<add> assert_(not self.p2 != self.p2)
<add> assert_(self.p1 != self.p2)
<add> assert_(self.p1 != self.p3)
<add> assert_(self.p1 != [1,2,3])
<add>
<add> def test_add(self) :
<add> tgt = ch.Chebyshev([2,4,6])
<add> assert_(self.p1 + self.p1 == tgt)
<add> assert_(self.p1 + [1,2,3] == tgt)
<add> assert_([1,2,3] + self.p1 == tgt)
<add>
<add> def test_sub(self) :
<add> tgt = ch.Chebyshev([1])
<add> assert_(self.p4 - self.p1 == tgt)
<add> assert_(self.p4 - [1,2,3] == tgt)
<add> assert_([2,2,3] - self.p1 == tgt)
<add>
<add> def test_mul(self) :
<add> tgt = ch.Chebyshev([7.5, 10., 8., 6., 4.5])
<add> assert_(self.p1 * self.p1 == tgt)
<add> assert_(self.p1 * [1,2,3] == tgt)
<add> assert_([1,2,3] * self.p1 == tgt)
<add>
<add> def test_floordiv(self) :
<add> tgt = ch.Chebyshev([1])
<add> assert_(self.p4 // self.p1 == tgt)
<add> assert_(self.p4 // [1,2,3] == tgt)
<add> assert_([2,2,3] // self.p1 == tgt)
<add>
<add> def test_mod(self) :
<add> tgt = ch.Chebyshev([1])
<add> assert_((self.p4 % self.p1) == tgt)
<add> assert_((self.p4 % [1,2,3]) == tgt)
<add> assert_(([2,2,3] % self.p1) == tgt)
<add>
<add> def test_divmod(self) :
<add> tquo = ch.Chebyshev([1])
<add> trem = ch.Chebyshev([2])
<add> quo, rem = divmod(self.p5, self.p1)
<add> assert_(quo == tquo and rem == trem)
<add> quo, rem = divmod(self.p5, [1,2,3])
<add> assert_(quo == tquo and rem == trem)
<add> quo, rem = divmod([3,2,3], self.p1)
<add> assert_(quo == tquo and rem == trem)
<add>
<add> def test_pow(self) :
<add> tgt = ch.Chebyshev([1])
<add> for i in range(5) :
<add> res = self.p1**i
<add> assert_(res == tgt)
<add> tgt *= self.p1
<add>
<add> def test_call(self) :
<add> # domain = [-1, 1]
<add> x = np.linspace(-1, 1)
<add> tgt = 3*(2*x**2 - 1) + 2*x + 1
<add> assert_almost_equal(self.p1(x), tgt)
<add>
<add> # domain = [0, 1]
<add> x = np.linspace(0, 1)
<add> xx = 2*x - 1
<add> assert_almost_equal(self.p2(x), self.p1(xx))
<add>
<add> def test_convert(self) :
<add> x = np.linspace(-1,1)
<add> p = self.p1.convert(domain=[0,1])
<add> assert_almost_equal(p(x), self.p1(x))
<add>
<add> def test_mapparms(self) :
<add> parms = self.p2.mapparms()
<add> assert_almost_equal(parms, [-1, 2])
<add>
<add> def test_trim(self) :
<add> coef = [1, 1e-6, 1e-12, 0]
<add> p = ch.Chebyshev(coef)
<add> assert_equal(p.trim().coef, coef[:3])
<add> assert_equal(p.trim(1e-10).coef, coef[:2])
<add> assert_equal(p.trim(1e-5).coef, coef[:1])
<add>
<add> def test_truncate(self) :
<add> assert_raises(ValueError, self.p1.truncate, 0)
<add> assert_equal(len(self.p1.truncate(4)), 3)
<add> assert_equal(len(self.p1.truncate(3)), 3)
<add> assert_equal(len(self.p1.truncate(2)), 2)
<add> assert_equal(len(self.p1.truncate(1)), 1)
<add>
<add> def test_copy(self) :
<add> p = self.p1.copy()
<add> assert_(self.p1 == p)
<add>
<add> def test_integ(self) :
<add> p = self.p2.integ()
<add> assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 0, scl=.5))
<add> p = self.p2.integ(1, 1)
<add> assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 1, scl=.5))
<add> p = self.p2.integ(2, [1, 2])
<add> assert_almost_equal(p.coef, ch.chebint([1,2,3], 2, [1,2], scl=.5))
<add>
<add> def test_deriv(self) :
<add> p = self.p2.integ(2, [1, 2])
<add> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<add> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<add>
<add> def test_roots(self) :
<add> p = ch.Chebyshev(ch.poly2cheb([0, -1, 0, 1]), [0, 1])
<add> res = p.roots()
<add> tgt = [0, .5, 1]
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_fromroots(self) :
<add> roots = [0, .5, 1]
<add> p = ch.Chebyshev.fromroots(roots, domain=[0, 1])
<add> res = p.coef
<add> tgt = ch.poly2cheb([0, -1, 0, 1])
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_fit(self) :
<add> def f(x) :
<add> return x*(x - 1)*(x - 2)
<add> x = np.linspace(0,3)
<add> y = f(x)
<add> p = ch.Chebyshev.fit(x, y, 3)
<add> assert_almost_equal(p(x), y)
<add> p = ch.Chebyshev.fit(x, y, 3, None)
<add> assert_almost_equal(p(x), y)
<add> assert_almost_equal(p.domain, [0,3])
<add>
<add> def test_identity(self) :
<add> x = np.linspace(0,3)
<add> p = ch.Chebyshev.identity()
<add> assert_almost_equal(p(x), x)
<add> p = ch.Chebyshev.identity([1,3])
<add> assert_almost_equal(p(x), x)
<ide><path>numpy/polynomial/tests/test_polynomial.py
<add>"""Tests for polynomial module.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>import numpy as np
<add>import numpy.polynomial.polynomial as poly
<add>from numpy.testing import *
<add>from exceptions import TypeError, ValueError
<add>
<add>def trim(x) :
<add> return poly.polytrim(x, tol=1e-6)
<add>
<add>T0 = [ 1]
<add>T1 = [ 0, 1]
<add>T2 = [-1, 0, 2]
<add>T3 = [ 0, -3, 0, 4]
<add>T4 = [ 1, 0, -8, 0, 8]
<add>T5 = [ 0, 5, 0, -20, 0, 16]
<add>T6 = [-1, 0, 18, 0, -48, 0, 32]
<add>T7 = [ 0, -7, 0, 56, 0, -112, 0, 64]
<add>T8 = [ 1, 0, -32, 0, 160, 0, -256, 0, 128]
<add>T9 = [ 0, 9, 0, -120, 0, 432, 0, -576, 0, 256]
<add>
<add>Tlist = [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9]
<add>
<add>
<add>class TestConstants(TestCase) :
<add>
<add> def test_polydomain(self) :
<add> assert_equal(poly.polydomain, [-1, 1])
<add>
<add> def test_polyzero(self) :
<add> assert_equal(poly.polyzero, [0])
<add>
<add> def test_polyone(self) :
<add> assert_equal(poly.polyone, [1])
<add>
<add> def test_polyx(self) :
<add> assert_equal(poly.polyx, [0, 1])
<add>
<add>
<add>class TestArithmetic(TestCase) :
<add>
<add> def test_polyadd(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(max(i,j) + 1)
<add> tgt[i] += 1
<add> tgt[j] += 1
<add> res = poly.polyadd([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_polysub(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(max(i,j) + 1)
<add> tgt[i] += 1
<add> tgt[j] -= 1
<add> res = poly.polysub([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_polymul(self) :
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> tgt = np.zeros(i + j + 1)
<add> tgt[i + j] += 1
<add> res = poly.polymul([0]*i + [1], [0]*j + [1])
<add> assert_equal(trim(res), trim(tgt), err_msg=msg)
<add>
<add> def test_polydiv(self) :
<add> # check zero division
<add> assert_raises(ZeroDivisionError, poly.polydiv, [1], [0])
<add>
<add> # check scalar division
<add> quo, rem = poly.polydiv([2],[2])
<add> assert_equal((quo, rem), (1, 0))
<add> quo, rem = poly.polydiv([2,2],[2])
<add> assert_equal((quo, rem), ((1,1), 0))
<add>
<add> # check rest.
<add> for i in range(5) :
<add> for j in range(5) :
<add> msg = "At i=%d, j=%d" % (i,j)
<add> ci = [0]*i + [1,2]
<add> cj = [0]*j + [1,2]
<add> tgt = poly.polyadd(ci, cj)
<add> quo, rem = poly.polydiv(tgt, ci)
<add> res = poly.polyadd(poly.polymul(quo, ci), rem)
<add> assert_equal(res, tgt, err_msg=msg)
<add>
<add> def test_polyval(self) :
<add> def f(x) :
<add> return x*(x**2 - 1)
<add>
<add> #check empty input
<add> assert_equal(poly.polyval([], 1).size, 0)
<add>
<add> #check normal input
<add> x = np.linspace(-1,1)
<add> for i in range(5) :
<add> tgt = x**i
<add> res = poly.polyval(x, [0]*i + [1])
<add> assert_almost_equal(res, tgt)
<add> tgt = f(x)
<add> res = poly.polyval(x, [0, -1, 0, 1])
<add> assert_almost_equal(res, tgt)
<add>
<add> #check that shape is preserved
<add> for i in range(3) :
<add> dims = [2]*i
<add> x = np.zeros(dims)
<add> assert_equal(poly.polyval(x, [1]).shape, dims)
<add> assert_equal(poly.polyval(x, [1,0]).shape, dims)
<add> assert_equal(poly.polyval(x, [1,0,0]).shape, dims)
<add>
<add>
<add>class TestCalculus(TestCase) :
<add>
<add> def test_polyint(self) :
<add> # check exceptions
<add> assert_raises(ValueError, poly.polyint, [0], -1)
<add> assert_raises(ValueError, poly.polyint, [0], 1, [0,0])
<add> assert_raises(ValueError, poly.polyint, [0], 1, lbnd=[0,0])
<add> assert_raises(ValueError, poly.polyint, [0], 1, scl=[0,0])
<add>
<add> # check single integration with integration constant
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> tgt = [i] + [0]*i + [1/scl]
<add> res = poly.polyint(pol, m=1, k=[i])
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check single integration with integration constant and lbnd
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> res = poly.polyint(pol, m=1, k=[i], lbnd=-1)
<add> assert_almost_equal(poly.polyval(-1, res), i)
<add>
<add> # check single integration with integration constant and scaling
<add> for i in range(5) :
<add> scl = i + 1
<add> pol = [0]*i + [1]
<add> tgt = [i] + [0]*i + [2/scl]
<add> res = poly.polyint(pol, m=1, k=[i], scl=2)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with default k
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = poly.polyint(tgt, m=1)
<add> res = poly.polyint(pol, m=j)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with defined k
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = poly.polyint(tgt, m=1, k=[k])
<add> res = poly.polyint(pol, m=j, k=range(j))
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with lbnd
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = poly.polyint(tgt, m=1, k=[k], lbnd=-1)
<add> res = poly.polyint(pol, m=j, k=range(j), lbnd=-1)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check multiple integrations with scaling
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> pol = [0]*i + [1]
<add> tgt = pol[:]
<add> for k in range(j) :
<add> tgt = poly.polyint(tgt, m=1, k=[k], scl=2)
<add> res = poly.polyint(pol, m=j, k=range(j), scl=2)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> def test_polyder(self) :
<add> # check exceptions
<add> assert_raises(ValueError, poly.polyder, [0], -1)
<add>
<add> # check that zeroth derivative does nothing
<add> for i in range(5) :
<add> tgt = [1] + [0]*i
<add> res = poly.polyder(tgt, m=0)
<add> assert_equal(trim(res), trim(tgt))
<add>
<add> # check that derivation is the inverse of integration
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> tgt = [1] + [0]*i
<add> res = poly.polyder(poly.polyint(tgt, m=j), m=j)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> # check derivation with scaling
<add> for i in range(5) :
<add> for j in range(2,5) :
<add> tgt = [1] + [0]*i
<add> res = poly.polyder(poly.polyint(tgt, m=j, scl=2), m=j, scl=.5)
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add>
<add>class TestMisc(TestCase) :
<add>
<add> def test_polyfromroots(self) :
<add> res = poly.polyfromroots([])
<add> assert_almost_equal(trim(res), [1])
<add> for i in range(1,5) :
<add> roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
<add> tgt = Tlist[i]
<add> res = poly.polyfromroots(roots)*2**(i-1)
<add> assert_almost_equal(trim(res),trim(tgt))
<add>
<add> def test_polyroots(self) :
<add> assert_almost_equal(poly.polyroots([1]), [])
<add> assert_almost_equal(poly.polyroots([1, 2]), [-.5])
<add> for i in range(2,5) :
<add> tgt = np.linspace(-1, 1, i)
<add> res = poly.polyroots(poly.polyfromroots(tgt))
<add> assert_almost_equal(trim(res), trim(tgt))
<add>
<add> def test_polyvander(self) :
<add> # check for 1d x
<add> x = np.arange(3)
<add> v = poly.polyvander(x, 3)
<add> assert_(v.shape == (3,4))
<add> for i in range(4) :
<add> coef = [0]*i + [1]
<add> assert_almost_equal(v[...,i], poly.polyval(x, coef))
<add> # check for 2d x
<add> x = np.array([[1,2],[3,4],[5,6]])
<add> v = poly.polyvander(x, 3)
<add> assert_(v.shape == (3,2,4))
<add> for i in range(4) :
<add> coef = [0]*i + [1]
<add> assert_almost_equal(v[...,i], poly.polyval(x, coef))
<add>
<add> def test_polyfit(self) :
<add> def f(x) :
<add> return x*(x - 1)*(x - 2)
<add> # Test exceptions
<add> assert_raises(ValueError, poly.polyfit, [1], [1], -1)
<add> assert_raises(TypeError, poly.polyfit, [[1]], [1], 0)
<add> assert_raises(TypeError, poly.polyfit, [], [1], 0)
<add> assert_raises(TypeError, poly.polyfit, [1], [[[1]]], 0)
<add> assert_raises(TypeError, poly.polyfit, [1, 2], [1], 0)
<add> assert_raises(TypeError, poly.polyfit, [1], [1, 2], 0)
<add> # Test fit
<add> x = np.linspace(0,2)
<add> y = f(x)
<add> coef = poly.polyfit(x, y, 3)
<add> assert_equal(len(coef), 4)
<add> assert_almost_equal(poly.polyval(x, coef), y)
<add> coef = poly.polyfit(x, y, 4)
<add> assert_equal(len(coef), 5)
<add> assert_almost_equal(poly.polyval(x, coef), y)
<add> coef2d = poly.polyfit(x, np.array([y,y]).T, 4)
<add> assert_almost_equal(coef2d, np.array([coef,coef]).T)
<add>
<add> def test_polytrim(self) :
<add> coef = [2, -1, 1, 0]
<add> # Test exceptions
<add> assert_raises(ValueError, poly.polytrim, coef, -1)
<add> # Test results
<add> assert_equal(poly.polytrim(coef), coef[:-1])
<add> assert_equal(poly.polytrim(coef, 1), coef[:-3])
<add> assert_equal(poly.polytrim(coef, 2), [0])
<add>
<add> def test_polyline(self) :
<add> assert_equal(poly.polyline(3,4), [3, 4])
<add>
<add>class TestPolynomialClass(TestCase) :
<add>
<add> p1 = poly.Polynomial([1,2,3])
<add> p2 = poly.Polynomial([1,2,3], [0,1])
<add> p3 = poly.Polynomial([1,2])
<add> p4 = poly.Polynomial([2,2,3])
<add> p5 = poly.Polynomial([3,2,3])
<add>
<add> def test_equal(self) :
<add> assert_(self.p1 == self.p1)
<add> assert_(self.p2 == self.p2)
<add> assert_(not self.p1 == self.p2)
<add> assert_(not self.p1 == self.p3)
<add> assert_(not self.p1 == [1,2,3])
<add>
<add> def test_not_equal(self) :
<add> assert_(not self.p1 != self.p1)
<add> assert_(not self.p2 != self.p2)
<add> assert_(self.p1 != self.p2)
<add> assert_(self.p1 != self.p3)
<add> assert_(self.p1 != [1,2,3])
<add>
<add> def test_add(self) :
<add> tgt = poly.Polynomial([2,4,6])
<add> assert_(self.p1 + self.p1 == tgt)
<add> assert_(self.p1 + [1,2,3] == tgt)
<add> assert_([1,2,3] + self.p1 == tgt)
<add>
<add> def test_sub(self) :
<add> tgt = poly.Polynomial([1])
<add> assert_(self.p4 - self.p1 == tgt)
<add> assert_(self.p4 - [1,2,3] == tgt)
<add> assert_([2,2,3] - self.p1 == tgt)
<add>
<add> def test_mul(self) :
<add> tgt = poly.Polynomial([1,4,10,12,9])
<add> assert_(self.p1 * self.p1 == tgt)
<add> assert_(self.p1 * [1,2,3] == tgt)
<add> assert_([1,2,3] * self.p1 == tgt)
<add>
<add> def test_floordiv(self) :
<add> tgt = poly.Polynomial([1])
<add> assert_(self.p4 // self.p1 == tgt)
<add> assert_(self.p4 // [1,2,3] == tgt)
<add> assert_([2,2,3] // self.p1 == tgt)
<add>
<add> def test_mod(self) :
<add> tgt = poly.Polynomial([1])
<add> assert_((self.p4 % self.p1) == tgt)
<add> assert_((self.p4 % [1,2,3]) == tgt)
<add> assert_(([2,2,3] % self.p1) == tgt)
<add>
<add> def test_divmod(self) :
<add> tquo = poly.Polynomial([1])
<add> trem = poly.Polynomial([2])
<add> quo, rem = divmod(self.p5, self.p1)
<add> assert_(quo == tquo and rem == trem)
<add> quo, rem = divmod(self.p5, [1,2,3])
<add> assert_(quo == tquo and rem == trem)
<add> quo, rem = divmod([3,2,3], self.p1)
<add> assert_(quo == tquo and rem == trem)
<add>
<add> def test_pow(self) :
<add> tgt = poly.Polynomial([1])
<add> for i in range(5) :
<add> res = self.p1**i
<add> assert_(res == tgt)
<add> tgt *= self.p1
<add>
<add> def test_call(self) :
<add> # domain = [-1, 1]
<add> x = np.linspace(-1, 1)
<add> tgt = (3*x + 2)*x + 1
<add> assert_almost_equal(self.p1(x), tgt)
<add>
<add> # domain = [0, 1]
<add> x = np.linspace(0, 1)
<add> xx = 2*x - 1
<add> assert_almost_equal(self.p2(x), self.p1(xx))
<add>
<add> def test_convert(self) :
<add> x = np.linspace(-1,1)
<add> p = self.p1.convert(domain=[0,1])
<add> assert_almost_equal(p(x), self.p1(x))
<add>
<add> def test_mapparms(self) :
<add> parms = self.p2.mapparms()
<add> assert_almost_equal(parms, [-1, 2])
<add>
<add> def test_trim(self) :
<add> coef = [1, 1e-6, 1e-12, 0]
<add> p = poly.Polynomial(coef)
<add> assert_equal(p.trim().coef, coef[:3])
<add> assert_equal(p.trim(1e-10).coef, coef[:2])
<add> assert_equal(p.trim(1e-5).coef, coef[:1])
<add>
<add> def test_truncate(self) :
<add> assert_raises(ValueError, self.p1.truncate, 0)
<add> assert_equal(len(self.p1.truncate(4)), 3)
<add> assert_equal(len(self.p1.truncate(3)), 3)
<add> assert_equal(len(self.p1.truncate(2)), 2)
<add> assert_equal(len(self.p1.truncate(1)), 1)
<add>
<add> def test_copy(self) :
<add> p = self.p1.copy()
<add> assert_(self.p1 == p)
<add>
<add> def test_integ(self) :
<add> p = self.p2.integ()
<add> assert_almost_equal(p.coef, poly.polyint([1,2,3], 1, 0, scl=.5))
<add> p = self.p2.integ(1, 1)
<add> assert_almost_equal(p.coef, poly.polyint([1,2,3], 1, 1, scl=.5))
<add> p = self.p2.integ(2, [1, 2])
<add> assert_almost_equal(p.coef, poly.polyint([1,2,3], 2, [1, 2], scl=.5))
<add>
<add> def test_deriv(self) :
<add> p = self.p2.integ(2, [1, 2])
<add> assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
<add> assert_almost_equal(p.deriv(2).coef, self.p2.coef)
<add>
<add> def test_roots(self) :
<add> p = poly.Polynomial([0, -1, 0, 1], [0, 1])
<add> res = p.roots()
<add> tgt = [0, .5, 1]
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_fromroots(self) :
<add> roots = [0, .5, 1]
<add> p = poly.Polynomial.fromroots(roots, domain=[0, 1])
<add> res = p.coef
<add> tgt = [0, -1, 0, 1]
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_fit(self) :
<add> def f(x) :
<add> return x*(x - 1)*(x - 2)
<add> x = np.linspace(0,3)
<add> y = f(x)
<add> p = poly.Polynomial.fit(x, y, 3)
<add> assert_almost_equal(p(x), y)
<add> p = poly.Polynomial.fit(x, y, 3, None)
<add> assert_almost_equal(p(x), y)
<add> assert_almost_equal(p.domain, [0,3])
<add>
<add> def test_identity(self) :
<add> x = np.linspace(0,3)
<add> p = poly.Polynomial.identity()
<add> assert_almost_equal(p(x), x)
<add> p = poly.Polynomial.identity([1,3])
<add> assert_almost_equal(p(x), x)
<ide><path>numpy/polynomial/tests/test_polyutils.py
<add>"""Tests for polyutils module.
<add>
<add>"""
<add>from __future__ import division
<add>
<add>import numpy as np
<add>import numpy.polynomial.polyutils as pu
<add>from numpy.testing import *
<add>from exceptions import TypeError, ValueError
<add>
<add>class TestMisc(TestCase) :
<add>
<add> def test_trimseq(self) :
<add> for i in range(5) :
<add> tgt = [1]
<add> res = pu.trimseq([1] + [0]*5)
<add> assert_equal(res, tgt)
<add>
<add> def test_as_series(self) :
<add> # check exceptions
<add> assert_raises(ValueError, pu.as_series, [[]])
<add> assert_raises(ValueError, pu.as_series, [[[1,2]]])
<add> assert_raises(ValueError, pu.as_series, [[1],['a']])
<add> # check common types
<add> types = ['i', 'd', 'O']
<add> for i in range(len(types)) :
<add> for j in range(i) :
<add> ci = np.ones(1, types[i])
<add> cj = np.ones(1, types[j])
<add> [resi, resj] = pu.as_series([ci, cj])
<add> assert_(resi.dtype.char == resj.dtype.char)
<add> assert_(resj.dtype.char == types[i])
<add>
<add> def test_trimcoef(self) :
<add> coef = [2, -1, 1, 0]
<add> # Test exceptions
<add> assert_raises(ValueError, pu.trimcoef, coef, -1)
<add> # Test results
<add> assert_equal(pu.trimcoef(coef), coef[:-1])
<add> assert_equal(pu.trimcoef(coef, 1), coef[:-3])
<add> assert_equal(pu.trimcoef(coef, 2), [0])
<add>
<add>
<add>class TestDomain(TestCase) :
<add>
<add> def test_getdomain(self) :
<add> # test for real values
<add> x = [1, 10, 3, -1]
<add> tgt = [-1,10]
<add> res = pu.getdomain(x)
<add> assert_almost_equal(res, tgt)
<add>
<add> # test for complex values
<add> x = [1 + 1j, 1 - 1j, 0, 2]
<add> tgt = [-1j, 2 + 1j]
<add> res = pu.getdomain(x)
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_mapdomain(self) :
<add> # test for real values
<add> dom1 = [0,4]
<add> dom2 = [1,3]
<add> tgt = dom2
<add> res = pu. mapdomain(dom1, dom1, dom2)
<add> assert_almost_equal(res, tgt)
<add>
<add> # test for complex values
<add> dom1 = [0 - 1j, 2 + 1j]
<add> dom2 = [-2, 2]
<add> tgt = dom2
<add> res = pu.mapdomain(dom1, dom1, dom2)
<add> assert_almost_equal(res, tgt)
<add>
<add> def test_mapparms(self) :
<add> # test for real values
<add> dom1 = [0,4]
<add> dom2 = [1,3]
<add> tgt = [1, .5]
<add> res = pu. mapparms(dom1, dom2)
<add> assert_almost_equal(res, tgt)
<add>
<add> # test for complex values
<add> dom1 = [0 - 1j, 2 + 1j]
<add> dom2 = [-2, 2]
<add> tgt = [-1 + 1j, 1 - 1j]
<add> res = pu.mapparms(dom1, dom2)
<add> assert_almost_equal(res, tgt)
<ide><path>numpy/setup.py
<ide> def configuration(parent_package='',top_path=None):
<ide> config.add_subpackage('ma')
<ide> config.add_subpackage('matrixlib')
<ide> config.add_subpackage('compat')
<add> config.add_subpackage('polynomial')
<ide> config.add_subpackage('doc')
<ide> config.add_data_dir('doc')
<ide> config.add_data_dir('tests') | 11 |
Text | Text | add chinese translation of multiple-components | 833fcb65937e9c6e9610682f23d47963e9f641bf | <ide><path>docs/docs/04-multiple-components.zh-CN.md
<ide> ---
<del>id: multiple-componentsm-zh-CN
<add>id: multiple-components-zh-CN
<ide> title: 复合组件
<ide> permalink: multiple-components-zh-CN.html
<ide> prev: interactivity-and-dynamic-uis-zh-CN.html
<ide> React.render(
<ide> 上面例子中,`Avatar` 拥有 `ProfilePic` 和 `ProfileLink` 的实例。`拥有者` 就是给其它组件设置 `props` 的那个组件。更正式地说,
<ide> 如果组件 `Y` 在 `render()` 方法是创建了组件 `X`,那么 `Y` 就拥有 `X`。上面讲过,组件不能修改自身的 `props` - 它们总是与它们拥有者设置的保持一致。这是保持用户界面一致性的关键性原则。
<ide>
<del>把从属关系与父子关系加以区别至关重要。从属关系是 React 特有的,父子关系与你所熟知的 DOM 里的是一样的。在上一个例子中,`Avatar` 拥有 `div`、`ProfilePic` 和 `ProfileLink` 实例,`div` 是 `ProfilePic` 和 `ProfileLink` 实例的**父级**(但不是拥有者)。
<add>把从属关系与父子关系加以区别至关重要。从属关系是 React 特有的,而父子关系简单来讲就是DOM 里的标签的关系。在上一个例子中,`Avatar` 拥有 `div`、`ProfilePic` 和 `ProfileLink` 实例,`div` 是 `ProfilePic` 和 `ProfileLink` 实例的**父级**(但不是拥有者)。
<ide>
<ide>
<ide> ## 子级
<ide>
<del>实例化 React 组件时,可以在标签开始和关闭的位置包含其它 React 组件:
<add>实例化 React 组件时,你可以在开始标签和结束标签之间引用其它 React 组件或者 JavaScript 表达式:
<ide>
<ide> ```javascript
<ide> <Parent><Child /></Parent>
<ide> React.render(
<ide>
<ide> 直观来看,只是删除了`<p>Paragraph 1</p>`。事实上,React 先更新第一个子级的内容,然后删除最后一个组件。React 是根据子级的*顺序*来校正的。
<ide>
<del>### 状态化子级
<add>### 子组件状态管理
<ide>
<ide> 对于大多数组件,这没什么大碍。但是,对于使用 `this.state` 来在多次渲染过程中里维持数据的状态化组件,这样做潜在很多问题。
<ide>
<ide> React.render(
<ide>
<ide> ### 动态子级
<ide>
<del>如果子级被打乱(如在搜索结果中)或者有新组件添加到列表开头(如在流中)情况会变得更加复杂。如果子级要在多个渲染阶段保持自己的特征和状态,在这种情况下,你可以通过给子级设置惟一的 `key` 来区分。
<add>如果子组件位置会改变(如在搜索结果中)或者有新组件添加到列表开头(如在流中)情况会变得更加复杂。如果子级要在多个渲染阶段保持自己的特征和状态,在这种情况下,你可以通过给子级设置惟一标识的 `key` 来区分。
<ide>
<ide> ```javascript
<ide> render: function() {
<ide> React 里,数据通过上面介绍过的 `props` 从拥有者流向归属者
<ide>
<ide> > 注意:
<ide> >
<del>> 如果在数据变化时让 `shouldComponentUpdate()` 返回 false,React 就不能保证用户界面同步。当使用它的时候一定确保你清楚到底做了什么,并且只在遇到明显性能问题的时候才使用它。不要低估 JavaScript 的速度,DOM 操作通常才是慢的原因。
<ide>\ No newline at end of file
<add>> 如果在数据变化时让 `shouldComponentUpdate()` 返回 false,React 就不能保证用户界面同步。当使用它的时候一定确保你清楚到底做了什么,并且只在遇到明显性能问题的时候才使用它。不要低估 JavaScript 的速度,DOM 操作通常才是慢的原因。 | 1 |
Ruby | Ruby | implement limit & offset for ourselves | 04309aee82468fa4c4b3d92a533e84a96533f236 | <ide><path>activerecord/lib/active_record/relation/finder_methods.rb
<ide> def find_one(id)
<ide> end
<ide>
<ide> def find_some(ids)
<add> return find_some_ordered(ids) unless order_values.present?
<add>
<add> result = where(primary_key => ids).to_a
<add>
<ide> expected_size =
<ide> if limit_value && ids.size > limit_value
<ide> limit_value
<ide> def find_some(ids)
<ide> # 11 ids with limit 3, offset 9 should give 2 results.
<ide> if offset_value && (ids.size - offset_value < expected_size)
<ide> expected_size = ids.size - offset_value
<add> end
<add>
<add> if result.size == expected_size
<add> result
<ide> else
<del> ids = ids.first(expected_size) if order_values.empty?
<add> raise_record_not_found_exception!(ids, result.size, expected_size)
<ide> end
<add> end
<ide>
<del> result = where(primary_key => ids).to_a
<add> def find_some_ordered(ids)
<add> ids = ids.slice(offset_value || 0, limit_value || ids.size) || []
<add>
<add> result = except(:limit, :offset).where(primary_key => ids).to_a
<add>
<add> if result.size == ids.size
<add> pk_type = @klass.type_for_attribute(primary_key)
<ide>
<del> if result.size == expected_size
<del> return result if order_values.present?
<ide> records_by_id = result.index_by(&:id)
<del> ids.collect { |id| records_by_id[id.to_i] }.compact
<add> ids.map { |id| records_by_id.fetch(pk_type.cast(id)) }
<ide> else
<del> raise_record_not_found_exception!(ids, result.size, expected_size)
<add> raise_record_not_found_exception!(ids, result.size, ids.size)
<ide> end
<ide> end
<ide>
<ide><path>activerecord/test/cases/finder_test.rb
<ide> def test_find_with_ids_where_and_limit
<ide> assert_equal 'The Fifth Topic of the day', records[2].title
<ide> end
<ide>
<del> def test_find_with_ids_and_offset # failing with offset
<add> def test_find_with_ids_and_offset
<ide> records = Topic.offset(2).find([3,2,5,1,4])
<ide> assert_equal 3, records.size
<ide> assert_equal 'The Fifth Topic of the day', records[0].title
<ide> def test_find_by_ids
<ide> assert_equal topics(:second).title, Topic.find([2]).first.title
<ide> end
<ide>
<del> def test_find_by_ids_with_limit_and_offset # failing with offset
<add> def test_find_by_ids_with_limit_and_offset
<ide> assert_equal 2, Entrant.limit(2).find([1,3,2]).size
<ide> entrants = Entrant.limit(3).offset(2).find([1,3,2])
<ide> assert_equal 1, entrants.size | 2 |
PHP | PHP | add test covering | fa09901b384668e6a45022299f8726dd92f73bba | <ide><path>tests/TestCase/ORM/Association/HasManyTest.php
<ide> public function testSaveReplaceSaveStrategy()
<ide> $this->assertTrue($authors->Articles->exists(['id' => $articleId]));
<ide> }
<ide>
<add> /**
<add> * Test that save works with replace saveStrategy conditions
<add> *
<add> * @return void
<add> */
<add> public function testSaveReplaceSaveStrategyClosureConditions()
<add> {
<add> $authors = $this->getTableLocator()->get('Authors');
<add> $authors->hasMany('Articles')
<add> ->setDependent(true)
<add> ->setSaveStrategy('replace')
<add> ->setConditions(function () {
<add> return ['published' => 'Y'];
<add> });
<add>
<add> $entity = $authors->newEntity([
<add> 'name' => 'mylux',
<add> 'articles' => [
<add> ['title' => 'Not matching conditions', 'body' => '', 'published' => 'N'],
<add> ['title' => 'Random Post', 'body' => 'The cake is nice', 'published' => 'Y'],
<add> ['title' => 'Another Random Post', 'body' => 'The cake is yummy', 'published' => 'Y'],
<add> ['title' => 'One more random post', 'body' => 'The cake is forever', 'published' => 'Y'],
<add> ],
<add> ], ['associated' => ['Articles']]);
<add>
<add> $entity = $authors->save($entity, ['associated' => ['Articles']]);
<add> $sizeArticles = count($entity->articles);
<add> // Should be one fewer because of conditions.
<add> $this->assertSame($sizeArticles - 1, $authors->Articles->find('all')->where(['author_id' => $entity['id']])->count());
<add>
<add> $articleId = $entity->articles[0]->id;
<add> unset($entity->articles[0], $entity->articles[1]);
<add> $entity->setDirty('articles', true);
<add>
<add> $authors->save($entity, ['associated' => ['Articles']]);
<add>
<add> $this->assertSame($sizeArticles - 2, $authors->Articles->find('all')->where(['author_id' => $entity['id']])->count());
<add>
<add> // Should still exist because it doesn't match the association conditions.
<add> $articles = $this->getTableLocator()->get('Articles');
<add> $this->assertTrue($articles->exists(['id' => $articleId]));
<add> }
<add>
<ide> /**
<ide> * Test that save works with replace saveStrategy, replacing the already persisted entities even if no new entities are passed
<ide> * | 1 |
Python | Python | display a default message | 3dddbfdf00f2b99ef41b349c35c7dd21b1d9f459 | <ide><path>glances/monitor_list.py
<ide> def update(self):
<ide> monitoredlist = [p for p in processlist if re.search(self.regex(i), p['cmdline']) is not None]
<ide> self.__monitor_list[i]['count'] = len(monitoredlist)
<ide>
<add> # Always get processes CPU and MEM
<add> self.__monitor_list[i]['default_result'] = 'CPU: {0:.1f}% | MEM: {1:.1f}%'.format(
<add> sum([p['cpu_percent'] for p in monitoredlist]),
<add> sum([p['memory_percent'] for p in monitoredlist]))
<add>
<ide> if self.command(i) is not None:
<ide> # Execute the user command line
<ide> try:
<ide> def update(self):
<ide> if self.command(i) is None or self.__monitor_list[i]['result'] == '':
<ide> # If there is no command specified in the conf file
<ide> # then display CPU and MEM %
<del> self.__monitor_list[i]['result'] = 'CPU: {0:.1f}% | MEM: {1:.1f}%'.format(
<del> sum([p['cpu_percent'] for p in monitoredlist]),
<del> sum([p['memory_percent'] for p in monitoredlist]))
<add> self.__monitor_list[i]['result'] = self.__monitor_list[i]['default_result']
<ide>
<ide> return self.__monitor_list
<ide>
<ide><path>glances/plugins/glances_monitor.py
<ide> def msg_curse(self, args=None):
<ide> try:
<ide> msg = u(m['result']) if m['count'] >= 1 else ''
<ide> except UnicodeEncodeError:
<del> # Hack if return message contain accent letter (non UTF-8 compliant)
<del> msg = ''
<add> # Hack if return message contains non UTF-8 compliant char
<add> msg = u(m['default_result']) if m['count'] >= 1 else ''
<ide> ret.append(self.curse_add_line(msg, optional=True, splittable=True))
<ide> ret.append(self.curse_new_line())
<ide> | 2 |
Javascript | Javascript | add download and hreflang attributes | ae7e44ec84b656213b546fb0596adb266427e76a | <ide><path>src/browser/dom/DefaultDOMPropertyConfig.js
<ide> var DefaultDOMPropertyConfig = {
<ide> defer: HAS_BOOLEAN_VALUE,
<ide> dir: null,
<ide> disabled: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
<add> download: null,
<ide> draggable: null,
<ide> encType: null,
<ide> form: MUST_USE_ATTRIBUTE,
<ide> var DefaultDOMPropertyConfig = {
<ide> height: MUST_USE_ATTRIBUTE,
<ide> hidden: MUST_USE_ATTRIBUTE | HAS_BOOLEAN_VALUE,
<ide> href: null,
<add> hrefLang: null,
<ide> htmlFor: null,
<ide> httpEquiv: null,
<ide> icon: null,
<ide> var DefaultDOMPropertyConfig = {
<ide> autoFocus: 'autofocus',
<ide> autoPlay: 'autoplay',
<ide> encType: 'enctype',
<add> hrefLang: 'hreflang',
<ide> radioGroup: 'radiogroup',
<ide> spellCheck: 'spellcheck',
<ide> srcDoc: 'srcdoc' | 1 |
Javascript | Javascript | add test for validation for wasi.start() argument | decc5f5506bfaeac9f09ed1d12b4f519bc0ac250 | <ide><path>test/wasi/test-wasi-start-validation.js
<add>'use strict';
<add>
<add>require('../common');
<add>const assert = require('assert');
<add>const { WASI } = require('wasi');
<add>
<add>const fixtures = require('../common/fixtures');
<add>
<add>{
<add> const wasi = new WASI();
<add> assert.throws(
<add> () => {
<add> wasi.start();
<add> },
<add> { code: 'ERR_INVALID_ARG_TYPE', message: /\bWebAssembly\.Instance\b/ }
<add> );
<add>}
<add>
<add>{
<add> const wasi = new WASI({});
<add> (async () => {
<add> const bufferSource = fixtures.readSync('simple.wasm');
<add> const wasm = await WebAssembly.compile(bufferSource);
<add> const instance = await WebAssembly.instantiate(wasm);
<add>
<add> assert.throws(
<add> () => { wasi.start(instance); },
<add> { code: 'ERR_INVALID_ARG_TYPE', message: /\bWebAssembly\.Memory\b/ }
<add> );
<add> })();
<add>} | 1 |
Python | Python | fix attributeerror on renamed _field_mapping | 760b25bc20a1434cbdd69dc0b13bacdc3bbedd7c | <ide><path>rest_framework/serializers.py
<ide> def get_unique_for_date_validators(self):
<ide>
<ide>
<ide> if hasattr(models, 'UUIDField'):
<del> ModelSerializer._field_mapping[models.UUIDField] = UUIDField
<add> ModelSerializer.serializer_field_mapping[models.UUIDField] = UUIDField
<ide>
<ide> if postgres_fields:
<ide> class CharMappingField(DictField):
<ide> child = CharField()
<ide>
<del> ModelSerializer._field_mapping[postgres_fields.HStoreField] = CharMappingField
<add> ModelSerializer.serializer_field_mapping[postgres_fields.HStoreField] = CharMappingField
<ide>
<ide>
<ide> class HyperlinkedModelSerializer(ModelSerializer): | 1 |
PHP | PHP | fix duplicate implements | ab3a6af87d07632e5d5643bd4987902c6fe239db | <ide><path>src/Illuminate/Filesystem/FilesystemAdapter.php
<ide> /**
<ide> * @mixin \League\Flysystem\FilesystemInterface
<ide> */
<del>class FilesystemAdapter implements CloudFilesystemContract, FilesystemContract
<add>class FilesystemAdapter implements CloudFilesystemContract
<ide> {
<ide> /**
<ide> * The Flysystem filesystem implementation. | 1 |
Ruby | Ruby | remove vendored version of rack | b69da86ea545b342036fb37a472ec5abefaf3bd5 | <ide><path>actionpack/lib/action_dispatch.rb
<ide> end
<ide> require 'active_support/core/all'
<ide>
<del>$:.unshift "#{File.dirname(__FILE__)}/action_dispatch/vendor/rack-1.0"
<del>begin
<del> gem 'rack', '~> 1.0.0'
<del> require 'rack'
<del>rescue Gem::LoadError
<del> require 'action_dispatch/vendor/rack-1.0/rack'
<del>end
<add>gem 'rack', '~> 1.0.0'
<add>require 'rack'
<ide>
<ide> module ActionDispatch
<ide> autoload :Request, 'action_dispatch/http/request'
<ide><path>actionpack/lib/action_dispatch/http/request.rb
<ide> def content_type
<ide> end
<ide> end
<ide> end
<del>
<add>
<add> def media_type
<add> content_type.to_s
<add> end
<add>
<ide> # Returns the accepted MIME type for the request.
<ide> def accepts
<ide> @accepts ||= begin
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack.rb
<del># Copyright (C) 2007, 2008, 2009 Christian Neukirchen <purl.org/net/chneukirchen>
<del>#
<del># Rack is freely distributable under the terms of an MIT-style license.
<del># See COPYING or http://www.opensource.org/licenses/mit-license.php.
<del>
<del>$:.unshift(File.expand_path(File.dirname(__FILE__)))
<del>
<del>
<del># The Rack main module, serving as a namespace for all core Rack
<del># modules and classes.
<del>#
<del># All modules meant for use in your application are <tt>autoload</tt>ed here,
<del># so it should be enough just to <tt>require rack.rb</tt> in your code.
<del>
<del>module Rack
<del> # The Rack protocol version number implemented.
<del> VERSION = [0,1]
<del>
<del> # Return the Rack protocol version as a dotted string.
<del> def self.version
<del> VERSION.join(".")
<del> end
<del>
<del> # Return the Rack release as a dotted string.
<del> def self.release
<del> "1.0 bundled"
<del> end
<del>
<del> autoload :Builder, "rack/builder"
<del> autoload :Cascade, "rack/cascade"
<del> autoload :Chunked, "rack/chunked"
<del> autoload :CommonLogger, "rack/commonlogger"
<del> autoload :ConditionalGet, "rack/conditionalget"
<del> autoload :ContentLength, "rack/content_length"
<del> autoload :ContentType, "rack/content_type"
<del> autoload :File, "rack/file"
<del> autoload :Deflater, "rack/deflater"
<del> autoload :Directory, "rack/directory"
<del> autoload :ForwardRequest, "rack/recursive"
<del> autoload :Handler, "rack/handler"
<del> autoload :Head, "rack/head"
<del> autoload :Lint, "rack/lint"
<del> autoload :Lock, "rack/lock"
<del> autoload :MethodOverride, "rack/methodoverride"
<del> autoload :Mime, "rack/mime"
<del> autoload :Recursive, "rack/recursive"
<del> autoload :Reloader, "rack/reloader"
<del> autoload :ShowExceptions, "rack/showexceptions"
<del> autoload :ShowStatus, "rack/showstatus"
<del> autoload :Static, "rack/static"
<del> autoload :URLMap, "rack/urlmap"
<del> autoload :Utils, "rack/utils"
<del>
<del> autoload :MockRequest, "rack/mock"
<del> autoload :MockResponse, "rack/mock"
<del>
<del> autoload :Request, "rack/request"
<del> autoload :Response, "rack/response"
<del>
<del> module Auth
<del> autoload :Basic, "rack/auth/basic"
<del> autoload :AbstractRequest, "rack/auth/abstract/request"
<del> autoload :AbstractHandler, "rack/auth/abstract/handler"
<del> autoload :OpenID, "rack/auth/openid"
<del> module Digest
<del> autoload :MD5, "rack/auth/digest/md5"
<del> autoload :Nonce, "rack/auth/digest/nonce"
<del> autoload :Params, "rack/auth/digest/params"
<del> autoload :Request, "rack/auth/digest/request"
<del> end
<del> end
<del>
<del> module Session
<del> autoload :Cookie, "rack/session/cookie"
<del> autoload :Pool, "rack/session/pool"
<del> autoload :Memcache, "rack/session/memcache"
<del> end
<del>
<del> # *Adapters* connect Rack with third party web frameworks.
<del> #
<del> # Rack includes an adapter for Camping, see README for other
<del> # frameworks supporting Rack in their code bases.
<del> #
<del> # Refer to the submodules for framework-specific calling details.
<del>
<del> module Adapter
<del> autoload :Camping, "rack/adapter/camping"
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/adapter/camping.rb
<del>module Rack
<del> module Adapter
<del> class Camping
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> env["PATH_INFO"] ||= ""
<del> env["SCRIPT_NAME"] ||= ""
<del> controller = @app.run(env['rack.input'], env)
<del> h = controller.headers
<del> h.each_pair do |k,v|
<del> if v.kind_of? URI
<del> h[k] = v.to_s
<del> end
<del> end
<del> [controller.status, controller.headers, [controller.body.to_s]]
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/abstract/handler.rb
<del>module Rack
<del> module Auth
<del> # Rack::Auth::AbstractHandler implements common authentication functionality.
<del> #
<del> # +realm+ should be set for all handlers.
<del>
<del> class AbstractHandler
<del>
<del> attr_accessor :realm
<del>
<del> def initialize(app, realm=nil, &authenticator)
<del> @app, @realm, @authenticator = app, realm, authenticator
<del> end
<del>
<del>
<del> private
<del>
<del> def unauthorized(www_authenticate = challenge)
<del> return [ 401,
<del> { 'Content-Type' => 'text/plain',
<del> 'Content-Length' => '0',
<del> 'WWW-Authenticate' => www_authenticate.to_s },
<del> []
<del> ]
<del> end
<del>
<del> def bad_request
<del> return [ 400,
<del> { 'Content-Type' => 'text/plain',
<del> 'Content-Length' => '0' },
<del> []
<del> ]
<del> end
<del>
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/abstract/request.rb
<del>module Rack
<del> module Auth
<del> class AbstractRequest
<del>
<del> def initialize(env)
<del> @env = env
<del> end
<del>
<del> def provided?
<del> !authorization_key.nil?
<del> end
<del>
<del> def parts
<del> @parts ||= @env[authorization_key].split(' ', 2)
<del> end
<del>
<del> def scheme
<del> @scheme ||= parts.first.downcase.to_sym
<del> end
<del>
<del> def params
<del> @params ||= parts.last
<del> end
<del>
<del>
<del> private
<del>
<del> AUTHORIZATION_KEYS = ['HTTP_AUTHORIZATION', 'X-HTTP_AUTHORIZATION', 'X_HTTP_AUTHORIZATION']
<del>
<del> def authorization_key
<del> @authorization_key ||= AUTHORIZATION_KEYS.detect { |key| @env.has_key?(key) }
<del> end
<del>
<del> end
<del>
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/basic.rb
<del>require 'rack/auth/abstract/handler'
<del>require 'rack/auth/abstract/request'
<del>
<del>module Rack
<del> module Auth
<del> # Rack::Auth::Basic implements HTTP Basic Authentication, as per RFC 2617.
<del> #
<del> # Initialize with the Rack application that you want protecting,
<del> # and a block that checks if a username and password pair are valid.
<del> #
<del> # See also: <tt>example/protectedlobster.rb</tt>
<del>
<del> class Basic < AbstractHandler
<del>
<del> def call(env)
<del> auth = Basic::Request.new(env)
<del>
<del> return unauthorized unless auth.provided?
<del>
<del> return bad_request unless auth.basic?
<del>
<del> if valid?(auth)
<del> env['REMOTE_USER'] = auth.username
<del>
<del> return @app.call(env)
<del> end
<del>
<del> unauthorized
<del> end
<del>
<del>
<del> private
<del>
<del> def challenge
<del> 'Basic realm="%s"' % realm
<del> end
<del>
<del> def valid?(auth)
<del> @authenticator.call(*auth.credentials)
<del> end
<del>
<del> class Request < Auth::AbstractRequest
<del> def basic?
<del> :basic == scheme
<del> end
<del>
<del> def credentials
<del> @credentials ||= params.unpack("m*").first.split(/:/, 2)
<del> end
<del>
<del> def username
<del> credentials.first
<del> end
<del> end
<del>
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/digest/md5.rb
<del>require 'rack/auth/abstract/handler'
<del>require 'rack/auth/digest/request'
<del>require 'rack/auth/digest/params'
<del>require 'rack/auth/digest/nonce'
<del>require 'digest/md5'
<del>
<del>module Rack
<del> module Auth
<del> module Digest
<del> # Rack::Auth::Digest::MD5 implements the MD5 algorithm version of
<del> # HTTP Digest Authentication, as per RFC 2617.
<del> #
<del> # Initialize with the [Rack] application that you want protecting,
<del> # and a block that looks up a plaintext password for a given username.
<del> #
<del> # +opaque+ needs to be set to a constant base64/hexadecimal string.
<del> #
<del> class MD5 < AbstractHandler
<del>
<del> attr_accessor :opaque
<del>
<del> attr_writer :passwords_hashed
<del>
<del> def initialize(*args)
<del> super
<del> @passwords_hashed = nil
<del> end
<del>
<del> def passwords_hashed?
<del> !!@passwords_hashed
<del> end
<del>
<del> def call(env)
<del> auth = Request.new(env)
<del>
<del> unless auth.provided?
<del> return unauthorized
<del> end
<del>
<del> if !auth.digest? || !auth.correct_uri? || !valid_qop?(auth)
<del> return bad_request
<del> end
<del>
<del> if valid?(auth)
<del> if auth.nonce.stale?
<del> return unauthorized(challenge(:stale => true))
<del> else
<del> env['REMOTE_USER'] = auth.username
<del>
<del> return @app.call(env)
<del> end
<del> end
<del>
<del> unauthorized
<del> end
<del>
<del>
<del> private
<del>
<del> QOP = 'auth'.freeze
<del>
<del> def params(hash = {})
<del> Params.new do |params|
<del> params['realm'] = realm
<del> params['nonce'] = Nonce.new.to_s
<del> params['opaque'] = H(opaque)
<del> params['qop'] = QOP
<del>
<del> hash.each { |k, v| params[k] = v }
<del> end
<del> end
<del>
<del> def challenge(hash = {})
<del> "Digest #{params(hash)}"
<del> end
<del>
<del> def valid?(auth)
<del> valid_opaque?(auth) && valid_nonce?(auth) && valid_digest?(auth)
<del> end
<del>
<del> def valid_qop?(auth)
<del> QOP == auth.qop
<del> end
<del>
<del> def valid_opaque?(auth)
<del> H(opaque) == auth.opaque
<del> end
<del>
<del> def valid_nonce?(auth)
<del> auth.nonce.valid?
<del> end
<del>
<del> def valid_digest?(auth)
<del> digest(auth, @authenticator.call(auth.username)) == auth.response
<del> end
<del>
<del> def md5(data)
<del> ::Digest::MD5.hexdigest(data)
<del> end
<del>
<del> alias :H :md5
<del>
<del> def KD(secret, data)
<del> H([secret, data] * ':')
<del> end
<del>
<del> def A1(auth, password)
<del> [ auth.username, auth.realm, password ] * ':'
<del> end
<del>
<del> def A2(auth)
<del> [ auth.method, auth.uri ] * ':'
<del> end
<del>
<del> def digest(auth, password)
<del> password_hash = passwords_hashed? ? password : H(A1(auth, password))
<del>
<del> KD(password_hash, [ auth.nonce, auth.nc, auth.cnonce, QOP, H(A2(auth)) ] * ':')
<del> end
<del>
<del> end
<del> end
<del> end
<del>end
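
A rough sketch of setting up the MD5 digest handler. The password table, realm, opaque value and private key below are placeholders; a real deployment would use its own constants:

    require 'rack'

    inner_app = lambda { |env|
      [200, { 'Content-Type' => 'text/plain' }, ["Hello, #{env['REMOTE_USER']}"]]
    }
    PASSWORDS = { 'admin' => 'secret' }    # illustrative plaintext passwords

    app = Rack::Auth::Digest::MD5.new(inner_app, 'Restricted Area') do |username|
      PASSWORDS[username]                  # nil for unknown users
    end
    app.opaque = 'a-constant-opaque-string'
    Rack::Auth::Digest::Nonce.private_key = 'a-constant-private-key'
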
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/digest/nonce.rb
<del>require 'digest/md5'
<del>
<del>module Rack
<del> module Auth
<del> module Digest
<del> # Rack::Auth::Digest::Nonce is the default nonce generator for the
<del> # Rack::Auth::Digest::MD5 authentication handler.
<del> #
<del>      # +private_key+ needs to be set to a constant string.
<del> #
<del> # +time_limit+ can be optionally set to an integer (number of seconds),
<del> # to limit the validity of the generated nonces.
<del>
<del> class Nonce
<del>
<del> class << self
<del> attr_accessor :private_key, :time_limit
<del> end
<del>
<del> def self.parse(string)
<del> new(*string.unpack("m*").first.split(' ', 2))
<del> end
<del>
<del> def initialize(timestamp = Time.now, given_digest = nil)
<del> @timestamp, @given_digest = timestamp.to_i, given_digest
<del> end
<del>
<del> def to_s
<del> [([ @timestamp, digest ] * ' ')].pack("m*").strip
<del> end
<del>
<del> def digest
<del> ::Digest::MD5.hexdigest([ @timestamp, self.class.private_key ] * ':')
<del> end
<del>
<del> def valid?
<del> digest == @given_digest
<del> end
<del>
<del> def stale?
<del> !self.class.time_limit.nil? && (@timestamp - Time.now.to_i) < self.class.time_limit
<del> end
<del>
<del> def fresh?
<del> !stale?
<del> end
<del>
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/digest/params.rb
<del>module Rack
<del> module Auth
<del> module Digest
<del> class Params < Hash
<del>
<del> def self.parse(str)
<del> split_header_value(str).inject(new) do |header, param|
<del> k, v = param.split('=', 2)
<del> header[k] = dequote(v)
<del> header
<del> end
<del> end
<del>
<del> def self.dequote(str) # From WEBrick::HTTPUtils
<del> ret = (/\A"(.*)"\Z/ =~ str) ? $1 : str.dup
<del> ret.gsub!(/\\(.)/, "\\1")
<del> ret
<del> end
<del>
<del> def self.split_header_value(str)
<del> str.scan( /(\w+\=(?:"[^\"]+"|[^,]+))/n ).collect{ |v| v[0] }
<del> end
<del>
<del> def initialize
<del> super
<del>
<del> yield self if block_given?
<del> end
<del>
<del> def [](k)
<del> super k.to_s
<del> end
<del>
<del> def []=(k, v)
<del> super k.to_s, v.to_s
<del> end
<del>
<del> UNQUOTED = ['qop', 'nc', 'stale']
<del>
<del> def to_s
<del> inject([]) do |parts, (k, v)|
<del> parts << "#{k}=" + (UNQUOTED.include?(k) ? v.to_s : quote(v))
<del> parts
<del> end.join(', ')
<del> end
<del>
<del> def quote(str) # From WEBrick::HTTPUtils
<del> '"' << str.gsub(/[\\\"]/o, "\\\1") << '"'
<del> end
<del>
<del> end
<del> end
<del> end
<del>end
<del>
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/digest/request.rb
<del>require 'rack/auth/abstract/request'
<del>require 'rack/auth/digest/params'
<del>require 'rack/auth/digest/nonce'
<del>
<del>module Rack
<del> module Auth
<del> module Digest
<del> class Request < Auth::AbstractRequest
<del>
<del> def method
<del> @env['rack.methodoverride.original_method'] || @env['REQUEST_METHOD']
<del> end
<del>
<del> def digest?
<del> :digest == scheme
<del> end
<del>
<del> def correct_uri?
<del> (@env['SCRIPT_NAME'].to_s + @env['PATH_INFO'].to_s) == uri
<del> end
<del>
<del> def nonce
<del> @nonce ||= Nonce.parse(params['nonce'])
<del> end
<del>
<del> def params
<del> @params ||= Params.parse(parts.last)
<del> end
<del>
<del> def method_missing(sym)
<del> if params.has_key? key = sym.to_s
<del> return params[key]
<del> end
<del> super
<del> end
<del>
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/auth/openid.rb
<del># AUTHOR: blink <[email protected]>; blink#[email protected]
<del>
<del>gem 'ruby-openid', '~> 2' if defined? Gem
<del>require 'rack/request'
<del>require 'rack/utils'
<del>require 'rack/auth/abstract/handler'
<del>require 'uri'
<del>require 'openid' #gem
<del>require 'openid/extension' #gem
<del>require 'openid/store/memory' #gem
<del>
<del>module Rack
<del> class Request
<del> def openid_request
<del> @env['rack.auth.openid.request']
<del> end
<del>
<del> def openid_response
<del> @env['rack.auth.openid.response']
<del> end
<del> end
<del>
<del> module Auth
<del>
<del> # Rack::Auth::OpenID provides a simple method for setting up an OpenID
<del> # Consumer. It requires the ruby-openid library from janrain to operate,
<del> # as well as a rack method of session management.
<del> #
<del> # The ruby-openid home page is at http://openidenabled.com/ruby-openid/.
<del> #
<del> # The OpenID specifications can be found at
<del> # http://openid.net/specs/openid-authentication-1_1.html
<del> # and
<del> # http://openid.net/specs/openid-authentication-2_0.html. Documentation
<del> # for published OpenID extensions and related topics can be found at
<del> # http://openid.net/developers/specs/.
<del> #
<del> # It is recommended to read through the OpenID spec, as well as
<del> # ruby-openid's documentation, to understand what exactly goes on. However
<del> # a setup as simple as the presented examples is enough to provide
<del> # Consumer functionality.
<del> #
<del> # This library strongly intends to utilize the OpenID 2.0 features of the
<del>    # ruby-openid library, which provides OpenID 1.0 compatibility.
<del> #
<del> # NOTE: Due to the amount of data that this library stores in the
<del> # session, Rack::Session::Cookie may fault.
<del>
<del> class OpenID
<del>
<del> class NoSession < RuntimeError; end
<del> class BadExtension < RuntimeError; end
<del> # Required for ruby-openid
<del> ValidStatus = [:success, :setup_needed, :cancel, :failure]
<del>
<del> # = Arguments
<del> #
<del> # The first argument is the realm, identifying the site they are trusting
<del> # with their identity. This is required, also treated as the trust_root
<del> # in OpenID 1.x exchanges.
<del> #
<del> # The optional second argument is a hash of options.
<del> #
<del> # == Options
<del> #
<del> # <tt>:return_to</tt> defines the url to return to after the client
<del> # authenticates with the openid service provider. This url should point
<del> # to where Rack::Auth::OpenID is mounted. If <tt>:return_to</tt> is not
<del> # provided, return_to will be the current url which allows flexibility
<del> # with caveats.
<del> #
<del> # <tt>:session_key</tt> defines the key to the session hash in the env.
<del> # It defaults to 'rack.session'.
<del> #
<del> # <tt>:openid_param</tt> defines at what key in the request parameters to
<del> # find the identifier to resolve. As per the 2.0 spec, the default is
<del> # 'openid_identifier'.
<del> #
<del>      # <tt>:store</tt> defines what OpenID Store to use for persistent
<del> # information. By default a Store::Memory will be used.
<del> #
<del> # <tt>:immediate</tt> as true will make initial requests to be of an
<del> # immediate type. This is false by default. See OpenID specification
<del> # documentation.
<del> #
<del> # <tt>:extensions</tt> should be a hash of openid extension
<del> # implementations. The key should be the extension main module, the value
<del> # should be an array of arguments for extension::Request.new.
<del> # The hash is iterated over and passed to #add_extension for processing.
<del> # Please see #add_extension for further documentation.
<del> #
<del> # == Examples
<del> #
<del> # simple_oid = OpenID.new('http://mysite.com/')
<del> #
<del> # return_oid = OpenID.new('http://mysite.com/', {
<del> # :return_to => 'http://mysite.com/openid'
<del> # })
<del> #
<del> # complex_oid = OpenID.new('http://mysite.com/',
<del> # :immediate => true,
<del> # :extensions => {
<del> # ::OpenID::SReg => [['email'],['nickname']]
<del> # }
<del> # )
<del> #
<del> # = Advanced
<del> #
<del> # Most of the functionality of this library is encapsulated such that
<del>      # expanding and overriding functions isn't difficult or tricky.
<del> # Alternately, to avoid opening up singleton objects or subclassing, a
<del> # wrapper rack middleware can be composed to act upon Auth::OpenID's
<del> # responses. See #check and #finish for locations of pertinent data.
<del> #
<del> # == Responses
<del> #
<del> # To change the responses that Auth::OpenID returns, override the methods
<del> # #redirect, #bad_request, #unauthorized, #access_denied, and
<del> # #foreign_server_failure.
<del> #
<del> # Additionally #confirm_post_params is used when the URI would exceed
<del> # length limits on a GET request when doing the initial verification
<del> # request.
<del> #
<del> # == Processing
<del> #
<del> # To change methods of processing completed transactions, override the
<del> # methods #success, #setup_needed, #cancel, and #failure. Please ensure
<del> # the returned object is a rack compatible response.
<del> #
<del> # The first argument is an OpenID::Response, the second is a
<del> # Rack::Request of the current request, the last is the hash used in
<del> # ruby-openid handling, which can be found manually at
<del> # env['rack.session'][:openid].
<del> #
<del> # This is useful if you wanted to expand the processing done, such as
<del> # setting up user accounts.
<del> #
<del> # oid_app = Rack::Auth::OpenID.new realm, :return_to => return_to
<del> # def oid_app.success oid, request, session
<del> # user = Models::User[oid.identity_url]
<del> # user ||= Models::User.create_from_openid oid
<del> # request['rack.session'][:user] = user.id
<del> # redirect MyApp.site_home
<del> # end
<del> #
<del> # site_map['/openid'] = oid_app
<del> # map = Rack::URLMap.new site_map
<del> # ...
<del>
<del> def initialize(realm, options={})
<del> realm = URI(realm)
<del> raise ArgumentError, "Invalid realm: #{realm}" \
<del> unless realm.absolute? \
<del> and realm.fragment.nil? \
<del> and realm.scheme =~ /^https?$/ \
<del> and realm.host =~ /^(\*\.)?#{URI::REGEXP::PATTERN::URIC_NO_SLASH}+/
<del> realm.path = '/' if realm.path.empty?
<del> @realm = realm.to_s
<del>
<del> if ruri = options[:return_to]
<del> ruri = URI(ruri)
<del> raise ArgumentError, "Invalid return_to: #{ruri}" \
<del> unless ruri.absolute? \
<del> and ruri.scheme =~ /^https?$/ \
<del> and ruri.fragment.nil?
<del> raise ArgumentError, "return_to #{ruri} not within realm #{realm}" \
<del> unless self.within_realm?(ruri)
<del> @return_to = ruri.to_s
<del> end
<del>
<del> @session_key = options[:session_key] || 'rack.session'
<del> @openid_param = options[:openid_param] || 'openid_identifier'
<del> @store = options[:store] || ::OpenID::Store::Memory.new
<del> @immediate = !!options[:immediate]
<del>
<del> @extensions = {}
<del> if extensions = options.delete(:extensions)
<del> extensions.each do |ext, args|
<del> add_extension ext, *args
<del> end
<del> end
<del>
<del> # Undocumented, semi-experimental
<del> @anonymous = !!options[:anonymous]
<del> end
<del>
<del> attr_reader :realm, :return_to, :session_key, :openid_param, :store,
<del> :immediate, :extensions
<del>
<del> # Sets up and uses session data at <tt>:openid</tt> within the session.
<del> # Errors in this setup will raise a NoSession exception.
<del> #
<del> # If the parameter 'openid.mode' is set, which implies a followup from
<del> # the openid server, processing is passed to #finish and the result is
<del> # returned. However, if there is no appropriate openid information in the
<del> # session, a 400 error is returned.
<del> #
<del> # If the parameter specified by <tt>options[:openid_param]</tt> is
<del> # present, processing is passed to #check and the result is returned.
<del> #
<del> # If neither of these conditions are met, #unauthorized is called.
<del>
<del> def call(env)
<del> env['rack.auth.openid'] = self
<del> env_session = env[@session_key]
<del> unless env_session and env_session.is_a?(Hash)
<del> raise NoSession, 'No compatible session'
<del> end
<del> # let us work in our own namespace...
<del> session = (env_session[:openid] ||= {})
<del> unless session and session.is_a?(Hash)
<del> raise NoSession, 'Incompatible openid session'
<del> end
<del>
<del> request = Rack::Request.new(env)
<del> consumer = ::OpenID::Consumer.new(session, @store)
<del>
<del> if mode = request.GET['openid.mode']
<del> if session.key?(:openid_param)
<del> finish(consumer, session, request)
<del> else
<del> bad_request
<del> end
<del> elsif request.GET[@openid_param]
<del> check(consumer, session, request)
<del> else
<del> unauthorized
<del> end
<del> end
<del>
<del> # As the first part of OpenID consumer action, #check retrieves the data
<del> # required for completion.
<del> #
<del> # If all parameters fit within the max length of a URI, a 303 redirect
<del> # will be returned. Otherwise #confirm_post_params will be called.
<del> #
<del> # Any messages from OpenID's request are logged to env['rack.errors']
<del> #
<del> # <tt>env['rack.auth.openid.request']</tt> is the openid checkid request
<del> # instance.
<del> #
<del> # <tt>session[:openid_param]</tt> is set to the openid identifier
<del> # provided by the user.
<del> #
<del> # <tt>session[:return_to]</tt> is set to the return_to uri given to the
<del> # identity provider.
<del>
<del> def check(consumer, session, req)
<del> oid = consumer.begin(req.GET[@openid_param], @anonymous)
<del> req.env['rack.auth.openid.request'] = oid
<del> req.env['rack.errors'].puts(oid.message)
<del> p oid if $DEBUG
<del>
<del> ## Extension support
<del> extensions.each do |ext,args|
<del> oid.add_extension(ext::Request.new(*args))
<del> end
<del>
<del> session[:openid_param] = req.GET[openid_param]
<del> return_to_uri = return_to ? return_to : req.url
<del> session[:return_to] = return_to_uri
<del> immediate = session.key?(:setup_needed) ? false : immediate
<del>
<del> if oid.send_redirect?(realm, return_to_uri, immediate)
<del> uri = oid.redirect_url(realm, return_to_uri, immediate)
<del> redirect(uri)
<del> else
<del> confirm_post_params(oid, realm, return_to_uri, immediate)
<del> end
<del> rescue ::OpenID::DiscoveryFailure => e
<del> # thrown from inside OpenID::Consumer#begin by yadis stuff
<del> req.env['rack.errors'].puts([e.message, *e.backtrace]*"\n")
<del> return foreign_server_failure
<del> end
<del>
<del> # This is the final portion of authentication.
<del>      # If successful, a redirect to the realm is returned.
<del> # Data gathered from extensions are stored in session[:openid] with the
<del> # extension's namespace uri as the key.
<del> #
<del> # Any messages from OpenID's response are logged to env['rack.errors']
<del> #
<del> # <tt>env['rack.auth.openid.response']</tt> will contain the openid
<del> # response.
<del>
<del> def finish(consumer, session, req)
<del> oid = consumer.complete(req.GET, req.url)
<del> req.env['rack.auth.openid.response'] = oid
<del> req.env['rack.errors'].puts(oid.message)
<del> p oid if $DEBUG
<del>
<del> raise unless ValidStatus.include?(oid.status)
<del> __send__(oid.status, oid, req, session)
<del> end
<del>
<del> # The first argument should be the main extension module.
<del> # The extension module should contain the constants:
<del> # * class Request, should have OpenID::Extension as an ancestor
<del> # * class Response, should have OpenID::Extension as an ancestor
<del>      # * string NS_URI, which defines the namespace of the extension
<del> #
<del> # All trailing arguments will be passed to extension::Request.new in
<del> # #check.
<del> # The openid response will be passed to
<del>      # extension::Response#from_success_response; #get_extension_args will be
<del> # called on the result to attain the gathered data.
<del> #
<del> # This method returns the key at which the response data will be found in
<del> # the session, which is the namespace uri by default.
<del>
<del> def add_extension(ext, *args)
<del> raise BadExtension unless valid_extension?(ext)
<del> extensions[ext] = args
<del> return ext::NS_URI
<del> end
<del>
<del>      # Checks the validity, in the context of usage, of a submitted
<del> # extension.
<del>
<del> def valid_extension?(ext)
<del> if not %w[NS_URI Request Response].all?{|c| ext.const_defined?(c) }
<del> raise ArgumentError, 'Extension is missing constants.'
<del> elsif not ext::Response.respond_to?(:from_success_response)
<del> raise ArgumentError, 'Response is missing required method.'
<del> end
<del> return true
<del> rescue
<del> return false
<del> end
<del>
<del> # Checks the provided uri to ensure it'd be considered within the realm.
<del>      # It is currently not compatible with wildcard realms.
<del>
<del> def within_realm? uri
<del> uri = URI.parse(uri.to_s)
<del> realm = URI.parse(self.realm)
<del> return false unless uri.absolute?
<del> return false unless uri.path[0, realm.path.size] == realm.path
<del> return false unless uri.host == realm.host or realm.host[/^\*\./]
<del> # for wildcard support, is awkward with URI limitations
<del> realm_match = Regexp.escape(realm.host).
<del> sub(/^\*\./,"^#{URI::REGEXP::PATTERN::URIC_NO_SLASH}+.")+'$'
<del> return false unless uri.host.match(realm_match)
<del> return true
<del> end
<del> alias_method :include?, :within_realm?
<del>
<del> protected
<del>
<del> ### These methods define some of the boilerplate responses.
<del>
<del> # Returns an html form page for posting to an Identity Provider if the
<del> # GET request would exceed the upper URI length limit.
<del>
<del> def confirm_post_params(oid, realm, return_to, immediate)
<del> Rack::Response.new.finish do |r|
<del> r.write '<html><head><title>Confirm...</title></head><body>'
<del> r.write oid.form_markup(realm, return_to, immediate)
<del> r.write '</body></html>'
<del> end
<del> end
<del>
<del> # Returns a 303 redirect with the destination of that provided by the
<del> # argument.
<del>
<del> def redirect(uri)
<del> [ 303, {'Content-Length'=>'0', 'Content-Type'=>'text/plain',
<del> 'Location' => uri},
<del> [] ]
<del> end
<del>
<del> # Returns an empty 400 response.
<del>
<del> def bad_request
<del> [ 400, {'Content-Type'=>'text/plain', 'Content-Length'=>'0'},
<del> [''] ]
<del> end
<del>
<del> # Returns a basic unauthorized 401 response.
<del>
<del> def unauthorized
<del> [ 401, {'Content-Type' => 'text/plain', 'Content-Length' => '13'},
<del> ['Unauthorized.'] ]
<del> end
<del>
<del> # Returns a basic access denied 403 response.
<del>
<del> def access_denied
<del> [ 403, {'Content-Type' => 'text/plain', 'Content-Length' => '14'},
<del> ['Access denied.'] ]
<del> end
<del>
<del> # Returns a 503 response to be used if communication with the remote
<del> # OpenID server fails.
<del>
<del> def foreign_server_failure
<del> [ 503, {'Content-Type'=>'text/plain', 'Content-Length' => '23'},
<del> ['Foreign server failure.'] ]
<del> end
<del>
<del> private
<del>
<del> ### These methods are called after a transaction is completed, depending
<del> # on its outcome. These should all return a rack compatible response.
<del> # You'd want to override these to provide additional functionality.
<del>
<del> # Called to complete processing on a successful transaction.
<del> # Within the openid session, :openid_identity and :openid_identifier are
<del> # set to the user friendly and the standard representation of the
<del> # validated identity. All other data in the openid session is cleared.
<del>
<del> def success(oid, request, session)
<del> session.clear
<del> session[:openid_identity] = oid.display_identifier
<del> session[:openid_identifier] = oid.identity_url
<del> extensions.keys.each do |ext|
<del> label = ext.name[/[^:]+$/].downcase
<del> response = ext::Response.from_success_response(oid)
<del> session[label] = response.data
<del> end
<del> redirect(realm)
<del> end
<del>
<del> # Called if the Identity Provider indicates further setup by the user is
<del> # required.
<del>      # The identifier is retrieved from the openid session at :openid_param.
<del> # And :setup_needed is set to true to prevent looping.
<del>
<del> def setup_needed(oid, request, session)
<del> identifier = session[:openid_param]
<del> session[:setup_needed] = true
<del> redirect req.script_name + '?' + openid_param + '=' + identifier
<del> end
<del>
<del> # Called if the user indicates they wish to cancel identification.
<del> # Data within openid session is cleared.
<del>
<del> def cancel(oid, request, session)
<del> session.clear
<del> access_denied
<del> end
<del>
<del> # Called if the Identity Provider indicates the user is unable to confirm
<del> # their identity. Data within the openid session is left alone, in case
<del> # of swarm auth attacks.
<del>
<del> def failure(oid, request, session)
<del> unauthorized
<del> end
<del> end
<del>
<del> # A class developed out of the request to use OpenID as an authentication
<del> # middleware. The request will be sent to the OpenID instance unless the
<del> # block evaluates to true. For example in rackup, you can use it as such:
<del> #
<del> # use Rack::Session::Pool
<del> # use Rack::Auth::OpenIDAuth, realm, openid_options do |env|
<del> # env['rack.session'][:authkey] == a_string
<del> # end
<del> # run RackApp
<del> #
<del> # Or simply:
<del> #
<del> # app = Rack::Auth::OpenIDAuth.new app, realm, openid_options, &auth
<del>
<del> class OpenIDAuth < Rack::Auth::AbstractHandler
<del> attr_reader :oid
<del> def initialize(app, realm, options={}, &auth)
<del> @oid = OpenID.new(realm, options)
<del> super(app, &auth)
<del> end
<del>
<del> def call(env)
<del> to = auth.call(env) ? @app : @oid
<del> to.call env
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/builder.rb
<del>module Rack
<del> # Rack::Builder implements a small DSL to iteratively construct Rack
<del> # applications.
<del> #
<del> # Example:
<del> #
<del> # app = Rack::Builder.new {
<del> # use Rack::CommonLogger
<del> # use Rack::ShowExceptions
<del> # map "/lobster" do
<del> # use Rack::Lint
<del> # run Rack::Lobster.new
<del> # end
<del> # }
<del> #
<del> # Or
<del> #
<del> # app = Rack::Builder.app do
<del> # use Rack::CommonLogger
<del> # lambda { |env| [200, {'Content-Type' => 'text/plain'}, 'OK'] }
<del> # end
<del> #
<del> # +use+ adds a middleware to the stack, +run+ dispatches to an application.
<del> # You can use +map+ to construct a Rack::URLMap in a convenient way.
<del>
<del> class Builder
<del> def initialize(&block)
<del> @ins = []
<del> instance_eval(&block) if block_given?
<del> end
<del>
<del> def self.app(&block)
<del> self.new(&block).to_app
<del> end
<del>
<del> def use(middleware, *args, &block)
<del> @ins << lambda { |app| middleware.new(app, *args, &block) }
<del> end
<del>
<del> def run(app)
<del> @ins << app #lambda { |nothing| app }
<del> end
<del>
<del> def map(path, &block)
<del> if @ins.last.kind_of? Hash
<del> @ins.last[path] = self.class.new(&block).to_app
<del> else
<del> @ins << {}
<del> map(path, &block)
<del> end
<del> end
<del>
<del> def to_app
<del> @ins[-1] = Rack::URLMap.new(@ins.last) if Hash === @ins.last
<del> inner_app = @ins.last
<del> @ins[0...-1].reverse.inject(inner_app) { |a, e| e.call(a) }
<del> end
<del>
<del> def call(env)
<del> to_app.call(env)
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/cascade.rb
<del>module Rack
<del>  # Rack::Cascade tries a request on several apps, and returns the
<del> # first response that is not 404 (or in a list of configurable
<del> # status codes).
<del>
<del> class Cascade
<del> attr_reader :apps
<del>
<del> def initialize(apps, catch=404)
<del> @apps = apps
<del> @catch = [*catch]
<del> end
<del>
<del> def call(env)
<del> status = headers = body = nil
<del> raise ArgumentError, "empty cascade" if @apps.empty?
<del> @apps.each { |app|
<del> begin
<del> status, headers, body = app.call(env)
<del> break unless @catch.include?(status.to_i)
<del> end
<del> }
<del> [status, headers, body]
<del> end
<del>
<del> def add app
<del> @apps << app
<del> end
<del>
<del> def include? app
<del> @apps.include? app
<del> end
<del>
<del> alias_method :<<, :add
<del> end
<del>end
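
A sketch of cascading a static-file app with a fallback application; the 'public' directory and the lambda are illustrative:

    require 'rack'

    cascade = Rack::Cascade.new([
      Rack::File.new('public'),   # try static files first
      lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['dynamic fallback']] }
    ])
    # 404 responses from Rack::File fall through to the lambda; any other
    # status is returned as-is.
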
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/chunked.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del>
<del> # Middleware that applies chunked transfer encoding to response bodies
<del> # when the response does not include a Content-Length header.
<del> class Chunked
<del> include Rack::Utils
<del>
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del> headers = HeaderHash.new(headers)
<del>
<del> if env['HTTP_VERSION'] == 'HTTP/1.0' ||
<del> STATUS_WITH_NO_ENTITY_BODY.include?(status) ||
<del> headers['Content-Length'] ||
<del> headers['Transfer-Encoding']
<del> [status, headers.to_hash, body]
<del> else
<del> dup.chunk(status, headers, body)
<del> end
<del> end
<del>
<del> def chunk(status, headers, body)
<del> @body = body
<del> headers.delete('Content-Length')
<del> headers['Transfer-Encoding'] = 'chunked'
<del> [status, headers.to_hash, self]
<del> end
<del>
<del> def each
<del> term = "\r\n"
<del> @body.each do |chunk|
<del> size = bytesize(chunk)
<del> next if size == 0
<del> yield [size.to_s(16), term, chunk, term].join
<del> end
<del> yield ["0", term, "", term].join
<del> end
<del>
<del> def close
<del> @body.close if @body.respond_to?(:close)
<del> end
<del> end
<del>end
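
As an illustration of when chunking kicks in, here is a streaming body with no Content-Length header; the Streamer class is invented for the example:

    require 'rack'

    class Streamer
      def each
        3.times { |i| yield "part #{i}\n" }
      end
    end

    app = Rack::Chunked.new(lambda { |env|
      # No Content-Length, so Chunked adds Transfer-Encoding: chunked
      # for HTTP/1.1 clients.
      [200, { 'Content-Type' => 'text/plain' }, Streamer.new]
    })
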
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/commonlogger.rb
<del>module Rack
<del> # Rack::CommonLogger forwards every request to an +app+ given, and
<del> # logs a line in the Apache common log format to the +logger+, or
<del> # rack.errors by default.
<del>
<del> class CommonLogger
<del> def initialize(app, logger=nil)
<del> @app = app
<del> @logger = logger
<del> end
<del>
<del> def call(env)
<del> dup._call(env)
<del> end
<del>
<del> def _call(env)
<del> @env = env
<del> @logger ||= self
<del> @time = Time.now
<del> @status, @header, @body = @app.call(env)
<del> [@status, @header, self]
<del> end
<del>
<del> def close
<del> @body.close if @body.respond_to? :close
<del> end
<del>
<del> # By default, log to rack.errors.
<del> def <<(str)
<del> @env["rack.errors"].write(str)
<del> @env["rack.errors"].flush
<del> end
<del>
<del> def each
<del> length = 0
<del> @body.each { |part|
<del> length += part.size
<del> yield part
<del> }
<del>
<del> @now = Time.now
<del>
<del> # Common Log Format: http://httpd.apache.org/docs/1.3/logs.html#common
<del> # lilith.local - - [07/Aug/2006 23:58:02] "GET / HTTP/1.1" 500 -
<del> # %{%s - %s [%s] "%s %s%s %s" %d %s\n} %
<del> @logger << %{%s - %s [%s] "%s %s%s %s" %d %s %0.4f\n} %
<del> [
<del> @env['HTTP_X_FORWARDED_FOR'] || @env["REMOTE_ADDR"] || "-",
<del> @env["REMOTE_USER"] || "-",
<del> @now.strftime("%d/%b/%Y %H:%M:%S"),
<del> @env["REQUEST_METHOD"],
<del> @env["PATH_INFO"],
<del> @env["QUERY_STRING"].empty? ? "" : "?"+@env["QUERY_STRING"],
<del> @env["HTTP_VERSION"],
<del> @status.to_s[0..3],
<del> (length.zero? ? "-" : length.to_s),
<del> @now - @time
<del> ]
<del> end
<del> end
<del>end
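
A small usage sketch; by default the log line goes to env["rack.errors"], but any object responding to << (here $stdout) can be passed as the logger:

    require 'rack'

    app = lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['ok']] }
    logged_app = Rack::CommonLogger.new(app, $stdout)
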
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/conditionalget.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del>
<del> # Middleware that enables conditional GET using If-None-Match and
<del> # If-Modified-Since. The application should set either or both of the
<del> # Last-Modified or Etag response headers according to RFC 2616. When
<del> # either of the conditions is met, the response body is set to be zero
<del> # length and the response status is set to 304 Not Modified.
<del> #
<del> # Applications that defer response body generation until the body's each
<del> # message is received will avoid response body generation completely when
<del> # a conditional GET matches.
<del> #
<del> # Adapted from Michael Klishin's Merb implementation:
<del> # http://github.com/wycats/merb-core/tree/master/lib/merb-core/rack/middleware/conditional_get.rb
<del> class ConditionalGet
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> return @app.call(env) unless %w[GET HEAD].include?(env['REQUEST_METHOD'])
<del>
<del> status, headers, body = @app.call(env)
<del> headers = Utils::HeaderHash.new(headers)
<del> if etag_matches?(env, headers) || modified_since?(env, headers)
<del> status = 304
<del> body = []
<del> end
<del> [status, headers, body]
<del> end
<del>
<del> private
<del> def etag_matches?(env, headers)
<del> etag = headers['Etag'] and etag == env['HTTP_IF_NONE_MATCH']
<del> end
<del>
<del> def modified_since?(env, headers)
<del> last_modified = headers['Last-Modified'] and
<del> last_modified == env['HTTP_IF_MODIFIED_SINCE']
<del> end
<del> end
<del>
<del>end
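
A sketch of an app that becomes cacheable through this middleware. The Last-Modified value is illustrative, and note that modified_since? above is a plain string comparison against If-Modified-Since:

    require 'rack'
    require 'time'   # for Time#httpdate

    STARTED_AT = Time.now.httpdate

    app = Rack::ConditionalGet.new(lambda { |env|
      [200,
       { 'Content-Type' => 'text/plain', 'Last-Modified' => STARTED_AT },
       ['cacheable body']]
    })
    # A request carrying "If-Modified-Since: <STARTED_AT>" gets a bodyless 304.
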
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/content_length.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Sets the Content-Length header on responses with fixed-length bodies.
<del> class ContentLength
<del> include Rack::Utils
<del>
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del> headers = HeaderHash.new(headers)
<del>
<del> if !STATUS_WITH_NO_ENTITY_BODY.include?(status) &&
<del> !headers['Content-Length'] &&
<del> !headers['Transfer-Encoding'] &&
<del> (body.respond_to?(:to_ary) || body.respond_to?(:to_str))
<del>
<del> body = [body] if body.respond_to?(:to_str) # rack 0.4 compat
<del> length = body.to_ary.inject(0) { |len, part| len + bytesize(part) }
<del> headers['Content-Length'] = length.to_s
<del> end
<del>
<del> [status, headers, body]
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/content_type.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del>
<del> # Sets the Content-Type header on responses which don't have one.
<del> #
<del> # Builder Usage:
<del> # use Rack::ContentType, "text/plain"
<del> #
<del> # When no content type argument is provided, "text/html" is assumed.
<del> class ContentType
<del> def initialize(app, content_type = "text/html")
<del> @app, @content_type = app, content_type
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del> headers = Utils::HeaderHash.new(headers)
<del> headers['Content-Type'] ||= @content_type
<del> [status, headers.to_hash, body]
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/deflater.rb
<del>require "zlib"
<del>require "stringio"
<del>require "time" # for Time.httpdate
<del>require 'rack/utils'
<del>
<del>module Rack
<del> class Deflater
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del> headers = Utils::HeaderHash.new(headers)
<del>
<del> # Skip compressing empty entity body responses and responses with
<del> # no-transform set.
<del> if Utils::STATUS_WITH_NO_ENTITY_BODY.include?(status) ||
<del> headers['Cache-Control'].to_s =~ /\bno-transform\b/
<del> return [status, headers, body]
<del> end
<del>
<del> request = Request.new(env)
<del>
<del> encoding = Utils.select_best_encoding(%w(gzip deflate identity),
<del> request.accept_encoding)
<del>
<del> # Set the Vary HTTP header.
<del> vary = headers["Vary"].to_s.split(",").map { |v| v.strip }
<del> unless vary.include?("*") || vary.include?("Accept-Encoding")
<del> headers["Vary"] = vary.push("Accept-Encoding").join(",")
<del> end
<del>
<del> case encoding
<del> when "gzip"
<del> mtime = headers.key?("Last-Modified") ?
<del> Time.httpdate(headers["Last-Modified"]) : Time.now
<del> body = self.class.gzip(body, mtime)
<del> size = Rack::Utils.bytesize(body)
<del> headers = headers.merge("Content-Encoding" => "gzip", "Content-Length" => size.to_s)
<del> [status, headers, [body]]
<del> when "deflate"
<del> body = self.class.deflate(body)
<del> size = Rack::Utils.bytesize(body)
<del> headers = headers.merge("Content-Encoding" => "deflate", "Content-Length" => size.to_s)
<del> [status, headers, [body]]
<del> when "identity"
<del> [status, headers, body]
<del> when nil
<del> message = "An acceptable encoding for the requested resource #{request.fullpath} could not be found."
<del> [406, {"Content-Type" => "text/plain", "Content-Length" => message.length.to_s}, [message]]
<del> end
<del> end
<del>
<del> def self.gzip(body, mtime)
<del> io = StringIO.new
<del> gzip = Zlib::GzipWriter.new(io)
<del> gzip.mtime = mtime
<del>
<del> # TODO: Add streaming
<del> body.each { |part| gzip << part }
<del>
<del> gzip.close
<del> return io.string
<del> end
<del>
<del> DEFLATE_ARGS = [
<del> Zlib::DEFAULT_COMPRESSION,
<del> # drop the zlib header which causes both Safari and IE to choke
<del> -Zlib::MAX_WBITS,
<del> Zlib::DEF_MEM_LEVEL,
<del> Zlib::DEFAULT_STRATEGY
<del> ]
<del>
<del> # Loosely based on Mongrel's Deflate handler
<del> def self.deflate(body)
<del> deflater = Zlib::Deflate.new(*DEFLATE_ARGS)
<del>
<del> # TODO: Add streaming
<del> body.each { |part| deflater << part }
<del>
<del> return deflater.finish
<del> end
<del> end
<del>end
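
A sketch of wrapping an app in the deflater; which encoding is applied depends on the client's Accept-Encoding header, and the body here is just filler:

    require 'rack'

    app = Rack::Deflater.new(lambda { |env|
      [200, { 'Content-Type' => 'text/plain' }, ['hello ' * 1_000]]
    })
    # Accept-Encoding: gzip    -> gzipped body with Content-Encoding: gzip
    # Accept-Encoding: deflate -> raw deflate stream
    # no acceptable encoding   -> 406 Not Acceptable
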
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/directory.rb
<del>require 'time'
<del>require 'rack/utils'
<del>require 'rack/mime'
<del>
<del>module Rack
<del> # Rack::Directory serves entries below the +root+ given, according to the
<del> # path info of the Rack request. If a directory is found, the file's contents
<del> # will be presented in an html based index. If a file is found, the env will
<del> # be passed to the specified +app+.
<del> #
<del> # If +app+ is not specified, a Rack::File of the same +root+ will be used.
<del>
<del> class Directory
<del> DIR_FILE = "<tr><td class='name'><a href='%s'>%s</a></td><td class='size'>%s</td><td class='type'>%s</td><td class='mtime'>%s</td></tr>"
<del> DIR_PAGE = <<-PAGE
<del><html><head>
<del> <title>%s</title>
<del> <meta http-equiv="content-type" content="text/html; charset=utf-8" />
<del> <style type='text/css'>
<del>table { width:100%%; }
<del>.name { text-align:left; }
<del>.size, .mtime { text-align:right; }
<del>.type { width:11em; }
<del>.mtime { width:15em; }
<del> </style>
<del></head><body>
<del><h1>%s</h1>
<del><hr />
<del><table>
<del> <tr>
<del> <th class='name'>Name</th>
<del> <th class='size'>Size</th>
<del> <th class='type'>Type</th>
<del> <th class='mtime'>Last Modified</th>
<del> </tr>
<del>%s
<del></table>
<del><hr />
<del></body></html>
<del> PAGE
<del>
<del> attr_reader :files
<del> attr_accessor :root, :path
<del>
<del> def initialize(root, app=nil)
<del> @root = F.expand_path(root)
<del> @app = app || Rack::File.new(@root)
<del> end
<del>
<del> def call(env)
<del> dup._call(env)
<del> end
<del>
<del> F = ::File
<del>
<del> def _call(env)
<del> @env = env
<del> @script_name = env['SCRIPT_NAME']
<del> @path_info = Utils.unescape(env['PATH_INFO'])
<del>
<del> if forbidden = check_forbidden
<del> forbidden
<del> else
<del> @path = F.join(@root, @path_info)
<del> list_path
<del> end
<del> end
<del>
<del> def check_forbidden
<del> return unless @path_info.include? ".."
<del>
<del> body = "Forbidden\n"
<del> size = Rack::Utils.bytesize(body)
<del> return [403, {"Content-Type" => "text/plain","Content-Length" => size.to_s}, [body]]
<del> end
<del>
<del> def list_directory
<del> @files = [['../','Parent Directory','','','']]
<del> glob = F.join(@path, '*')
<del>
<del> Dir[glob].sort.each do |node|
<del> stat = stat(node)
<del> next unless stat
<del> basename = F.basename(node)
<del> ext = F.extname(node)
<del>
<del> url = F.join(@script_name, @path_info, basename)
<del> size = stat.size
<del> type = stat.directory? ? 'directory' : Mime.mime_type(ext)
<del> size = stat.directory? ? '-' : filesize_format(size)
<del> mtime = stat.mtime.httpdate
<del> url << '/' if stat.directory?
<del> basename << '/' if stat.directory?
<del>
<del> @files << [ url, basename, size, type, mtime ]
<del> end
<del>
<del> return [ 200, {'Content-Type'=>'text/html; charset=utf-8'}, self ]
<del> end
<del>
<del> def stat(node, max = 10)
<del> F.stat(node)
<del> rescue Errno::ENOENT, Errno::ELOOP
<del> return nil
<del> end
<del>
<del> # TODO: add correct response if not readable, not sure if 404 is the best
<del> # option
<del> def list_path
<del> @stat = F.stat(@path)
<del>
<del> if @stat.readable?
<del> return @app.call(@env) if @stat.file?
<del> return list_directory if @stat.directory?
<del> else
<del> raise Errno::ENOENT, 'No such file or directory'
<del> end
<del>
<del> rescue Errno::ENOENT, Errno::ELOOP
<del> return entity_not_found
<del> end
<del>
<del> def entity_not_found
<del> body = "Entity not found: #{@path_info}\n"
<del> size = Rack::Utils.bytesize(body)
<del> return [404, {"Content-Type" => "text/plain", "Content-Length" => size.to_s}, [body]]
<del> end
<del>
<del> def each
<del> show_path = @path.sub(/^#{@root}/,'')
<del> files = @files.map{|f| DIR_FILE % f }*"\n"
<del> page = DIR_PAGE % [ show_path, show_path , files ]
<del> page.each_line{|l| yield l }
<del> end
<del>
<del> # Stolen from Ramaze
<del>
<del> FILESIZE_FORMAT = [
<del> ['%.1fT', 1 << 40],
<del> ['%.1fG', 1 << 30],
<del> ['%.1fM', 1 << 20],
<del> ['%.1fK', 1 << 10],
<del> ]
<del>
<del> def filesize_format(int)
<del> FILESIZE_FORMAT.each do |format, size|
<del> return format % (int.to_f / size) if int >= size
<del> end
<del>
<del> int.to_s + 'B'
<del> end
<del> end
<del>end
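
A sketch of serving a directory listing; the document root is a placeholder path, and file requests fall through to a Rack::File on the same root by default:

    require 'rack'

    # Listings for directories, Rack::File for regular files.
    app = Rack::Directory.new('/var/www/public')

    # Or hand file requests to a custom app instead of Rack::File:
    # app = Rack::Directory.new('/var/www/public', my_file_app)
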
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/file.rb
<del>require 'time'
<del>require 'rack/utils'
<del>require 'rack/mime'
<del>
<del>module Rack
<del> # Rack::File serves files below the +root+ given, according to the
<del> # path info of the Rack request.
<del> #
<del> # Handlers can detect if bodies are a Rack::File, and use mechanisms
<del> # like sendfile on the +path+.
<del>
<del> class File
<del> attr_accessor :root
<del> attr_accessor :path
<del>
<del> alias :to_path :path
<del>
<del> def initialize(root)
<del> @root = root
<del> end
<del>
<del> def call(env)
<del> dup._call(env)
<del> end
<del>
<del> F = ::File
<del>
<del> def _call(env)
<del> @path_info = Utils.unescape(env["PATH_INFO"])
<del> return forbidden if @path_info.include? ".."
<del>
<del> @path = F.join(@root, @path_info)
<del>
<del> begin
<del> if F.file?(@path) && F.readable?(@path)
<del> serving
<del> else
<del> raise Errno::EPERM
<del> end
<del> rescue SystemCallError
<del> not_found
<del> end
<del> end
<del>
<del> def forbidden
<del> body = "Forbidden\n"
<del> [403, {"Content-Type" => "text/plain",
<del> "Content-Length" => body.size.to_s},
<del> [body]]
<del> end
<del>
<del> # NOTE:
<del> # We check via File::size? whether this file provides size info
<del> # via stat (e.g. /proc files often don't), otherwise we have to
<del> # figure it out by reading the whole file into memory. And while
<del> # we're at it we also use this as body then.
<del>
<del> def serving
<del> if size = F.size?(@path)
<del> body = self
<del> else
<del> body = [F.read(@path)]
<del> size = Utils.bytesize(body.first)
<del> end
<del>
<del> [200, {
<del> "Last-Modified" => F.mtime(@path).httpdate,
<del> "Content-Type" => Mime.mime_type(F.extname(@path), 'text/plain'),
<del> "Content-Length" => size.to_s
<del> }, body]
<del> end
<del>
<del> def not_found
<del> body = "File not found: #{@path_info}\n"
<del> [404, {"Content-Type" => "text/plain",
<del> "Content-Length" => body.size.to_s},
<del> [body]]
<del> end
<del>
<del> def each
<del> F.open(@path, "rb") { |file|
<del> while part = file.read(8192)
<del> yield part
<del> end
<del> }
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler.rb
<del>module Rack
<del> # *Handlers* connect web servers with Rack.
<del> #
<del> # Rack includes Handlers for Mongrel, WEBrick, FastCGI, CGI, SCGI
<del> # and LiteSpeed.
<del> #
<del> # Handlers usually are activated by calling <tt>MyHandler.run(myapp)</tt>.
<del> # A second optional hash can be passed to include server-specific
<del> # configuration.
<del> module Handler
<del> def self.get(server)
<del> return unless server
<del>
<del> if klass = @handlers[server]
<del> obj = Object
<del> klass.split("::").each { |x| obj = obj.const_get(x) }
<del> obj
<del> else
<del> Rack::Handler.const_get(server.capitalize)
<del> end
<del> end
<del>
<del> def self.register(server, klass)
<del> @handlers ||= {}
<del> @handlers[server] = klass
<del> end
<del>
<del> autoload :CGI, "rack/handler/cgi"
<del> autoload :FastCGI, "rack/handler/fastcgi"
<del> autoload :Mongrel, "rack/handler/mongrel"
<del> autoload :EventedMongrel, "rack/handler/evented_mongrel"
<del> autoload :SwiftipliedMongrel, "rack/handler/swiftiplied_mongrel"
<del> autoload :WEBrick, "rack/handler/webrick"
<del> autoload :LSWS, "rack/handler/lsws"
<del> autoload :SCGI, "rack/handler/scgi"
<del> autoload :Thin, "rack/handler/thin"
<del>
<del> register 'cgi', 'Rack::Handler::CGI'
<del> register 'fastcgi', 'Rack::Handler::FastCGI'
<del> register 'mongrel', 'Rack::Handler::Mongrel'
<del> register 'emongrel', 'Rack::Handler::EventedMongrel'
<del> register 'smongrel', 'Rack::Handler::SwiftipliedMongrel'
<del> register 'webrick', 'Rack::Handler::WEBrick'
<del> register 'lsws', 'Rack::Handler::LSWS'
<del> register 'scgi', 'Rack::Handler::SCGI'
<del> register 'thin', 'Rack::Handler::Thin'
<del> end
<del>end
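
A sketch of the two ways this registry is typically used: registering a custom handler class under a short name, and looking a built-in handler up by name to start a server. The 'myserver' name, MyServer class and port are invented for the example:

    require 'rack'

    app = lambda { |env| [200, { 'Content-Type' => 'text/plain' }, ['ok']] }

    # Register a handler class under a short name (only the string is stored).
    Rack::Handler.register('myserver', 'Rack::Handler::MyServer')

    # Look a built-in handler up by name and start the app (this call blocks).
    Rack::Handler.get('webrick').run(app, :Port => 9292)
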
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/cgi.rb
<del>require 'rack/content_length'
<del>
<del>module Rack
<del> module Handler
<del> class CGI
<del> def self.run(app, options=nil)
<del> serve app
<del> end
<del>
<del> def self.serve(app)
<del> app = ContentLength.new(app)
<del>
<del> env = ENV.to_hash
<del> env.delete "HTTP_CONTENT_LENGTH"
<del>
<del> env["SCRIPT_NAME"] = "" if env["SCRIPT_NAME"] == "/"
<del>
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => $stdin,
<del> "rack.errors" => $stderr,
<del>
<del> "rack.multithread" => false,
<del> "rack.multiprocess" => true,
<del> "rack.run_once" => true,
<del>
<del> "rack.url_scheme" => ["yes", "on", "1"].include?(ENV["HTTPS"]) ? "https" : "http"
<del> })
<del>
<del> env["QUERY_STRING"] ||= ""
<del> env["HTTP_VERSION"] ||= env["SERVER_PROTOCOL"]
<del> env["REQUEST_PATH"] ||= "/"
<del>
<del> status, headers, body = app.call(env)
<del> begin
<del> send_headers status, headers
<del> send_body body
<del> ensure
<del> body.close if body.respond_to? :close
<del> end
<del> end
<del>
<del> def self.send_headers(status, headers)
<del> STDOUT.print "Status: #{status}\r\n"
<del> headers.each { |k, vs|
<del> vs.split("\n").each { |v|
<del> STDOUT.print "#{k}: #{v}\r\n"
<del> }
<del> }
<del> STDOUT.print "\r\n"
<del> STDOUT.flush
<del> end
<del>
<del> def self.send_body(body)
<del> body.each { |part|
<del> STDOUT.print part
<del> STDOUT.flush
<del> }
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/evented_mongrel.rb
<del>require 'swiftcore/evented_mongrel'
<del>
<del>module Rack
<del> module Handler
<del> class EventedMongrel < Handler::Mongrel
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/fastcgi.rb
<del>require 'fcgi'
<del>require 'socket'
<del>require 'rack/content_length'
<del>
<del>module Rack
<del> module Handler
<del> class FastCGI
<del> def self.run(app, options={})
<del> file = options[:File] and STDIN.reopen(UNIXServer.new(file))
<del> port = options[:Port] and STDIN.reopen(TCPServer.new(port))
<del> FCGI.each { |request|
<del> serve request, app
<del> }
<del> end
<del>
<del> module ProperStream # :nodoc:
<del> def each # This is missing by default.
<del> while line = gets
<del> yield line
<del> end
<del> end
<del>
<del> def read(*args)
<del> if args.empty?
<del> super || "" # Empty string on EOF.
<del> else
<del> super
<del> end
<del> end
<del> end
<del>
<del> def self.serve(request, app)
<del> app = Rack::ContentLength.new(app)
<del>
<del> env = request.env
<del> env.delete "HTTP_CONTENT_LENGTH"
<del>
<del> request.in.extend ProperStream
<del>
<del> env["SCRIPT_NAME"] = "" if env["SCRIPT_NAME"] == "/"
<del>
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => request.in,
<del> "rack.errors" => request.err,
<del>
<del> "rack.multithread" => false,
<del> "rack.multiprocess" => true,
<del> "rack.run_once" => false,
<del>
<del> "rack.url_scheme" => ["yes", "on", "1"].include?(env["HTTPS"]) ? "https" : "http"
<del> })
<del>
<del> env["QUERY_STRING"] ||= ""
<del> env["HTTP_VERSION"] ||= env["SERVER_PROTOCOL"]
<del> env["REQUEST_PATH"] ||= "/"
<del> env.delete "PATH_INFO" if env["PATH_INFO"] == ""
<del> env.delete "CONTENT_TYPE" if env["CONTENT_TYPE"] == ""
<del> env.delete "CONTENT_LENGTH" if env["CONTENT_LENGTH"] == ""
<del>
<del> status, headers, body = app.call(env)
<del> begin
<del> send_headers request.out, status, headers
<del> send_body request.out, body
<del> ensure
<del> body.close if body.respond_to? :close
<del> request.finish
<del> end
<del> end
<del>
<del> def self.send_headers(out, status, headers)
<del> out.print "Status: #{status}\r\n"
<del> headers.each { |k, vs|
<del> vs.split("\n").each { |v|
<del> out.print "#{k}: #{v}\r\n"
<del> }
<del> }
<del> out.print "\r\n"
<del> out.flush
<del> end
<del>
<del> def self.send_body(out, body)
<del> body.each { |part|
<del> out.print part
<del> out.flush
<del> }
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/lsws.rb
<del>require 'lsapi'
<del>require 'rack/content_length'
<del>
<del>module Rack
<del> module Handler
<del> class LSWS
<del> def self.run(app, options=nil)
<del> while LSAPI.accept != nil
<del> serve app
<del> end
<del> end
<del> def self.serve(app)
<del> app = Rack::ContentLength.new(app)
<del>
<del> env = ENV.to_hash
<del> env.delete "HTTP_CONTENT_LENGTH"
<del> env["SCRIPT_NAME"] = "" if env["SCRIPT_NAME"] == "/"
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => StringIO.new($stdin.read.to_s),
<del> "rack.errors" => $stderr,
<del> "rack.multithread" => false,
<del> "rack.multiprocess" => true,
<del> "rack.run_once" => false,
<del> "rack.url_scheme" => ["yes", "on", "1"].include?(ENV["HTTPS"]) ? "https" : "http"
<del> })
<del> env["QUERY_STRING"] ||= ""
<del> env["HTTP_VERSION"] ||= env["SERVER_PROTOCOL"]
<del> env["REQUEST_PATH"] ||= "/"
<del> status, headers, body = app.call(env)
<del> begin
<del> send_headers status, headers
<del> send_body body
<del> ensure
<del> body.close if body.respond_to? :close
<del> end
<del> end
<del> def self.send_headers(status, headers)
<del> print "Status: #{status}\r\n"
<del> headers.each { |k, vs|
<del> vs.split("\n").each { |v|
<del> print "#{k}: #{v}\r\n"
<del> }
<del> }
<del> print "\r\n"
<del> STDOUT.flush
<del> end
<del> def self.send_body(body)
<del> body.each { |part|
<del> print part
<del> STDOUT.flush
<del> }
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/mongrel.rb
<del>require 'mongrel'
<del>require 'stringio'
<del>require 'rack/content_length'
<del>require 'rack/chunked'
<del>
<del>module Rack
<del> module Handler
<del> class Mongrel < ::Mongrel::HttpHandler
<del> def self.run(app, options={})
<del> server = ::Mongrel::HttpServer.new(options[:Host] || '0.0.0.0',
<del> options[:Port] || 8080)
<del> # Acts like Rack::URLMap, utilizing Mongrel's own path finding methods.
<del> # Use is similar to #run, replacing the app argument with a hash of
<del> # { path=>app, ... } or an instance of Rack::URLMap.
<del> if options[:map]
<del> if app.is_a? Hash
<del> app.each do |path, appl|
<del> path = '/'+path unless path[0] == ?/
<del> server.register(path, Rack::Handler::Mongrel.new(appl))
<del> end
<del> elsif app.is_a? URLMap
<del> app.instance_variable_get(:@mapping).each do |(host, path, appl)|
<del> next if !host.nil? && !options[:Host].nil? && options[:Host] != host
<del> path = '/'+path unless path[0] == ?/
<del> server.register(path, Rack::Handler::Mongrel.new(appl))
<del> end
<del> else
<del> raise ArgumentError, "first argument should be a Hash or URLMap"
<del> end
<del> else
<del> server.register('/', Rack::Handler::Mongrel.new(app))
<del> end
<del> yield server if block_given?
<del> server.run.join
<del> end
<del>
<del> def initialize(app)
<del> @app = Rack::Chunked.new(Rack::ContentLength.new(app))
<del> end
<del>
<del> def process(request, response)
<del> env = {}.replace(request.params)
<del> env.delete "HTTP_CONTENT_TYPE"
<del> env.delete "HTTP_CONTENT_LENGTH"
<del>
<del> env["SCRIPT_NAME"] = "" if env["SCRIPT_NAME"] == "/"
<del>
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => request.body || StringIO.new(""),
<del> "rack.errors" => $stderr,
<del>
<del> "rack.multithread" => true,
<del> "rack.multiprocess" => false, # ???
<del> "rack.run_once" => false,
<del>
<del> "rack.url_scheme" => "http",
<del> })
<del> env["QUERY_STRING"] ||= ""
<del> env.delete "PATH_INFO" if env["PATH_INFO"] == ""
<del>
<del> status, headers, body = @app.call(env)
<del>
<del> begin
<del> response.status = status.to_i
<del> response.send_status(nil)
<del>
<del> headers.each { |k, vs|
<del> vs.split("\n").each { |v|
<del> response.header[k] = v
<del> }
<del> }
<del> response.send_header
<del>
<del> body.each { |part|
<del> response.write part
<del> response.socket.flush
<del> }
<del> ensure
<del> body.close if body.respond_to? :close
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/scgi.rb
<del>require 'scgi'
<del>require 'stringio'
<del>require 'rack/content_length'
<del>require 'rack/chunked'
<del>
<del>module Rack
<del> module Handler
<del> class SCGI < ::SCGI::Processor
<del> attr_accessor :app
<del>
<del> def self.run(app, options=nil)
<del> new(options.merge(:app=>app,
<del> :host=>options[:Host],
<del> :port=>options[:Port],
<del> :socket=>options[:Socket])).listen
<del> end
<del>
<del> def initialize(settings = {})
<del> @app = Rack::Chunked.new(Rack::ContentLength.new(settings[:app]))
<del> @log = Object.new
<del> def @log.info(*args); end
<del> def @log.error(*args); end
<del> super(settings)
<del> end
<del>
<del> def process_request(request, input_body, socket)
<del> env = {}.replace(request)
<del> env.delete "HTTP_CONTENT_TYPE"
<del> env.delete "HTTP_CONTENT_LENGTH"
<del> env["REQUEST_PATH"], env["QUERY_STRING"] = env["REQUEST_URI"].split('?', 2)
<del> env["HTTP_VERSION"] ||= env["SERVER_PROTOCOL"]
<del> env["PATH_INFO"] = env["REQUEST_PATH"]
<del> env["QUERY_STRING"] ||= ""
<del> env["SCRIPT_NAME"] = ""
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => StringIO.new(input_body),
<del> "rack.errors" => $stderr,
<del>
<del> "rack.multithread" => true,
<del> "rack.multiprocess" => true,
<del> "rack.run_once" => false,
<del>
<del> "rack.url_scheme" => ["yes", "on", "1"].include?(env["HTTPS"]) ? "https" : "http"
<del> })
<del> status, headers, body = app.call(env)
<del> begin
<del> socket.write("Status: #{status}\r\n")
<del> headers.each do |k, vs|
<del> vs.split("\n").each { |v| socket.write("#{k}: #{v}\r\n")}
<del> end
<del> socket.write("\r\n")
<del> body.each {|s| socket.write(s)}
<del> ensure
<del> body.close if body.respond_to? :close
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/swiftiplied_mongrel.rb
<del>require 'swiftcore/swiftiplied_mongrel'
<del>
<del>module Rack
<del> module Handler
<del> class SwiftipliedMongrel < Handler::Mongrel
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/thin.rb
<del>require "thin"
<del>require "rack/content_length"
<del>require "rack/chunked"
<del>
<del>module Rack
<del> module Handler
<del> class Thin
<del> def self.run(app, options={})
<del> app = Rack::Chunked.new(Rack::ContentLength.new(app))
<del> server = ::Thin::Server.new(options[:Host] || '0.0.0.0',
<del> options[:Port] || 8080,
<del> app)
<del> yield server if block_given?
<del> server.start
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/handler/webrick.rb
<del>require 'webrick'
<del>require 'stringio'
<del>require 'rack/content_length'
<del>
<del>module Rack
<del> module Handler
<del> class WEBrick < ::WEBrick::HTTPServlet::AbstractServlet
<del> def self.run(app, options={})
<del> server = ::WEBrick::HTTPServer.new(options)
<del> server.mount "/", Rack::Handler::WEBrick, app
<del> trap(:INT) { server.shutdown }
<del> yield server if block_given?
<del> server.start
<del> end
<del>
<del> def initialize(server, app)
<del> super server
<del> @app = Rack::ContentLength.new(app)
<del> end
<del>
<del> def service(req, res)
<del> env = req.meta_vars
<del> env.delete_if { |k, v| v.nil? }
<del>
<del> env.update({"rack.version" => [0,1],
<del> "rack.input" => StringIO.new(req.body.to_s),
<del> "rack.errors" => $stderr,
<del>
<del> "rack.multithread" => true,
<del> "rack.multiprocess" => false,
<del> "rack.run_once" => false,
<del>
<del> "rack.url_scheme" => ["yes", "on", "1"].include?(ENV["HTTPS"]) ? "https" : "http"
<del> })
<del>
<del> env["HTTP_VERSION"] ||= env["SERVER_PROTOCOL"]
<del> env["QUERY_STRING"] ||= ""
<del> env["REQUEST_PATH"] ||= "/"
<del> if env["PATH_INFO"] == ""
<del> env.delete "PATH_INFO"
<del> else
<del> path, n = req.request_uri.path, env["SCRIPT_NAME"].length
<del> env["PATH_INFO"] = path[n, path.length-n]
<del> end
<del>
<del> status, headers, body = @app.call(env)
<del> begin
<del> res.status = status.to_i
<del> headers.each { |k, vs|
<del> if k.downcase == "set-cookie"
<del> res.cookies.concat vs.split("\n")
<del> else
<del> vs.split("\n").each { |v|
<del> res[k] = v
<del> }
<del> end
<del> }
<del> body.each { |part|
<del> res.body << part
<del> }
<del> ensure
<del> body.close if body.respond_to? :close
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/head.rb
<del>module Rack
<del>
<del>class Head
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del>
<del> if env["REQUEST_METHOD"] == "HEAD"
<del> [status, headers, []]
<del> else
<del> [status, headers, body]
<del> end
<del> end
<del>end
<del>
<del>end
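
A sketch showing the effect on a HEAD request; the inner app is a stub:

    require 'rack'

    app = Rack::Head.new(lambda { |env|
      [200, { 'Content-Type' => 'text/plain', 'Content-Length' => '2' }, ['ok']]
    })
    # GET  -> 200 with body "ok"
    # HEAD -> 200 with the same headers and an empty body
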
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/lint.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Rack::Lint validates your application and the requests and
<del> # responses according to the Rack spec.
<del>
<del> class Lint
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> # :stopdoc:
<del>
<del> class LintError < RuntimeError; end
<del> module Assertion
<del> def assert(message, &block)
<del> unless block.call
<del> raise LintError, message
<del> end
<del> end
<del> end
<del> include Assertion
<del>
<del> ## This specification aims to formalize the Rack protocol. You
<del> ## can (and should) use Rack::Lint to enforce it.
<del> ##
<del> ## When you develop middleware, be sure to add a Lint before and
<del> ## after to catch all mistakes.
<del>
<del> ## = Rack applications
<del>
<del> ## A Rack application is an Ruby object (not a class) that
<del> ## responds to +call+.
<del> def call(env=nil)
<del> dup._call(env)
<del> end
<del>
<del> def _call(env)
<del> ## It takes exactly one argument, the *environment*
<del> assert("No env given") { env }
<del> check_env env
<del>
<del> env['rack.input'] = InputWrapper.new(env['rack.input'])
<del> env['rack.errors'] = ErrorWrapper.new(env['rack.errors'])
<del>
<del> ## and returns an Array of exactly three values:
<del> status, headers, @body = @app.call(env)
<del> ## The *status*,
<del> check_status status
<del> ## the *headers*,
<del> check_headers headers
<del> ## and the *body*.
<del> check_content_type status, headers
<del> check_content_length status, headers, env
<del> [status, headers, self]
<del> end
<del>
<del> ## == The Environment
<del> def check_env(env)
<del>      ## The environment must be a true instance of Hash (no
<del> ## subclassing allowed) that includes CGI-like headers.
<del> ## The application is free to modify the environment.
<del> assert("env #{env.inspect} is not a Hash, but #{env.class}") {
<del> env.instance_of? Hash
<del> }
<del>
<del> ##
<del> ## The environment is required to include these variables
<del> ## (adopted from PEP333), except when they'd be empty, but see
<del> ## below.
<del>
<del> ## <tt>REQUEST_METHOD</tt>:: The HTTP request method, such as
<del> ## "GET" or "POST". This cannot ever
<del> ## be an empty string, and so is
<del> ## always required.
<del>
<del> ## <tt>SCRIPT_NAME</tt>:: The initial portion of the request
<del> ## URL's "path" that corresponds to the
<del> ## application object, so that the
<del> ## application knows its virtual
<del> ## "location". This may be an empty
<del> ## string, if the application corresponds
<del> ## to the "root" of the server.
<del>
<del> ## <tt>PATH_INFO</tt>:: The remainder of the request URL's
<del> ## "path", designating the virtual
<del> ## "location" of the request's target
<del> ## within the application. This may be an
<del> ## empty string, if the request URL targets
<del> ## the application root and does not have a
<del> ## trailing slash. This information should be
<del> ## decoded by the server if it comes from a
<del> ## URL.
<del>
<del> ## <tt>QUERY_STRING</tt>:: The portion of the request URL that
<del> ## follows the <tt>?</tt>, if any. May be
<del> ## empty, but is always required!
<del>
<del> ## <tt>SERVER_NAME</tt>, <tt>SERVER_PORT</tt>:: When combined with <tt>SCRIPT_NAME</tt> and <tt>PATH_INFO</tt>, these variables can be used to complete the URL. Note, however, that <tt>HTTP_HOST</tt>, if present, should be used in preference to <tt>SERVER_NAME</tt> for reconstructing the request URL. <tt>SERVER_NAME</tt> and <tt>SERVER_PORT</tt> can never be empty strings, and so are always required.
<del>
<del> ## <tt>HTTP_</tt> Variables:: Variables corresponding to the
<del> ## client-supplied HTTP request
<del> ## headers (i.e., variables whose
<del> ## names begin with <tt>HTTP_</tt>). The
<del> ## presence or absence of these
<del> ## variables should correspond with
<del> ## the presence or absence of the
<del> ## appropriate HTTP header in the
<del> ## request.
<del>
<del> ## In addition to this, the Rack environment must include these
<del> ## Rack-specific variables:
<del>
<del> ## <tt>rack.version</tt>:: The Array [0,1], representing this version of Rack.
<del> ## <tt>rack.url_scheme</tt>:: +http+ or +https+, depending on the request URL.
<del> ## <tt>rack.input</tt>:: See below, the input stream.
<del> ## <tt>rack.errors</tt>:: See below, the error stream.
<del> ## <tt>rack.multithread</tt>:: true if the application object may be simultaneously invoked by another thread in the same process, false otherwise.
<del> ## <tt>rack.multiprocess</tt>:: true if an equivalent application object may be simultaneously invoked by another process, false otherwise.
<del> ## <tt>rack.run_once</tt>:: true if the server expects (but does not guarantee!) that the application will only be invoked this one time during the life of its containing process. Normally, this will only be true for a server based on CGI (or something similar).
<del>
<del> ## The server or the application can store their own data in the
<del> ## environment, too. The keys must contain at least one dot,
<del> ## and should be prefixed uniquely. The prefix <tt>rack.</tt>
<del> ## is reserved for use with the Rack core distribution and must
<del> ## not be used otherwise.
<del> ##
<del>
<del> %w[REQUEST_METHOD SERVER_NAME SERVER_PORT
<del> QUERY_STRING
<del> rack.version rack.input rack.errors
<del> rack.multithread rack.multiprocess rack.run_once].each { |header|
<del> assert("env missing required key #{header}") { env.include? header }
<del> }
<del>
<del> ## The environment must not contain the keys
<del> ## <tt>HTTP_CONTENT_TYPE</tt> or <tt>HTTP_CONTENT_LENGTH</tt>
<del> ## (use the versions without <tt>HTTP_</tt>).
<del> %w[HTTP_CONTENT_TYPE HTTP_CONTENT_LENGTH].each { |header|
<del> assert("env contains #{header}, must use #{header[5,-1]}") {
<del> not env.include? header
<del> }
<del> }
<del>
<del> ## The CGI keys (named without a period) must have String values.
<del> env.each { |key, value|
<del> next if key.include? "." # Skip extensions
<del> assert("env variable #{key} has non-string value #{value.inspect}") {
<del> value.instance_of? String
<del> }
<del> }
<del>
<del> ##
<del> ## There are the following restrictions:
<del>
<del> ## * <tt>rack.version</tt> must be an array of Integers.
<del> assert("rack.version must be an Array, was #{env["rack.version"].class}") {
<del> env["rack.version"].instance_of? Array
<del> }
<del> ## * <tt>rack.url_scheme</tt> must either be +http+ or +https+.
<del> assert("rack.url_scheme unknown: #{env["rack.url_scheme"].inspect}") {
<del> %w[http https].include? env["rack.url_scheme"]
<del> }
<del>
<del> ## * There must be a valid input stream in <tt>rack.input</tt>.
<del> check_input env["rack.input"]
<del> ## * There must be a valid error stream in <tt>rack.errors</tt>.
<del> check_error env["rack.errors"]
<del>
<del> ## * The <tt>REQUEST_METHOD</tt> must be a valid token.
<del> assert("REQUEST_METHOD unknown: #{env["REQUEST_METHOD"]}") {
<del> env["REQUEST_METHOD"] =~ /\A[0-9A-Za-z!\#$%&'*+.^_`|~-]+\z/
<del> }
<del>
<del> ## * The <tt>SCRIPT_NAME</tt>, if non-empty, must start with <tt>/</tt>
<del> assert("SCRIPT_NAME must start with /") {
<del> !env.include?("SCRIPT_NAME") ||
<del> env["SCRIPT_NAME"] == "" ||
<del> env["SCRIPT_NAME"] =~ /\A\//
<del> }
<del> ## * The <tt>PATH_INFO</tt>, if non-empty, must start with <tt>/</tt>
<del> assert("PATH_INFO must start with /") {
<del> !env.include?("PATH_INFO") ||
<del> env["PATH_INFO"] == "" ||
<del> env["PATH_INFO"] =~ /\A\//
<del> }
<del> ## * The <tt>CONTENT_LENGTH</tt>, if given, must consist of digits only.
<del> assert("Invalid CONTENT_LENGTH: #{env["CONTENT_LENGTH"]}") {
<del> !env.include?("CONTENT_LENGTH") || env["CONTENT_LENGTH"] =~ /\A\d+\z/
<del> }
<del>
<del> ## * One of <tt>SCRIPT_NAME</tt> or <tt>PATH_INFO</tt> must be
<del> ## set. <tt>PATH_INFO</tt> should be <tt>/</tt> if
<del> ## <tt>SCRIPT_NAME</tt> is empty.
<del> assert("One of SCRIPT_NAME or PATH_INFO must be set (make PATH_INFO '/' if SCRIPT_NAME is empty)") {
<del> env["SCRIPT_NAME"] || env["PATH_INFO"]
<del> }
<del> ## <tt>SCRIPT_NAME</tt> never should be <tt>/</tt>, but instead be empty.
<del> assert("SCRIPT_NAME cannot be '/', make it '' and PATH_INFO '/'") {
<del> env["SCRIPT_NAME"] != "/"
<del> }
<del> end
<del>
<del> ## === The Input Stream
<del> def check_input(input)
<del> ## The input stream must respond to +gets+, +each+ and +read+.
<del> [:gets, :each, :read].each { |method|
<del> assert("rack.input #{input} does not respond to ##{method}") {
<del> input.respond_to? method
<del> }
<del> }
<del> end
<del>
<del> class InputWrapper
<del> include Assertion
<del>
<del> def initialize(input)
<del> @input = input
<del> end
<del>
<del> def size
<del> @input.size
<del> end
<del>
<del> def rewind
<del> @input.rewind
<del> end
<del>
<del> ## * +gets+ must be called without arguments and return a string,
<del> ## or +nil+ on EOF.
<del> def gets(*args)
<del> assert("rack.input#gets called with arguments") { args.size == 0 }
<del> v = @input.gets
<del> assert("rack.input#gets didn't return a String") {
<del> v.nil? or v.instance_of? String
<del> }
<del> v
<del> end
<del>
<del> ## * +read+ must be called without or with one integer argument
<del> ## and return a string, or +nil+ on EOF.
<del> def read(*args)
<del> assert("rack.input#read called with too many arguments") {
<del> args.size <= 1
<del> }
<del> if args.size == 1
<del> assert("rack.input#read called with non-integer argument") {
<del> args.first.kind_of? Integer
<del> }
<del> end
<del> v = @input.read(*args)
<del> assert("rack.input#read didn't return a String") {
<del> v.nil? or v.instance_of? String
<del> }
<del> v
<del> end
<del>
<del> ## * +each+ must be called without arguments and only yield Strings.
<del> def each(*args)
<del> assert("rack.input#each called with arguments") { args.size == 0 }
<del> @input.each { |line|
<del> assert("rack.input#each didn't yield a String") {
<del> line.instance_of? String
<del> }
<del> yield line
<del> }
<del> end
<del>
<del> ## * +close+ must never be called on the input stream.
<del> def close(*args)
<del> assert("rack.input#close must not be called") { false }
<del> end
<del> end
<del>
<del> ## === The Error Stream
<del> def check_error(error)
<del> ## The error stream must respond to +puts+, +write+ and +flush+.
<del> [:puts, :write, :flush].each { |method|
<del> assert("rack.error #{error} does not respond to ##{method}") {
<del> error.respond_to? method
<del> }
<del> }
<del> end
<del>
<del> class ErrorWrapper
<del> include Assertion
<del>
<del> def initialize(error)
<del> @error = error
<del> end
<del>
<del> ## * +puts+ must be called with a single argument that responds to +to_s+.
<del> def puts(str)
<del> @error.puts str
<del> end
<del>
<del> ## * +write+ must be called with a single argument that is a String.
<del> def write(str)
<del> assert("rack.errors#write not called with a String") { str.instance_of? String }
<del> @error.write str
<del> end
<del>
<del> ## * +flush+ must be called without arguments and must be called
<del> ## in order to make the error appear for sure.
<del> def flush
<del> @error.flush
<del> end
<del>
<del> ## * +close+ must never be called on the error stream.
<del> def close(*args)
<del> assert("rack.errors#close must not be called") { false }
<del> end
<del> end
<del>
<del> ## == The Response
<del>
<del> ## === The Status
<del> def check_status(status)
<del> ## The status, if parsed as integer (+to_i+), must be greater than or equal to 100.
<del> assert("Status must be >=100 seen as integer") { status.to_i >= 100 }
<del> end
<del>
<del> ## === The Headers
<del> def check_headers(header)
<del> ## The header must respond to each, and yield values of key and value.
<del> assert("headers object should respond to #each, but doesn't (got #{header.class} as headers)") {
<del> header.respond_to? :each
<del> }
<del> header.each { |key, value|
<del> ## The header keys must be Strings.
<del> assert("header key must be a string, was #{key.class}") {
<del> key.instance_of? String
<del> }
<del> ## The header must not contain a +Status+ key,
<del> assert("header must not contain Status") { key.downcase != "status" }
<del> ## contain keys with <tt>:</tt> or newlines in their name,
<del> assert("header names must not contain : or \\n") { key !~ /[:\n]/ }
<del> ## contain keys names that end in <tt>-</tt> or <tt>_</tt>,
<del> assert("header names must not end in - or _") { key !~ /[-_]\z/ }
<del> ## but only contain keys that consist of
<del> ## letters, digits, <tt>_</tt> or <tt>-</tt> and start with a letter.
<del> assert("invalid header name: #{key}") { key =~ /\A[a-zA-Z][a-zA-Z0-9_-]*\z/ }
<del>
<del> ## The values of the header must be Strings,
<del> assert("a header value must be a String, but the value of " +
<del> "'#{key}' is a #{value.class}") { value.kind_of? String }
<del>        ## consisting of lines (for multiple header values) separated by "\n".
<del> value.split("\n").each { |item|
<del> ## The lines must not contain characters below 037.
<del> assert("invalid header value #{key}: #{item.inspect}") {
<del> item !~ /[\000-\037]/
<del> }
<del> }
<del> }
<del> end
<del>
<del> ## === The Content-Type
<del> def check_content_type(status, headers)
<del> headers.each { |key, value|
<del> ## There must be a <tt>Content-Type</tt>, except when the
<del> ## +Status+ is 1xx, 204 or 304, in which case there must be none
<del> ## given.
<del> if key.downcase == "content-type"
<del> assert("Content-Type header found in #{status} response, not allowed") {
<del> not Rack::Utils::STATUS_WITH_NO_ENTITY_BODY.include? status.to_i
<del> }
<del> return
<del> end
<del> }
<del> assert("No Content-Type header found") {
<del> Rack::Utils::STATUS_WITH_NO_ENTITY_BODY.include? status.to_i
<del> }
<del> end
<del>
<del> ## === The Content-Length
<del> def check_content_length(status, headers, env)
<del> headers.each { |key, value|
<del> if key.downcase == 'content-length'
<del> ## There must not be a <tt>Content-Length</tt> header when the
<del> ## +Status+ is 1xx, 204 or 304.
<del> assert("Content-Length header found in #{status} response, not allowed") {
<del> not Rack::Utils::STATUS_WITH_NO_ENTITY_BODY.include? status.to_i
<del> }
<del>
<del> bytes = 0
<del> string_body = true
<del>
<del> if @body.respond_to?(:to_ary)
<del> @body.each { |part|
<del> unless part.kind_of?(String)
<del> string_body = false
<del> break
<del> end
<del>
<del> bytes += Rack::Utils.bytesize(part)
<del> }
<del>
<del> if env["REQUEST_METHOD"] == "HEAD"
<del> assert("Response body was given for HEAD request, but should be empty") {
<del> bytes == 0
<del> }
<del> else
<del> if string_body
<del> assert("Content-Length header was #{value}, but should be #{bytes}") {
<del> value == bytes.to_s
<del> }
<del> end
<del> end
<del> end
<del>
<del> return
<del> end
<del> }
<del> end
<del>
<del> ## === The Body
<del> def each
<del> @closed = false
<del> ## The Body must respond to #each
<del> @body.each { |part|
<del> ## and must only yield String values.
<del> assert("Body yielded non-string value #{part.inspect}") {
<del> part.instance_of? String
<del> }
<del> yield part
<del> }
<del> ##
<del> ## If the Body responds to #close, it will be called after iteration.
<del> # XXX howto: assert("Body has not been closed") { @closed }
<del>
<del>
<del> ##
<del> ## If the Body responds to #to_path, it must return a String
<del> ## identifying the location of a file whose contents are identical
<del> ## to that produced by calling #each.
<del>
<del> if @body.respond_to?(:to_path)
<del> assert("The file identified by body.to_path does not exist") {
<del> ::File.exist? @body.to_path
<del> }
<del> end
<del>
<del> ##
<del> ## The Body commonly is an Array of Strings, the application
<del> ## instance itself, or a File-like object.
<del> end
<del>
<del> def close
<del> @closed = true
<del> @body.close if @body.respond_to?(:close)
<del> end
<del>
<del> # :startdoc:
<del>
<del> end
<del>end
<del>
<del>## == Thanks
<del>## Some parts of this specification are adopted from PEP333: Python
<del>## Web Server Gateway Interface
<del>## v1.0 (http://www.python.org/dev/peps/pep-0333/). I'd like to thank
<del>## everyone involved in that effort.
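For a sense of how the Lint wrapper above is exercised, here is a minimal, illustrative sketch; the lambda app and its values are invented, and Rack::MockRequest is the helper defined later in this same vendored copy.

require 'rack/lint'
require 'rack/mock'

# A trivial app that satisfies the spec checked by Rack::Lint.
app = lambda { |env|
  [200, { "Content-Type" => "text/plain", "Content-Length" => "2" }, ["OK"]]
}

# Any violation of the protocol documented above raises Rack::Lint::LintError.
response = Rack::MockRequest.new(Rack::Lint.new(app)).get("/")
response.status  # => 200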
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/lobster.rb
<del>require 'zlib'
<del>
<del>require 'rack/request'
<del>require 'rack/response'
<del>
<del>module Rack
<del> # Paste has a Pony, Rack has a Lobster!
<del> class Lobster
<del> LobsterString = Zlib::Inflate.inflate("eJx9kEEOwyAMBO99xd7MAcytUhPlJyj2
<del> P6jy9i4k9EQyGAnBarEXeCBqSkntNXsi/ZCvC48zGQoZKikGrFMZvgS5ZHd+aGWVuWwhVF0
<del> t1drVmiR42HcWNz5w3QanT+2gIvTVCiE1lm1Y0eU4JGmIIbaKwextKn8rvW+p5PIwFl8ZWJ
<del> I8jyiTlhTcYXkekJAzTyYN6E08A+dk8voBkAVTJQ==".delete("\n ").unpack("m*")[0])
<del>
<del> LambdaLobster = lambda { |env|
<del> if env["QUERY_STRING"].include?("flip")
<del> lobster = LobsterString.split("\n").
<del> map { |line| line.ljust(42).reverse }.
<del> join("\n")
<del> href = "?"
<del> else
<del> lobster = LobsterString
<del> href = "?flip"
<del> end
<del>
<del> content = ["<title>Lobstericious!</title>",
<del> "<pre>", lobster, "</pre>",
<del> "<a href='#{href}'>flip!</a>"]
<del> length = content.inject(0) { |a,e| a+e.size }.to_s
<del> [200, {"Content-Type" => "text/html", "Content-Length" => length}, content]
<del> }
<del>
<del> def call(env)
<del> req = Request.new(env)
<del> if req.GET["flip"] == "left"
<del> lobster = LobsterString.split("\n").
<del> map { |line| line.ljust(42).reverse }.
<del> join("\n")
<del> href = "?flip=right"
<del> elsif req.GET["flip"] == "crash"
<del> raise "Lobster crashed"
<del> else
<del> lobster = LobsterString
<del> href = "?flip=left"
<del> end
<del>
<del> res = Response.new
<del> res.write "<title>Lobstericious!</title>"
<del> res.write "<pre>"
<del> res.write lobster
<del> res.write "</pre>"
<del> res.write "<p><a href='#{href}'>flip!</a></p>"
<del> res.write "<p><a href='?flip=crash'>crash!</a></p>"
<del> res.finish
<del> end
<del>
<del> end
<del>end
<del>
<del>if $0 == __FILE__
<del> require 'rack'
<del> require 'rack/showexceptions'
<del> Rack::Handler::WEBrick.run \
<del> Rack::ShowExceptions.new(Rack::Lint.new(Rack::Lobster.new)),
<del> :Port => 9292
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/lock.rb
<del>module Rack
<del> class Lock
<del> FLAG = 'rack.multithread'.freeze
<del>
<del> def initialize(app, lock = Mutex.new)
<del> @app, @lock = app, lock
<del> end
<del>
<del> def call(env)
<del> old, env[FLAG] = env[FLAG], false
<del> @lock.synchronize { @app.call(env) }
<del> ensure
<del> env[FLAG] = old
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/methodoverride.rb
<del>module Rack
<del> class MethodOverride
<del> HTTP_METHODS = %w(GET HEAD PUT POST DELETE OPTIONS)
<del>
<del> METHOD_OVERRIDE_PARAM_KEY = "_method".freeze
<del> HTTP_METHOD_OVERRIDE_HEADER = "HTTP_X_HTTP_METHOD_OVERRIDE".freeze
<del>
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> if env["REQUEST_METHOD"] == "POST"
<del> req = Request.new(env)
<del> method = req.POST[METHOD_OVERRIDE_PARAM_KEY] ||
<del> env[HTTP_METHOD_OVERRIDE_HEADER]
<del> method = method.to_s.upcase
<del> if HTTP_METHODS.include?(method)
<del> env["rack.methodoverride.original_method"] = env["REQUEST_METHOD"]
<del> env["REQUEST_METHOD"] = method
<del> end
<del> end
<del>
<del> @app.call(env)
<del> end
<del> end
<del>end
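The override convention above lets HTML forms, which can only GET and POST, express PUT and DELETE. A hedged sketch, with invented paths and an invented echo app:

require 'rack/methodoverride'
require 'rack/mock'

# Echo the request method so the rewrite is visible.
echo = lambda { |env| [200, { "Content-Type" => "text/plain" }, [env["REQUEST_METHOD"]]] }

app = Rack::MethodOverride.new(echo)
# A form POST carrying _method=put is dispatched as a PUT.
Rack::MockRequest.new(app).post("/articles/1", :input => "_method=put").body  # => "PUT"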
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/mime.rb
<del>module Rack
<del> module Mime
<del> # Returns String with mime type if found, otherwise use +fallback+.
<del> # +ext+ should be filename extension in the '.ext' format that
<del> # File.extname(file) returns.
<del> # +fallback+ may be any object
<del> #
<del> # Also see the documentation for MIME_TYPES
<del> #
<del> # Usage:
<del> # Rack::Mime.mime_type('.foo')
<del> #
<del> # This is a shortcut for:
<del> # Rack::Mime::MIME_TYPES.fetch('.foo', 'application/octet-stream')
<del>
<del> def mime_type(ext, fallback='application/octet-stream')
<del> MIME_TYPES.fetch(ext, fallback)
<del> end
<del> module_function :mime_type
<del>
<del>    # List of most common mime-types, selected from various sources
<del> # according to their usefulness in a webserving scope for Ruby
<del> # users.
<del> #
<del> # To amend this list with your local mime.types list you can use:
<del> #
<del> # require 'webrick/httputils'
<del> # list = WEBrick::HTTPUtils.load_mime_types('/etc/mime.types')
<del> # Rack::Mime::MIME_TYPES.merge!(list)
<del> #
<del> # To add the list mongrel provides, use:
<del> #
<del> # require 'mongrel/handlers'
<del> # Rack::Mime::MIME_TYPES.merge!(Mongrel::DirHandler::MIME_TYPES)
<del>
<del> MIME_TYPES = {
<del> ".3gp" => "video/3gpp",
<del> ".a" => "application/octet-stream",
<del> ".ai" => "application/postscript",
<del> ".aif" => "audio/x-aiff",
<del> ".aiff" => "audio/x-aiff",
<del> ".asc" => "application/pgp-signature",
<del> ".asf" => "video/x-ms-asf",
<del> ".asm" => "text/x-asm",
<del> ".asx" => "video/x-ms-asf",
<del> ".atom" => "application/atom+xml",
<del> ".au" => "audio/basic",
<del> ".avi" => "video/x-msvideo",
<del> ".bat" => "application/x-msdownload",
<del> ".bin" => "application/octet-stream",
<del> ".bmp" => "image/bmp",
<del> ".bz2" => "application/x-bzip2",
<del> ".c" => "text/x-c",
<del> ".cab" => "application/vnd.ms-cab-compressed",
<del> ".cc" => "text/x-c",
<del> ".chm" => "application/vnd.ms-htmlhelp",
<del> ".class" => "application/octet-stream",
<del> ".com" => "application/x-msdownload",
<del> ".conf" => "text/plain",
<del> ".cpp" => "text/x-c",
<del> ".crt" => "application/x-x509-ca-cert",
<del> ".css" => "text/css",
<del> ".csv" => "text/csv",
<del> ".cxx" => "text/x-c",
<del> ".deb" => "application/x-debian-package",
<del> ".der" => "application/x-x509-ca-cert",
<del> ".diff" => "text/x-diff",
<del> ".djv" => "image/vnd.djvu",
<del> ".djvu" => "image/vnd.djvu",
<del> ".dll" => "application/x-msdownload",
<del> ".dmg" => "application/octet-stream",
<del> ".doc" => "application/msword",
<del> ".dot" => "application/msword",
<del> ".dtd" => "application/xml-dtd",
<del> ".dvi" => "application/x-dvi",
<del> ".ear" => "application/java-archive",
<del> ".eml" => "message/rfc822",
<del> ".eps" => "application/postscript",
<del> ".exe" => "application/x-msdownload",
<del> ".f" => "text/x-fortran",
<del> ".f77" => "text/x-fortran",
<del> ".f90" => "text/x-fortran",
<del> ".flv" => "video/x-flv",
<del> ".for" => "text/x-fortran",
<del> ".gem" => "application/octet-stream",
<del> ".gemspec" => "text/x-script.ruby",
<del> ".gif" => "image/gif",
<del> ".gz" => "application/x-gzip",
<del> ".h" => "text/x-c",
<del> ".hh" => "text/x-c",
<del> ".htm" => "text/html",
<del> ".html" => "text/html",
<del> ".ico" => "image/vnd.microsoft.icon",
<del> ".ics" => "text/calendar",
<del> ".ifb" => "text/calendar",
<del> ".iso" => "application/octet-stream",
<del> ".jar" => "application/java-archive",
<del> ".java" => "text/x-java-source",
<del> ".jnlp" => "application/x-java-jnlp-file",
<del> ".jpeg" => "image/jpeg",
<del> ".jpg" => "image/jpeg",
<del> ".js" => "application/javascript",
<del> ".json" => "application/json",
<del> ".log" => "text/plain",
<del> ".m3u" => "audio/x-mpegurl",
<del> ".m4v" => "video/mp4",
<del> ".man" => "text/troff",
<del> ".mathml" => "application/mathml+xml",
<del> ".mbox" => "application/mbox",
<del> ".mdoc" => "text/troff",
<del> ".me" => "text/troff",
<del> ".mid" => "audio/midi",
<del> ".midi" => "audio/midi",
<del> ".mime" => "message/rfc822",
<del> ".mml" => "application/mathml+xml",
<del> ".mng" => "video/x-mng",
<del> ".mov" => "video/quicktime",
<del> ".mp3" => "audio/mpeg",
<del> ".mp4" => "video/mp4",
<del> ".mp4v" => "video/mp4",
<del> ".mpeg" => "video/mpeg",
<del> ".mpg" => "video/mpeg",
<del> ".ms" => "text/troff",
<del> ".msi" => "application/x-msdownload",
<del> ".odp" => "application/vnd.oasis.opendocument.presentation",
<del> ".ods" => "application/vnd.oasis.opendocument.spreadsheet",
<del> ".odt" => "application/vnd.oasis.opendocument.text",
<del> ".ogg" => "application/ogg",
<del> ".p" => "text/x-pascal",
<del> ".pas" => "text/x-pascal",
<del> ".pbm" => "image/x-portable-bitmap",
<del> ".pdf" => "application/pdf",
<del> ".pem" => "application/x-x509-ca-cert",
<del> ".pgm" => "image/x-portable-graymap",
<del> ".pgp" => "application/pgp-encrypted",
<del> ".pkg" => "application/octet-stream",
<del> ".pl" => "text/x-script.perl",
<del> ".pm" => "text/x-script.perl-module",
<del> ".png" => "image/png",
<del> ".pnm" => "image/x-portable-anymap",
<del> ".ppm" => "image/x-portable-pixmap",
<del> ".pps" => "application/vnd.ms-powerpoint",
<del> ".ppt" => "application/vnd.ms-powerpoint",
<del> ".ps" => "application/postscript",
<del> ".psd" => "image/vnd.adobe.photoshop",
<del> ".py" => "text/x-script.python",
<del> ".qt" => "video/quicktime",
<del> ".ra" => "audio/x-pn-realaudio",
<del> ".rake" => "text/x-script.ruby",
<del> ".ram" => "audio/x-pn-realaudio",
<del> ".rar" => "application/x-rar-compressed",
<del> ".rb" => "text/x-script.ruby",
<del> ".rdf" => "application/rdf+xml",
<del> ".roff" => "text/troff",
<del> ".rpm" => "application/x-redhat-package-manager",
<del> ".rss" => "application/rss+xml",
<del> ".rtf" => "application/rtf",
<del> ".ru" => "text/x-script.ruby",
<del> ".s" => "text/x-asm",
<del> ".sgm" => "text/sgml",
<del> ".sgml" => "text/sgml",
<del> ".sh" => "application/x-sh",
<del> ".sig" => "application/pgp-signature",
<del> ".snd" => "audio/basic",
<del> ".so" => "application/octet-stream",
<del> ".svg" => "image/svg+xml",
<del> ".svgz" => "image/svg+xml",
<del> ".swf" => "application/x-shockwave-flash",
<del> ".t" => "text/troff",
<del> ".tar" => "application/x-tar",
<del> ".tbz" => "application/x-bzip-compressed-tar",
<del> ".tcl" => "application/x-tcl",
<del> ".tex" => "application/x-tex",
<del> ".texi" => "application/x-texinfo",
<del> ".texinfo" => "application/x-texinfo",
<del> ".text" => "text/plain",
<del> ".tif" => "image/tiff",
<del> ".tiff" => "image/tiff",
<del> ".torrent" => "application/x-bittorrent",
<del> ".tr" => "text/troff",
<del> ".txt" => "text/plain",
<del> ".vcf" => "text/x-vcard",
<del> ".vcs" => "text/x-vcalendar",
<del> ".vrml" => "model/vrml",
<del> ".war" => "application/java-archive",
<del> ".wav" => "audio/x-wav",
<del> ".wma" => "audio/x-ms-wma",
<del> ".wmv" => "video/x-ms-wmv",
<del> ".wmx" => "video/x-ms-wmx",
<del> ".wrl" => "model/vrml",
<del> ".wsdl" => "application/wsdl+xml",
<del> ".xbm" => "image/x-xbitmap",
<del> ".xhtml" => "application/xhtml+xml",
<del> ".xls" => "application/vnd.ms-excel",
<del> ".xml" => "application/xml",
<del> ".xpm" => "image/x-xpixmap",
<del> ".xsl" => "application/xml",
<del> ".xslt" => "application/xslt+xml",
<del> ".yaml" => "text/yaml",
<del> ".yml" => "text/yaml",
<del> ".zip" => "application/zip",
<del> }
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/mock.rb
<del>require 'uri'
<del>require 'stringio'
<del>require 'rack/lint'
<del>require 'rack/utils'
<del>require 'rack/response'
<del>
<del>module Rack
<del>  # Rack::MockRequest helps you test your Rack application without
<del> # actually using HTTP.
<del> #
<del> # After performing a request on a URL with get/post/put/delete, it
<del> # returns a MockResponse with useful helper methods for effective
<del> # testing.
<del> #
<del> # You can pass a hash with additional configuration to the
<del> # get/post/put/delete.
<del> # <tt>:input</tt>:: A String or IO-like to be used as rack.input.
<del> # <tt>:fatal</tt>:: Raise a FatalWarning if the app writes to rack.errors.
<del> # <tt>:lint</tt>:: If true, wrap the application in a Rack::Lint.
<del>
<del> class MockRequest
<del> class FatalWarning < RuntimeError
<del> end
<del>
<del> class FatalWarner
<del> def puts(warning)
<del> raise FatalWarning, warning
<del> end
<del>
<del> def write(warning)
<del> raise FatalWarning, warning
<del> end
<del>
<del> def flush
<del> end
<del>
<del> def string
<del> ""
<del> end
<del> end
<del>
<del> DEFAULT_ENV = {
<del> "rack.version" => [0,1],
<del> "rack.input" => StringIO.new,
<del> "rack.errors" => StringIO.new,
<del> "rack.multithread" => true,
<del> "rack.multiprocess" => true,
<del> "rack.run_once" => false,
<del> }
<del>
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def get(uri, opts={}) request("GET", uri, opts) end
<del> def post(uri, opts={}) request("POST", uri, opts) end
<del> def put(uri, opts={}) request("PUT", uri, opts) end
<del> def delete(uri, opts={}) request("DELETE", uri, opts) end
<del>
<del> def request(method="GET", uri="", opts={})
<del> env = self.class.env_for(uri, opts.merge(:method => method))
<del>
<del> if opts[:lint]
<del> app = Rack::Lint.new(@app)
<del> else
<del> app = @app
<del> end
<del>
<del> errors = env["rack.errors"]
<del> MockResponse.new(*(app.call(env) + [errors]))
<del> end
<del>
<del> # Return the Rack environment used for a request to +uri+.
<del> def self.env_for(uri="", opts={})
<del> uri = URI(uri)
<del> env = DEFAULT_ENV.dup
<del>
<del> env["REQUEST_METHOD"] = opts[:method] || "GET"
<del> env["SERVER_NAME"] = uri.host || "example.org"
<del> env["SERVER_PORT"] = uri.port ? uri.port.to_s : "80"
<del> env["QUERY_STRING"] = uri.query.to_s
<del> env["PATH_INFO"] = (!uri.path || uri.path.empty?) ? "/" : uri.path
<del> env["rack.url_scheme"] = uri.scheme || "http"
<del>
<del> env["SCRIPT_NAME"] = opts[:script_name] || ""
<del>
<del> if opts[:fatal]
<del> env["rack.errors"] = FatalWarner.new
<del> else
<del> env["rack.errors"] = StringIO.new
<del> end
<del>
<del> opts[:input] ||= ""
<del> if String === opts[:input]
<del> env["rack.input"] = StringIO.new(opts[:input])
<del> else
<del> env["rack.input"] = opts[:input]
<del> end
<del>
<del> env["CONTENT_LENGTH"] ||= env["rack.input"].length.to_s
<del>
<del> opts.each { |field, value|
<del> env[field] = value if String === field
<del> }
<del>
<del> env
<del> end
<del> end
<del>
<del> # Rack::MockResponse provides useful helpers for testing your apps.
<del> # Usually, you don't create the MockResponse on your own, but use
<del> # MockRequest.
<del>
<del> class MockResponse
<del> def initialize(status, headers, body, errors=StringIO.new(""))
<del> @status = status.to_i
<del>
<del> @original_headers = headers
<del> @headers = Rack::Utils::HeaderHash.new
<del> headers.each { |field, values|
<del> @headers[field] = values
<del> @headers[field] = "" if values.empty?
<del> }
<del>
<del> @body = ""
<del> body.each { |part| @body << part }
<del>
<del> @errors = errors.string
<del> end
<del>
<del> # Status
<del> attr_reader :status
<del>
<del> # Headers
<del> attr_reader :headers, :original_headers
<del>
<del> def [](field)
<del> headers[field]
<del> end
<del>
<del>
<del> # Body
<del> attr_reader :body
<del>
<del> def =~(other)
<del> @body =~ other
<del> end
<del>
<del> def match(other)
<del> @body.match other
<del> end
<del>
<del>
<del> # Errors
<del> attr_accessor :errors
<del>
<del>
<del> include Response::Helpers
<del> end
<del>end
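A short, illustrative use of the testing helpers above in isolation; the greeting app is a stand-in for a real application:

require 'rack/mock'

app = lambda { |env|
  req  = Rack::Request.new(env)
  body = "hello #{req.params['name']}"
  [200, { "Content-Type" => "text/plain", "Content-Length" => body.size.to_s }, [body]]
}

res = Rack::MockRequest.new(app).get("/greet?name=world", :lint => true)
res.ok?              # => true
res["Content-Type"]  # => "text/plain"
res.body             # => "hello world"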
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/recursive.rb
<del>require 'uri'
<del>
<del>module Rack
<del> # Rack::ForwardRequest gets caught by Rack::Recursive and redirects
<del> # the current request to the app at +url+.
<del> #
<del> # raise ForwardRequest.new("/not-found")
<del> #
<del>
<del> class ForwardRequest < Exception
<del> attr_reader :url, :env
<del>
<del> def initialize(url, env={})
<del> @url = URI(url)
<del> @env = env
<del>
<del> @env["PATH_INFO"] = @url.path
<del> @env["QUERY_STRING"] = @url.query if @url.query
<del> @env["HTTP_HOST"] = @url.host if @url.host
<del> @env["HTTP_PORT"] = @url.port if @url.port
<del> @env["rack.url_scheme"] = @url.scheme if @url.scheme
<del>
<del> super "forwarding to #{url}"
<del> end
<del> end
<del>
<del> # Rack::Recursive allows applications called down the chain to
<del> # include data from other applications (by using
<del>  # <tt>rack['rack.recursive.include'][...]</tt>) or to raise a
<del> # ForwardRequest to redirect internally.
<del>
<del> class Recursive
<del> def initialize(app)
<del> @app = app
<del> end
<del>
<del> def call(env)
<del> @script_name = env["SCRIPT_NAME"]
<del> @app.call(env.merge('rack.recursive.include' => method(:include)))
<del> rescue ForwardRequest => req
<del> call(env.merge(req.env))
<del> end
<del>
<del> def include(env, path)
<del> unless path.index(@script_name) == 0 && (path[@script_name.size] == ?/ ||
<del> path[@script_name.size].nil?)
<del> raise ArgumentError, "can only include below #{@script_name}, not #{path}"
<del> end
<del>
<del> env = env.merge("PATH_INFO" => path, "SCRIPT_NAME" => @script_name,
<del> "REQUEST_METHOD" => "GET",
<del> "CONTENT_LENGTH" => "0", "CONTENT_TYPE" => "",
<del> "rack.input" => StringIO.new(""))
<del> @app.call(env)
<del> end
<del> end
<del>end
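To make the forwarding behaviour above concrete, a minimal sketch; the paths and response text are invented:

require 'rack/recursive'
require 'rack/mock'

inner = lambda { |env|
  if env["PATH_INFO"] == "/old"
    raise Rack::ForwardRequest.new("/new")   # caught by Rack::Recursive below
  else
    [200, { "Content-Type" => "text/plain" }, ["you are at #{env['PATH_INFO']}"]]
  end
}

app = Rack::Recursive.new(inner)
Rack::MockRequest.new(app).get("/old").body  # => "you are at /new"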
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/reloader.rb
<del>require 'thread'
<del>
<del>module Rack
<del> # Rack::Reloader checks on every request, but at most every +secs+
<del>  # seconds, whether a loaded file has changed, and reloads it, logging to
<del> # rack.errors.
<del> #
<del> # It is recommended you use ShowExceptions to catch SyntaxErrors etc.
<del>
<del> class Reloader
<del> def initialize(app, secs=10)
<del> @app = app
<del> @secs = secs # reload every @secs seconds max
<del> @last = Time.now
<del> end
<del>
<del> def call(env)
<del> if Time.now > @last + @secs
<del> Thread.exclusive {
<del> reload!(env['rack.errors'])
<del> @last = Time.now
<del> }
<del> end
<del>
<del> @app.call(env)
<del> end
<del>
<del> def reload!(stderr=$stderr)
<del> need_reload = $LOADED_FEATURES.find_all { |loaded|
<del> begin
<del> if loaded =~ /\A[.\/]/ # absolute filename or 1.9
<del> abs = loaded
<del> else
<del> abs = $LOAD_PATH.map { |path| ::File.join(path, loaded) }.
<del> find { |file| ::File.exist? file }
<del> end
<del>
<del> if abs
<del> ::File.mtime(abs) > @last - @secs rescue false
<del> else
<del> false
<del> end
<del> end
<del> }
<del>
<del> need_reload.each { |l|
<del> $LOADED_FEATURES.delete l
<del> }
<del>
<del> need_reload.each { |to_load|
<del> begin
<del> if require to_load
<del> stderr.puts "#{self.class}: reloaded `#{to_load}'"
<del> end
<del> rescue LoadError, SyntaxError => e
<del> raise e # Possibly ShowExceptions
<del> end
<del> }
<del>
<del> stderr.flush
<del> need_reload
<del> end
<del> end
<del>end
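In practice the reloader is usually wired up in a rackup file during development; a hedged sketch, where the app and the interval are placeholders:

# config.ru (illustrative)
require 'rack/reloader'
require 'rack/showexceptions'

use Rack::ShowExceptions          # surfaces SyntaxErrors raised during a reload
use Rack::Reloader, 10            # re-check changed files at most every 10 seconds
run lambda { |env| [200, { "Content-Type" => "text/plain" }, ["ok"]] }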
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/request.rb
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Rack::Request provides a convenient interface to a Rack
<del>  # environment. It is stateless; the environment +env+ passed to the
<del> # constructor will be directly modified.
<del> #
<del> # req = Rack::Request.new(env)
<del> # req.post?
<del> # req.params["data"]
<del> #
<del>  # The environment hash passed in stores a reference to the instantiated
<del>  # Request object, so a new instance is only created if the environment
<del>  # does not already hold one.
<del>
<del> class Request
<del> # The environment of the request.
<del> attr_reader :env
<del>
<del> def self.new(env)
<del> if self == Rack::Request
<del> env["rack.request"] ||= super
<del> else
<del> super
<del> end
<del> end
<del>
<del> def initialize(env)
<del> @env = env
<del> end
<del>
<del> def body; @env["rack.input"] end
<del> def scheme; @env["rack.url_scheme"] end
<del> def script_name; @env["SCRIPT_NAME"].to_s end
<del> def path_info; @env["PATH_INFO"].to_s end
<del> def port; @env["SERVER_PORT"].to_i end
<del> def request_method; @env["REQUEST_METHOD"] end
<del> def query_string; @env["QUERY_STRING"].to_s end
<del> def content_length; @env['CONTENT_LENGTH'] end
<del> def content_type; @env['CONTENT_TYPE'] end
<del>
<del> # The media type (type/subtype) portion of the CONTENT_TYPE header
<del> # without any media type parameters. e.g., when CONTENT_TYPE is
<del> # "text/plain;charset=utf-8", the media-type is "text/plain".
<del> #
<del> # For more information on the use of media types in HTTP, see:
<del> # http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.7
<del> def media_type
<del> content_type && content_type.split(/\s*[;,]\s*/, 2)[0].downcase
<del> end
<del>
<del> # The media type parameters provided in CONTENT_TYPE as a Hash, or
<del> # an empty Hash if no CONTENT_TYPE or media-type parameters were
<del> # provided. e.g., when the CONTENT_TYPE is "text/plain;charset=utf-8",
<del> # this method responds with the following Hash:
<del> # { 'charset' => 'utf-8' }
<del> def media_type_params
<del> return {} if content_type.nil?
<del> content_type.split(/\s*[;,]\s*/)[1..-1].
<del> collect { |s| s.split('=', 2) }.
<del> inject({}) { |hash,(k,v)| hash[k.downcase] = v ; hash }
<del> end
<del>
<del> # The character set of the request body if a "charset" media type
<del> # parameter was given, or nil if no "charset" was specified. Note
<del> # that, per RFC2616, text/* media types that specify no explicit
<del> # charset are to be considered ISO-8859-1.
<del> def content_charset
<del> media_type_params['charset']
<del> end
<del>
<del> def host
<del> # Remove port number.
<del> (@env["HTTP_HOST"] || @env["SERVER_NAME"]).gsub(/:\d+\z/, '')
<del> end
<del>
<del> def script_name=(s); @env["SCRIPT_NAME"] = s.to_s end
<del> def path_info=(s); @env["PATH_INFO"] = s.to_s end
<del>
<del> def get?; request_method == "GET" end
<del> def post?; request_method == "POST" end
<del> def put?; request_method == "PUT" end
<del> def delete?; request_method == "DELETE" end
<del> def head?; request_method == "HEAD" end
<del>
<del> # The set of form-data media-types. Requests that do not indicate
<del>    # one of the media types present in this list will not be eligible
<del> # for form-data / param parsing.
<del> FORM_DATA_MEDIA_TYPES = [
<del> nil,
<del> 'application/x-www-form-urlencoded',
<del> 'multipart/form-data'
<del> ]
<del>
<del> # Determine whether the request body contains form-data by checking
<del> # the request media_type against registered form-data media-types:
<del> # "application/x-www-form-urlencoded" and "multipart/form-data". The
<del> # list of form-data media types can be modified through the
<del> # +FORM_DATA_MEDIA_TYPES+ array.
<del> def form_data?
<del> FORM_DATA_MEDIA_TYPES.include?(media_type)
<del> end
<del>
<del>    # Returns the data received in the query string.
<del> def GET
<del> if @env["rack.request.query_string"] == query_string
<del> @env["rack.request.query_hash"]
<del> else
<del> @env["rack.request.query_string"] = query_string
<del> @env["rack.request.query_hash"] =
<del> Utils.parse_nested_query(query_string)
<del> end
<del> end
<del>
<del>    # Returns the data received in the request body.
<del>    #
<del>    # This method supports both application/x-www-form-urlencoded and
<del> # multipart/form-data.
<del> def POST
<del> if @env["rack.request.form_input"].eql? @env["rack.input"]
<del> @env["rack.request.form_hash"]
<del> elsif form_data?
<del> @env["rack.request.form_input"] = @env["rack.input"]
<del> unless @env["rack.request.form_hash"] =
<del> Utils::Multipart.parse_multipart(env)
<del> form_vars = @env["rack.input"].read
<del>
<del> # Fix for Safari Ajax postings that always append \0
<del> form_vars.sub!(/\0\z/, '')
<del>
<del> @env["rack.request.form_vars"] = form_vars
<del> @env["rack.request.form_hash"] = Utils.parse_nested_query(form_vars)
<del>
<del> begin
<del> @env["rack.input"].rewind if @env["rack.input"].respond_to?(:rewind)
<del> rescue Errno::ESPIPE
<del> # Handles exceptions raised by input streams that cannot be rewound
<del> # such as when using plain CGI under Apache
<del> end
<del> end
<del> @env["rack.request.form_hash"]
<del> else
<del> {}
<del> end
<del> end
<del>
<del> # The union of GET and POST data.
<del> def params
<del> self.put? ? self.GET : self.GET.update(self.POST)
<del> rescue EOFError => e
<del> self.GET
<del> end
<del>
<del> # shortcut for request.params[key]
<del> def [](key)
<del> params[key.to_s]
<del> end
<del>
<del> # shortcut for request.params[key] = value
<del> def []=(key, value)
<del> params[key.to_s] = value
<del> end
<del>
<del> # like Hash#values_at
<del> def values_at(*keys)
<del> keys.map{|key| params[key] }
<del> end
<del>
<del> # the referer of the client or '/'
<del> def referer
<del> @env['HTTP_REFERER'] || '/'
<del> end
<del> alias referrer referer
<del>
<del>
<del> def cookies
<del> return {} unless @env["HTTP_COOKIE"]
<del>
<del> if @env["rack.request.cookie_string"] == @env["HTTP_COOKIE"]
<del> @env["rack.request.cookie_hash"]
<del> else
<del> @env["rack.request.cookie_string"] = @env["HTTP_COOKIE"]
<del> # According to RFC 2109:
<del> # If multiple cookies satisfy the criteria above, they are ordered in
<del> # the Cookie header such that those with more specific Path attributes
<del> # precede those with less specific. Ordering with respect to other
<del> # attributes (e.g., Domain) is unspecified.
<del> @env["rack.request.cookie_hash"] =
<del> Utils.parse_query(@env["rack.request.cookie_string"], ';,').inject({}) {|h,(k,v)|
<del> h[k] = Array === v ? v.first : v
<del> h
<del> }
<del> end
<del> end
<del>
<del> def xhr?
<del> @env["HTTP_X_REQUESTED_WITH"] == "XMLHttpRequest"
<del> end
<del>
<del> # Tries to return a remake of the original request URL as a string.
<del> def url
<del> url = scheme + "://"
<del> url << host
<del>
<del> if scheme == "https" && port != 443 ||
<del> scheme == "http" && port != 80
<del> url << ":#{port}"
<del> end
<del>
<del> url << fullpath
<del>
<del> url
<del> end
<del>
<del> def fullpath
<del> path = script_name + path_info
<del> path << "?" << query_string unless query_string.empty?
<del> path
<del> end
<del>
<del> def accept_encoding
<del> @env["HTTP_ACCEPT_ENCODING"].to_s.split(/,\s*/).map do |part|
<del> m = /^([^\s,]+?)(?:;\s*q=(\d+(?:\.\d+)?))?$/.match(part) # From WEBrick
<del>
<del> if m
<del> [m[1], (m[2] || 1.0).to_f]
<del> else
<del> raise "Invalid value for Accept-Encoding: #{part.inspect}"
<del> end
<del> end
<del> end
<del>
<del> def ip
<del> if addr = @env['HTTP_X_FORWARDED_FOR']
<del> addr.split(',').last.strip
<del> else
<del> @env['REMOTE_ADDR']
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/response.rb
<del>require 'rack/request'
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Rack::Response provides a convenient interface to create a Rack
<del> # response.
<del> #
<del> # It allows setting of headers and cookies, and provides useful
<del>  # defaults (an OK response containing HTML).
<del> #
<del> # You can use Response#write to iteratively generate your response,
<del> # but note that this is buffered by Rack::Response until you call
<del> # +finish+. +finish+ however can take a block inside which calls to
<del>  # +write+ are synchronous with the Rack response.
<del> #
<del> # Your application's +call+ should end returning Response#finish.
<del>
<del> class Response
<del> attr_accessor :length
<del>
<del> def initialize(body=[], status=200, header={}, &block)
<del> @status = status
<del> @header = Utils::HeaderHash.new({"Content-Type" => "text/html"}.
<del> merge(header))
<del>
<del> @writer = lambda { |x| @body << x }
<del> @block = nil
<del> @length = 0
<del>
<del> @body = []
<del>
<del> if body.respond_to? :to_str
<del> write body.to_str
<del> elsif body.respond_to?(:each)
<del> body.each { |part|
<del> write part.to_s
<del> }
<del> else
<del> raise TypeError, "stringable or iterable required"
<del> end
<del>
<del> yield self if block_given?
<del> end
<del>
<del> attr_reader :header
<del> attr_accessor :status, :body
<del>
<del> def [](key)
<del> header[key]
<del> end
<del>
<del> def []=(key, value)
<del> header[key] = value
<del> end
<del>
<del> def set_cookie(key, value)
<del> case value
<del> when Hash
<del> domain = "; domain=" + value[:domain] if value[:domain]
<del> path = "; path=" + value[:path] if value[:path]
<del> # According to RFC 2109, we need dashes here.
<del> # N.B.: cgi.rb uses spaces...
<del> expires = "; expires=" + value[:expires].clone.gmtime.
<del> strftime("%a, %d-%b-%Y %H:%M:%S GMT") if value[:expires]
<del> secure = "; secure" if value[:secure]
<del> httponly = "; HttpOnly" if value[:httponly]
<del> value = value[:value]
<del> end
<del> value = [value] unless Array === value
<del> cookie = Utils.escape(key) + "=" +
<del> value.map { |v| Utils.escape v }.join("&") +
<del> "#{domain}#{path}#{expires}#{secure}#{httponly}"
<del>
<del> case self["Set-Cookie"]
<del> when Array
<del> self["Set-Cookie"] << cookie
<del> when String
<del> self["Set-Cookie"] = [self["Set-Cookie"], cookie]
<del> when nil
<del> self["Set-Cookie"] = cookie
<del> end
<del> end
<del>
<del> def delete_cookie(key, value={})
<del> unless Array === self["Set-Cookie"]
<del> self["Set-Cookie"] = [self["Set-Cookie"]].compact
<del> end
<del>
<del> self["Set-Cookie"].reject! { |cookie|
<del> cookie =~ /\A#{Utils.escape(key)}=/
<del> }
<del>
<del> set_cookie(key,
<del> {:value => '', :path => nil, :domain => nil,
<del> :expires => Time.at(0) }.merge(value))
<del> end
<del>
<del>
<del> def finish(&block)
<del> @block = block
<del>
<del> if [204, 304].include?(status.to_i)
<del> header.delete "Content-Type"
<del> [status.to_i, header.to_hash, []]
<del> else
<del> [status.to_i, header.to_hash, self]
<del> end
<del> end
<del> alias to_a finish # For *response
<del>
<del> def each(&callback)
<del> @body.each(&callback)
<del> @writer = callback
<del> @block.call(self) if @block
<del> end
<del>
<del> # Append to body and update Content-Length.
<del> #
<del> # NOTE: Do not mix #write and direct #body access!
<del> #
<del> def write(str)
<del> s = str.to_s
<del> @length += s.size
<del> @writer.call s
<del>
<del> header["Content-Length"] = @length.to_s
<del> str
<del> end
<del>
<del> def close
<del> body.close if body.respond_to?(:close)
<del> end
<del>
<del> def empty?
<del> @block == nil && @body.empty?
<del> end
<del>
<del> alias headers header
<del>
<del> module Helpers
<del> def invalid?; @status < 100 || @status >= 600; end
<del>
<del> def informational?; @status >= 100 && @status < 200; end
<del> def successful?; @status >= 200 && @status < 300; end
<del> def redirection?; @status >= 300 && @status < 400; end
<del> def client_error?; @status >= 400 && @status < 500; end
<del> def server_error?; @status >= 500 && @status < 600; end
<del>
<del> def ok?; @status == 200; end
<del> def forbidden?; @status == 403; end
<del> def not_found?; @status == 404; end
<del>
<del> def redirect?; [301, 302, 303, 307].include? @status; end
<del> def empty?; [201, 204, 304].include? @status; end
<del>
<del> # Headers
<del> attr_reader :headers, :original_headers
<del>
<del> def include?(header)
<del> !!headers[header]
<del> end
<del>
<del> def content_type
<del> headers["Content-Type"]
<del> end
<del>
<del> def content_length
<del> cl = headers["Content-Length"]
<del> cl ? cl.to_i : cl
<del> end
<del>
<del> def location
<del> headers["Location"]
<del> end
<del> end
<del>
<del> include Helpers
<del> end
<del>end
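A small sketch of the write/finish flow described above; the status, header, and body values are arbitrary:

require 'rack/response'

res = Rack::Response.new
res.status = 201
res["Content-Type"] = "text/plain"
res.set_cookie("seen", "1")
res.write "created\n"                  # buffers the body and updates Content-Length

status, headers, body = res.finish     # => the [status, headers, body] triple for Rack
headers["Content-Length"]              # => "8"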
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/session/abstract/id.rb
<del># AUTHOR: blink <[email protected]>; blink#[email protected]
<del># bugrep: Andreas Zehnder
<del>
<del>require 'time'
<del>require 'rack/request'
<del>require 'rack/response'
<del>
<del>module Rack
<del>
<del> module Session
<del>
<del> module Abstract
<del>
<del> # ID sets up a basic framework for implementing an id based sessioning
<del> # service. Cookies sent to the client for maintaining sessions will only
<del> # contain an id reference. Only #get_session and #set_session are
<del> # required to be overwritten.
<del> #
<del> # All parameters are optional.
<del> # * :key determines the name of the cookie, by default it is
<del> # 'rack.session'
<del> # * :path, :domain, :expire_after, :secure, and :httponly set the related
<del> # cookie options as by Rack::Response#add_cookie
<del> # * :defer will not set a cookie in the response.
<del> # * :renew (implementation dependent) will prompt the generation of a new
<del> # session id, and migration of data to be referenced at the new id. If
<del> # :defer is set, it will be overridden and the cookie will be set.
<del> # * :sidbits sets the number of bits in length that a generated session
<del> # id will be.
<del> #
<del> # These options can be set on a per request basis, at the location of
<del> # env['rack.session.options']. Additionally the id of the session can be
<del>      # found within the options hash at the key :id. It is highly
<del>      # recommended not to change its value.
<del> #
<del> # Is Rack::Utils::Context compatible.
<del>
<del> class ID
<del> DEFAULT_OPTIONS = {
<del> :path => '/',
<del> :domain => nil,
<del> :expire_after => nil,
<del> :secure => false,
<del> :httponly => true,
<del> :defer => false,
<del> :renew => false,
<del> :sidbits => 128
<del> }
<del>
<del> attr_reader :key, :default_options
<del> def initialize(app, options={})
<del> @app = app
<del> @key = options[:key] || "rack.session"
<del> @default_options = self.class::DEFAULT_OPTIONS.merge(options)
<del> end
<del>
<del> def call(env)
<del> context(env)
<del> end
<del>
<del> def context(env, app=@app)
<del> load_session(env)
<del> status, headers, body = app.call(env)
<del> commit_session(env, status, headers, body)
<del> end
<del>
<del> private
<del>
<del> # Generate a new session id using Ruby #rand. The size of the
<del> # session id is controlled by the :sidbits option.
<del> # Monkey patch this to use custom methods for session id generation.
<del>
<del> def generate_sid
<del> "%0#{@default_options[:sidbits] / 4}x" %
<del> rand(2**@default_options[:sidbits] - 1)
<del> end
<del>
<del> # Extracts the session id from provided cookies and passes it and the
<del> # environment to #get_session. It then sets the resulting session into
<del> # 'rack.session', and places options and session metadata into
<del> # 'rack.session.options'.
<del>
<del> def load_session(env)
<del> request = Rack::Request.new(env)
<del> session_id = request.cookies[@key]
<del>
<del> begin
<del> session_id, session = get_session(env, session_id)
<del> env['rack.session'] = session
<del> rescue
<del> env['rack.session'] = Hash.new
<del> end
<del>
<del> env['rack.session.options'] = @default_options.
<del> merge(:id => session_id)
<del> end
<del>
<del> # Acquires the session from the environment and the session id from
<del> # the session options and passes them to #set_session. If successful
<del> # and the :defer option is not true, a cookie will be added to the
<del> # response with the session's id.
<del>
<del> def commit_session(env, status, headers, body)
<del> session = env['rack.session']
<del> options = env['rack.session.options']
<del> session_id = options[:id]
<del>
<del> if not session_id = set_session(env, session_id, session, options)
<del> env["rack.errors"].puts("Warning! #{self.class.name} failed to save session. Content dropped.")
<del> [status, headers, body]
<del> elsif options[:defer] and not options[:renew]
<del> env["rack.errors"].puts("Defering cookie for #{session_id}") if $VERBOSE
<del> [status, headers, body]
<del> else
<del> cookie = Hash.new
<del> cookie[:value] = session_id
<del> cookie[:expires] = Time.now + options[:expire_after] unless options[:expire_after].nil?
<del> response = Rack::Response.new(body, status, headers)
<del> response.set_cookie(@key, cookie.merge(options))
<del> response.to_a
<del> end
<del> end
<del>
<del>        # All thread safety and session retrieval procedures should occur here.
<del> # Should return [session_id, session].
<del> # If nil is provided as the session id, generation of a new valid id
<del> # should occur within.
<del>
<del> def get_session(env, sid)
<del> raise '#get_session not implemented.'
<del> end
<del>
<del>        # All thread safety and session storage procedures should occur here.
<del>        # Should return true or false depending on whether the session
<del>        # was saved.
<del> def set_session(env, sid, session, options)
<del> raise '#set_session not implemented.'
<del> end
<del> end
<del> end
<del> end
<del>end
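The contract above needs only #get_session and #set_session to be overridden; a deliberately naive in-memory sketch (no thread safety, ignores :renew and :drop) showing the shape of a subclass:

require 'rack/session/abstract/id'

class MemorySessions < Rack::Session::Abstract::ID
  def initialize(app, options = {})
    super
    @sessions = {}
  end

  private

  # Must return [session_id, session]; create both when no id was given.
  def get_session(env, sid)
    sid ||= generate_sid
    [sid, @sessions[sid] ||= {}]
  end

  # Must return a true value (the id) on success, false on failure.
  def set_session(env, sid, session, options)
    @sessions[sid] = session
    sid
  end
end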
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/session/cookie.rb
<del>require 'openssl'
<del>require 'rack/request'
<del>require 'rack/response'
<del>
<del>module Rack
<del>
<del> module Session
<del>
<del> # Rack::Session::Cookie provides simple cookie based session management.
<del> # The session is a Ruby Hash stored as base64 encoded marshalled data
<del> # set to :key (default: rack.session).
<del> # When the secret key is set, cookie data is checked for data integrity.
<del> #
<del> # Example:
<del> #
<del> # use Rack::Session::Cookie, :key => 'rack.session',
<del> # :domain => 'foo.com',
<del> # :path => '/',
<del> # :expire_after => 2592000,
<del> # :secret => 'change_me'
<del> #
<del> # All parameters are optional.
<del>
<del> class Cookie
<del>
<del> def initialize(app, options={})
<del> @app = app
<del> @key = options[:key] || "rack.session"
<del> @secret = options[:secret]
<del> @default_options = {:domain => nil,
<del> :path => "/",
<del> :expire_after => nil}.merge(options)
<del> end
<del>
<del> def call(env)
<del> load_session(env)
<del> status, headers, body = @app.call(env)
<del> commit_session(env, status, headers, body)
<del> end
<del>
<del> private
<del>
<del> def load_session(env)
<del> request = Rack::Request.new(env)
<del> session_data = request.cookies[@key]
<del>
<del> if @secret && session_data
<del> session_data, digest = session_data.split("--")
<del> session_data = nil unless digest == generate_hmac(session_data)
<del> end
<del>
<del> begin
<del> session_data = session_data.unpack("m*").first
<del> session_data = Marshal.load(session_data)
<del> env["rack.session"] = session_data
<del> rescue
<del> env["rack.session"] = Hash.new
<del> end
<del>
<del> env["rack.session.options"] = @default_options.dup
<del> end
<del>
<del> def commit_session(env, status, headers, body)
<del> session_data = Marshal.dump(env["rack.session"])
<del> session_data = [session_data].pack("m*")
<del>
<del> if @secret
<del> session_data = "#{session_data}--#{generate_hmac(session_data)}"
<del> end
<del>
<del> if session_data.size > (4096 - @key.size)
<del> env["rack.errors"].puts("Warning! Rack::Session::Cookie data size exceeds 4K. Content dropped.")
<del> [status, headers, body]
<del> else
<del> options = env["rack.session.options"]
<del> cookie = Hash.new
<del> cookie[:value] = session_data
<del> cookie[:expires] = Time.now + options[:expire_after] unless options[:expire_after].nil?
<del> response = Rack::Response.new(body, status, headers)
<del> response.set_cookie(@key, cookie.merge(options))
<del> response.to_a
<del> end
<del> end
<del>
<del> def generate_hmac(data)
<del> OpenSSL::HMAC.hexdigest(OpenSSL::Digest::SHA1.new, @secret, data)
<del> end
<del>
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/session/memcache.rb
<del># AUTHOR: blink <[email protected]>; blink#[email protected]
<del>
<del>require 'rack/session/abstract/id'
<del>require 'memcache'
<del>
<del>module Rack
<del> module Session
<del> # Rack::Session::Memcache provides simple cookie based session management.
<del> # Session data is stored in memcached. The corresponding session key is
<del> # maintained in the cookie.
<del> # You may treat Session::Memcache as you would Session::Pool with the
<del> # following caveats.
<del> #
<del>    # * Setting :expire_after to 0 tells the Memcache server to hang
<del>    # onto the session data until it drops it according to its own
<del> # specifications. However, the cookie sent to the client would expire
<del> # immediately.
<del> #
<del>    # Note that memcache may drop data before it is due to expire. For
<del> # a full description of behaviour, please see memcache's documentation.
<del>
<del> class Memcache < Abstract::ID
<del> attr_reader :mutex, :pool
<del> DEFAULT_OPTIONS = Abstract::ID::DEFAULT_OPTIONS.merge \
<del> :namespace => 'rack:session',
<del> :memcache_server => 'localhost:11211'
<del>
<del> def initialize(app, options={})
<del> super
<del>
<del> @mutex = Mutex.new
<del> @pool = MemCache.
<del> new @default_options[:memcache_server], @default_options
<del> raise 'No memcache servers' unless @pool.servers.any?{|s|s.alive?}
<del> end
<del>
<del> def generate_sid
<del> loop do
<del> sid = super
<del> break sid unless @pool.get(sid, true)
<del> end
<del> end
<del>
<del> def get_session(env, sid)
<del> session = @pool.get(sid) if sid
<del> @mutex.lock if env['rack.multithread']
<del> unless sid and session
<del> env['rack.errors'].puts("Session '#{sid.inspect}' not found, initializing...") if $VERBOSE and not sid.nil?
<del> session = {}
<del> sid = generate_sid
<del> ret = @pool.add sid, session
<del> raise "Session collision on '#{sid.inspect}'" unless /^STORED/ =~ ret
<del> end
<del> session.instance_variable_set('@old', {}.merge(session))
<del> return [sid, session]
<del> rescue MemCache::MemCacheError, Errno::ECONNREFUSED # MemCache server cannot be contacted
<del> warn "#{self} is unable to find server."
<del> warn $!.inspect
<del> return [ nil, {} ]
<del> ensure
<del> @mutex.unlock if env['rack.multithread']
<del> end
<del>
<del> def set_session(env, session_id, new_session, options)
<del> expiry = options[:expire_after]
<del> expiry = expiry.nil? ? 0 : expiry + 1
<del>
<del> @mutex.lock if env['rack.multithread']
<del> session = @pool.get(session_id) || {}
<del> if options[:renew] or options[:drop]
<del> @pool.delete session_id
<del> return false if options[:drop]
<del> session_id = generate_sid
<del> @pool.add session_id, 0 # so we don't worry about cache miss on #set
<del> end
<del> old_session = new_session.instance_variable_get('@old') || {}
<del> session = merge_sessions session_id, old_session, new_session, session
<del> @pool.set session_id, session, expiry
<del> return session_id
<del> rescue MemCache::MemCacheError, Errno::ECONNREFUSED # MemCache server cannot be contacted
<del> warn "#{self} is unable to find server."
<del> warn $!.inspect
<del> return false
<del> ensure
<del> @mutex.unlock if env['rack.multithread']
<del> end
<del>
<del> private
<del>
<del> def merge_sessions sid, old, new, cur=nil
<del> cur ||= {}
<del> unless Hash === old and Hash === new
<del> warn 'Bad old or new sessions provided.'
<del> return cur
<del> end
<del>
<del> delete = old.keys - new.keys
<del> warn "//@#{sid}: delete #{delete*','}" if $VERBOSE and not delete.empty?
<del> delete.each{|k| cur.delete k }
<del>
<del> update = new.keys.select{|k| new[k] != old[k] }
<del> warn "//@#{sid}: update #{update*','}" if $VERBOSE and not update.empty?
<del> update.each{|k| cur[k] = new[k] }
<del>
<del> cur
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/session/pool.rb
<del># AUTHOR: blink <[email protected]>; blink#[email protected]
<del># THANKS:
<del># apeiros, for session id generation, expiry setup, and threadiness
<del># sergio, threadiness and bugreps
<del>
<del>require 'rack/session/abstract/id'
<del>require 'thread'
<del>
<del>module Rack
<del> module Session
<del> # Rack::Session::Pool provides simple cookie based session management.
<del> # Session data is stored in a hash held by @pool.
<del> # In the context of a multithreaded environment, sessions being
<del> # committed to the pool is done in a merging manner.
<del> #
<del> # The :drop option is available in rack.session.options if you with to
<del> # explicitly remove the session from the session cache.
<del> #
<del> # Example:
<del> # myapp = MyRackApp.new
<del> # sessioned = Rack::Session::Pool.new(myapp,
<del> # :domain => 'foo.com',
<del> # :expire_after => 2592000
<del> # )
<del> # Rack::Handler::WEBrick.run sessioned
<del>
<del> class Pool < Abstract::ID
<del> attr_reader :mutex, :pool
<del> DEFAULT_OPTIONS = Abstract::ID::DEFAULT_OPTIONS.merge :drop => false
<del>
<del> def initialize(app, options={})
<del> super
<del> @pool = Hash.new
<del> @mutex = Mutex.new
<del> end
<del>
<del> def generate_sid
<del> loop do
<del> sid = super
<del> break sid unless @pool.key? sid
<del> end
<del> end
<del>
<del> def get_session(env, sid)
<del> session = @pool[sid] if sid
<del> @mutex.lock if env['rack.multithread']
<del> unless sid and session
<del> env['rack.errors'].puts("Session '#{sid.inspect}' not found, initializing...") if $VERBOSE and not sid.nil?
<del> session = {}
<del> sid = generate_sid
<del> @pool.store sid, session
<del> end
<del> session.instance_variable_set('@old', {}.merge(session))
<del> return [sid, session]
<del> ensure
<del> @mutex.unlock if env['rack.multithread']
<del> end
<del>
<del> def set_session(env, session_id, new_session, options)
<del> @mutex.lock if env['rack.multithread']
<del> session = @pool[session_id]
<del> if options[:renew] or options[:drop]
<del> @pool.delete session_id
<del> return false if options[:drop]
<del> session_id = generate_sid
<del> @pool.store session_id, 0
<del> end
<del> old_session = new_session.instance_variable_get('@old') || {}
<del> session = merge_sessions session_id, old_session, new_session, session
<del> @pool.store session_id, session
<del> return session_id
<del> rescue
<del> warn "#{new_session.inspect} has been lost."
<del> warn $!.inspect
<del> ensure
<del> @mutex.unlock if env['rack.multithread']
<del> end
<del>
<del> private
<del>
<del> def merge_sessions sid, old, new, cur=nil
<del> cur ||= {}
<del> unless Hash === old and Hash === new
<del> warn 'Bad old or new sessions provided.'
<del> return cur
<del> end
<del>
<del> delete = old.keys - new.keys
<del> warn "//@#{sid}: dropping #{delete*','}" if $DEBUG and not delete.empty?
<del> delete.each{|k| cur.delete k }
<del>
<del> update = new.keys.select{|k| new[k] != old[k] }
<del> warn "//@#{sid}: updating #{update*','}" if $DEBUG and not update.empty?
<del> update.each{|k| cur[k] = new[k] }
<del>
<del> cur
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/showexceptions.rb
<del>require 'ostruct'
<del>require 'erb'
<del>require 'rack/request'
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Rack::ShowExceptions catches all exceptions raised from the app it
<del> # wraps. It shows a useful backtrace with the sourcefile and
<del> # clickable context, the whole Rack environment and the request
<del> # data.
<del> #
<del> # Be careful when you use this on public-facing sites as it could
<del> # reveal information helpful to attackers.
<del>
<del> class ShowExceptions
<del> CONTEXT = 7
<del>
<del> def initialize(app)
<del> @app = app
<del> @template = ERB.new(TEMPLATE)
<del> end
<del>
<del> def call(env)
<del> @app.call(env)
<del> rescue StandardError, LoadError, SyntaxError => e
<del> backtrace = pretty(env, e)
<del> [500,
<del> {"Content-Type" => "text/html",
<del> "Content-Length" => backtrace.join.size.to_s},
<del> backtrace]
<del> end
<del>
<del> def pretty(env, exception)
<del> req = Rack::Request.new(env)
<del> path = (req.script_name + req.path_info).squeeze("/")
<del>
<del> frames = exception.backtrace.map { |line|
<del> frame = OpenStruct.new
<del> if line =~ /(.*?):(\d+)(:in `(.*)')?/
<del> frame.filename = $1
<del> frame.lineno = $2.to_i
<del> frame.function = $4
<del>
<del> begin
<del> lineno = frame.lineno-1
<del> lines = ::File.readlines(frame.filename)
<del> frame.pre_context_lineno = [lineno-CONTEXT, 0].max
<del> frame.pre_context = lines[frame.pre_context_lineno...lineno]
<del> frame.context_line = lines[lineno].chomp
<del> frame.post_context_lineno = [lineno+CONTEXT, lines.size].min
<del> frame.post_context = lines[lineno+1..frame.post_context_lineno]
<del> rescue
<del> end
<del>
<del> frame
<del> else
<del> nil
<del> end
<del> }.compact
<del>
<del> env["rack.errors"].puts "#{exception.class}: #{exception.message}"
<del> env["rack.errors"].puts exception.backtrace.map { |l| "\t" + l }
<del> env["rack.errors"].flush
<del>
<del> [@template.result(binding)]
<del> end
<del>
<del> def h(obj) # :nodoc:
<del> case obj
<del> when String
<del> Utils.escape_html(obj)
<del> else
<del> Utils.escape_html(obj.inspect)
<del> end
<del> end
<del>
<del> # :stopdoc:
<del>
<del># adapted from Django <djangoproject.com>
<del># Copyright (c) 2005, the Lawrence Journal-World
<del># Used under the modified BSD license:
<del># http://www.xfree86.org/3.3.6/COPYRIGHT2.html#5
<del>TEMPLATE = <<'HTML'
<del><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<del><html lang="en">
<del><head>
<del> <meta http-equiv="content-type" content="text/html; charset=utf-8" />
<del> <meta name="robots" content="NONE,NOARCHIVE" />
<del> <title><%=h exception.class %> at <%=h path %></title>
<del> <style type="text/css">
<del> html * { padding:0; margin:0; }
<del> body * { padding:10px 20px; }
<del> body * * { padding:0; }
<del> body { font:small sans-serif; }
<del> body>div { border-bottom:1px solid #ddd; }
<del> h1 { font-weight:normal; }
<del> h2 { margin-bottom:.8em; }
<del> h2 span { font-size:80%; color:#666; font-weight:normal; }
<del> h3 { margin:1em 0 .5em 0; }
<del> h4 { margin:0 0 .5em 0; font-weight: normal; }
<del> table {
<del> border:1px solid #ccc; border-collapse: collapse; background:white; }
<del> tbody td, tbody th { vertical-align:top; padding:2px 3px; }
<del> thead th {
<del> padding:1px 6px 1px 3px; background:#fefefe; text-align:left;
<del> font-weight:normal; font-size:11px; border:1px solid #ddd; }
<del> tbody th { text-align:right; color:#666; padding-right:.5em; }
<del> table.vars { margin:5px 0 2px 40px; }
<del> table.vars td, table.req td { font-family:monospace; }
<del> table td.code { width:100%;}
<del> table td.code div { overflow:hidden; }
<del> table.source th { color:#666; }
<del> table.source td {
<del> font-family:monospace; white-space:pre; border-bottom:1px solid #eee; }
<del> ul.traceback { list-style-type:none; }
<del> ul.traceback li.frame { margin-bottom:1em; }
<del> div.context { margin: 10px 0; }
<del> div.context ol {
<del> padding-left:30px; margin:0 10px; list-style-position: inside; }
<del> div.context ol li {
<del> font-family:monospace; white-space:pre; color:#666; cursor:pointer; }
<del> div.context ol.context-line li { color:black; background-color:#ccc; }
<del> div.context ol.context-line li span { float: right; }
<del> div.commands { margin-left: 40px; }
<del> div.commands a { color:black; text-decoration:none; }
<del> #summary { background: #ffc; }
<del> #summary h2 { font-weight: normal; color: #666; }
<del> #summary ul#quicklinks { list-style-type: none; margin-bottom: 2em; }
<del> #summary ul#quicklinks li { float: left; padding: 0 1em; }
<del> #summary ul#quicklinks>li+li { border-left: 1px #666 solid; }
<del> #explanation { background:#eee; }
<del> #template, #template-not-exist { background:#f6f6f6; }
<del> #template-not-exist ul { margin: 0 0 0 20px; }
<del> #traceback { background:#eee; }
<del> #requestinfo { background:#f6f6f6; padding-left:120px; }
<del> #summary table { border:none; background:transparent; }
<del> #requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; }
<del> #requestinfo h3 { margin-bottom:-1em; }
<del> .error { background: #ffc; }
<del> .specific { color:#cc3300; font-weight:bold; }
<del> </style>
<del> <script type="text/javascript">
<del> //<!--
<del> function getElementsByClassName(oElm, strTagName, strClassName){
<del> // Written by Jonathan Snook, http://www.snook.ca/jon;
<del> // Add-ons by Robert Nyman, http://www.robertnyman.com
<del> var arrElements = (strTagName == "*" && document.all)? document.all :
<del> oElm.getElementsByTagName(strTagName);
<del> var arrReturnElements = new Array();
<del> strClassName = strClassName.replace(/\-/g, "\\-");
<del> var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$$)");
<del> var oElement;
<del> for(var i=0; i<arrElements.length; i++){
<del> oElement = arrElements[i];
<del> if(oRegExp.test(oElement.className)){
<del> arrReturnElements.push(oElement);
<del> }
<del> }
<del> return (arrReturnElements)
<del> }
<del> function hideAll(elems) {
<del> for (var e = 0; e < elems.length; e++) {
<del> elems[e].style.display = 'none';
<del> }
<del> }
<del> window.onload = function() {
<del> hideAll(getElementsByClassName(document, 'table', 'vars'));
<del> hideAll(getElementsByClassName(document, 'ol', 'pre-context'));
<del> hideAll(getElementsByClassName(document, 'ol', 'post-context'));
<del> }
<del> function toggle() {
<del> for (var i = 0; i < arguments.length; i++) {
<del> var e = document.getElementById(arguments[i]);
<del> if (e) {
<del> e.style.display = e.style.display == 'none' ? 'block' : 'none';
<del> }
<del> }
<del> return false;
<del> }
<del> function varToggle(link, id) {
<del> toggle('v' + id);
<del> var s = link.getElementsByTagName('span')[0];
<del> var uarr = String.fromCharCode(0x25b6);
<del> var darr = String.fromCharCode(0x25bc);
<del> s.innerHTML = s.innerHTML == uarr ? darr : uarr;
<del> return false;
<del> }
<del> //-->
<del> </script>
<del></head>
<del><body>
<del>
<del><div id="summary">
<del> <h1><%=h exception.class %> at <%=h path %></h1>
<del> <h2><%=h exception.message %></h2>
<del> <table><tr>
<del> <th>Ruby</th>
<del> <td><code><%=h frames.first.filename %></code>: in <code><%=h frames.first.function %></code>, line <%=h frames.first.lineno %></td>
<del> </tr><tr>
<del> <th>Web</th>
<del> <td><code><%=h req.request_method %> <%=h(req.host + path)%></code></td>
<del> </tr></table>
<del>
<del> <h3>Jump to:</h3>
<del> <ul id="quicklinks">
<del> <li><a href="#get-info">GET</a></li>
<del> <li><a href="#post-info">POST</a></li>
<del> <li><a href="#cookie-info">Cookies</a></li>
<del> <li><a href="#env-info">ENV</a></li>
<del> </ul>
<del></div>
<del>
<del><div id="traceback">
<del> <h2>Traceback <span>(innermost first)</span></h2>
<del> <ul class="traceback">
<del><% frames.each { |frame| %>
<del> <li class="frame">
<del> <code><%=h frame.filename %></code>: in <code><%=h frame.function %></code>
<del>
<del> <% if frame.context_line %>
<del> <div class="context" id="c<%=h frame.object_id %>">
<del> <% if frame.pre_context %>
<del> <ol start="<%=h frame.pre_context_lineno+1 %>" class="pre-context" id="pre<%=h frame.object_id %>">
<del> <% frame.pre_context.each { |line| %>
<del> <li onclick="toggle('pre<%=h frame.object_id %>', 'post<%=h frame.object_id %>')"><%=h line %></li>
<del> <% } %>
<del> </ol>
<del> <% end %>
<del>
<del> <ol start="<%=h frame.lineno %>" class="context-line">
<del> <li onclick="toggle('pre<%=h frame.object_id %>', 'post<%=h frame.object_id %>')"><%=h frame.context_line %><span>...</span></li></ol>
<del>
<del> <% if frame.post_context %>
<del> <ol start='<%=h frame.lineno+1 %>' class="post-context" id="post<%=h frame.object_id %>">
<del> <% frame.post_context.each { |line| %>
<del> <li onclick="toggle('pre<%=h frame.object_id %>', 'post<%=h frame.object_id %>')"><%=h line %></li>
<del> <% } %>
<del> </ol>
<del> <% end %>
<del> </div>
<del> <% end %>
<del> </li>
<del><% } %>
<del> </ul>
<del></div>
<del>
<del><div id="requestinfo">
<del> <h2>Request information</h2>
<del>
<del> <h3 id="get-info">GET</h3>
<del> <% unless req.GET.empty? %>
<del> <table class="req">
<del> <thead>
<del> <tr>
<del> <th>Variable</th>
<del> <th>Value</th>
<del> </tr>
<del> </thead>
<del> <tbody>
<del> <% req.GET.sort_by { |k, v| k.to_s }.each { |key, val| %>
<del> <tr>
<del> <td><%=h key %></td>
<del> <td class="code"><div><%=h val.inspect %></div></td>
<del> </tr>
<del> <% } %>
<del> </tbody>
<del> </table>
<del> <% else %>
<del> <p>No GET data.</p>
<del> <% end %>
<del>
<del> <h3 id="post-info">POST</h3>
<del> <% unless req.POST.empty? %>
<del> <table class="req">
<del> <thead>
<del> <tr>
<del> <th>Variable</th>
<del> <th>Value</th>
<del> </tr>
<del> </thead>
<del> <tbody>
<del> <% req.POST.sort_by { |k, v| k.to_s }.each { |key, val| %>
<del> <tr>
<del> <td><%=h key %></td>
<del> <td class="code"><div><%=h val.inspect %></div></td>
<del> </tr>
<del> <% } %>
<del> </tbody>
<del> </table>
<del> <% else %>
<del> <p>No POST data.</p>
<del> <% end %>
<del>
<del>
<del> <h3 id="cookie-info">COOKIES</h3>
<del> <% unless req.cookies.empty? %>
<del> <table class="req">
<del> <thead>
<del> <tr>
<del> <th>Variable</th>
<del> <th>Value</th>
<del> </tr>
<del> </thead>
<del> <tbody>
<del> <% req.cookies.each { |key, val| %>
<del> <tr>
<del> <td><%=h key %></td>
<del> <td class="code"><div><%=h val.inspect %></div></td>
<del> </tr>
<del> <% } %>
<del> </tbody>
<del> </table>
<del> <% else %>
<del> <p>No cookie data.</p>
<del> <% end %>
<del>
<del> <h3 id="env-info">Rack ENV</h3>
<del> <table class="req">
<del> <thead>
<del> <tr>
<del> <th>Variable</th>
<del> <th>Value</th>
<del> </tr>
<del> </thead>
<del> <tbody>
<del> <% env.sort_by { |k, v| k.to_s }.each { |key, val| %>
<del> <tr>
<del> <td><%=h key %></td>
<del> <td class="code"><div><%=h val %></div></td>
<del> </tr>
<del> <% } %>
<del> </tbody>
<del> </table>
<del>
<del></div>
<del>
<del><div id="explanation">
<del> <p>
<del> You're seeing this error because you use <code>Rack::ShowExceptions</code>.
<del> </p>
<del></div>
<del>
<del></body>
<del></html>
<del>HTML
<del>
<del> # :startdoc:
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/showstatus.rb
<del>require 'erb'
<del>require 'rack/request'
<del>require 'rack/utils'
<del>
<del>module Rack
<del> # Rack::ShowStatus catches all empty responses the app it wraps and
<del> # replaces them with a site explaining the error.
<del> #
<del> # Additional details can be put into <tt>rack.showstatus.detail</tt>
<del> # and will be shown as HTML. If such details exist, the error page
<del> # is always rendered, even if the reply was not empty.
<del>
<del> class ShowStatus
<del> def initialize(app)
<del> @app = app
<del> @template = ERB.new(TEMPLATE)
<del> end
<del>
<del> def call(env)
<del> status, headers, body = @app.call(env)
<del> headers = Utils::HeaderHash.new(headers)
<del> empty = headers['Content-Length'].to_i <= 0
<del>
<del> # client or server error, or explicit message
<del> if (status.to_i >= 400 && empty) || env["rack.showstatus.detail"]
<del> req = Rack::Request.new(env)
<del> message = Rack::Utils::HTTP_STATUS_CODES[status.to_i] || status.to_s
<del> detail = env["rack.showstatus.detail"] || message
<del> body = @template.result(binding)
<del> size = Rack::Utils.bytesize(body)
<del> [status, headers.merge("Content-Type" => "text/html", "Content-Length" => size.to_s), [body]]
<del> else
<del> [status, headers, body]
<del> end
<del> end
<del>
<del> def h(obj) # :nodoc:
<del> case obj
<del> when String
<del> Utils.escape_html(obj)
<del> else
<del> Utils.escape_html(obj.inspect)
<del> end
<del> end
<del>
<del> # :stopdoc:
<del>
<del># adapted from Django <djangoproject.com>
<del># Copyright (c) 2005, the Lawrence Journal-World
<del># Used under the modified BSD license:
<del># http://www.xfree86.org/3.3.6/COPYRIGHT2.html#5
<del>TEMPLATE = <<'HTML'
<del><!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<del><html lang="en">
<del><head>
<del> <meta http-equiv="content-type" content="text/html; charset=utf-8" />
<del> <title><%=h message %> at <%=h req.script_name + req.path_info %></title>
<del> <meta name="robots" content="NONE,NOARCHIVE" />
<del> <style type="text/css">
<del> html * { padding:0; margin:0; }
<del> body * { padding:10px 20px; }
<del> body * * { padding:0; }
<del> body { font:small sans-serif; background:#eee; }
<del> body>div { border-bottom:1px solid #ddd; }
<del> h1 { font-weight:normal; margin-bottom:.4em; }
<del> h1 span { font-size:60%; color:#666; font-weight:normal; }
<del> table { border:none; border-collapse: collapse; width:100%; }
<del> td, th { vertical-align:top; padding:2px 3px; }
<del> th { width:12em; text-align:right; color:#666; padding-right:.5em; }
<del> #info { background:#f6f6f6; }
<del> #info ol { margin: 0.5em 4em; }
<del> #info ol li { font-family: monospace; }
<del> #summary { background: #ffc; }
<del> #explanation { background:#eee; border-bottom: 0px none; }
<del> </style>
<del></head>
<del><body>
<del> <div id="summary">
<del> <h1><%=h message %> <span>(<%= status.to_i %>)</span></h1>
<del> <table class="meta">
<del> <tr>
<del> <th>Request Method:</th>
<del> <td><%=h req.request_method %></td>
<del> </tr>
<del> <tr>
<del> <th>Request URL:</th>
<del> <td><%=h req.url %></td>
<del> </tr>
<del> </table>
<del> </div>
<del> <div id="info">
<del> <p><%= detail %></p>
<del> </div>
<del>
<del> <div id="explanation">
<del> <p>
<del> You're seeing this error because you use <code>Rack::ShowStatus</code>.
<del> </p>
<del> </div>
<del></body>
<del></html>
<del>HTML
<del>
<del> # :startdoc:
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/static.rb
<del>module Rack
<del>
<del> # The Rack::Static middleware intercepts requests for static files
<del> # (javascript files, images, stylesheets, etc) based on the url prefixes
<del> # passed in the options, and serves them using a Rack::File object. This
<del> # allows a Rack stack to serve both static and dynamic content.
<del> #
<del> # Examples:
<del> # use Rack::Static, :urls => ["/media"]
<del> # will serve all requests beginning with /media from the "media" folder
<del> # located in the current directory (ie media/*).
<del> #
<del> # use Rack::Static, :urls => ["/css", "/images"], :root => "public"
<del> # will serve all requests beginning with /css or /images from the folder
<del> # "public" in the current directory (ie public/css/* and public/images/*)
<del>
<del> class Static
<del>
<del> def initialize(app, options={})
<del> @app = app
<del> @urls = options[:urls] || ["/favicon.ico"]
<del> root = options[:root] || Dir.pwd
<del> @file_server = Rack::File.new(root)
<del> end
<del>
<del> def call(env)
<del> path = env["PATH_INFO"]
<del> can_serve = @urls.any? { |url| path.index(url) == 0 }
<del>
<del> if can_serve
<del> @file_server.call(env)
<del> else
<del> @app.call(env)
<del> end
<del> end
<del>
<del> end
<del>end
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/urlmap.rb
<del>module Rack
<del> # Rack::URLMap takes a hash mapping urls or paths to apps, and
<del> # dispatches accordingly. Support for HTTP/1.1 host names exists if
<del> # the URLs start with <tt>http://</tt> or <tt>https://</tt>.
<del> #
<del> # URLMap modifies the SCRIPT_NAME and PATH_INFO such that the part
<del> # relevant for dispatch is in the SCRIPT_NAME, and the rest in the
<del> # PATH_INFO. This should be taken care of when you need to
<del> # reconstruct the URL in order to create links.
<del> #
<del> # URLMap dispatches in such a way that the longest paths are tried
<del> # first, since they are most specific.
<del>
<del> class URLMap
<del> def initialize(map = {})
<del> remap(map)
<del> end
<del>
<del> def remap(map)
<del> @mapping = map.map { |location, app|
<del> if location =~ %r{\Ahttps?://(.*?)(/.*)}
<del> host, location = $1, $2
<del> else
<del> host = nil
<del> end
<del>
<del> unless location[0] == ?/
<del> raise ArgumentError, "paths need to start with /"
<del> end
<del> location = location.chomp('/')
<del>
<del> [host, location, app]
<del> }.sort_by { |(h, l, a)| [-l.size, h.to_s.size] } # Longest path first
<del> end
<del>
<del> def call(env)
<del> path = env["PATH_INFO"].to_s.squeeze("/")
<del> script_name = env['SCRIPT_NAME']
<del> hHost, sName, sPort = env.values_at('HTTP_HOST','SERVER_NAME','SERVER_PORT')
<del> @mapping.each { |host, location, app|
<del> next unless (hHost == host || sName == host \
<del> || (host.nil? && (hHost == sName || hHost == sName+':'+sPort)))
<del> next unless location == path[0, location.size]
<del> next unless path[location.size] == nil || path[location.size] == ?/
<del>
<del> return app.call(
<del> env.merge(
<del> 'SCRIPT_NAME' => (script_name + location),
<del> 'PATH_INFO' => path[location.size..-1]))
<del> }
<del> [404, {"Content-Type" => "text/plain"}, ["Not Found: #{path}"]]
<del> end
<del> end
<del>end
<del>
<ide><path>actionpack/lib/action_dispatch/vendor/rack-1.0/rack/utils.rb
<del>require 'set'
<del>require 'tempfile'
<del>
<del>module Rack
<del> # Rack::Utils contains a grab-bag of useful methods for writing web
<del> # applications adopted from all kinds of Ruby libraries.
<del>
<del> module Utils
<del> # Performs URI escaping so that you can construct proper
<del> # query strings faster. Use this rather than the cgi.rb
<del> # version since it's faster. (Stolen from Camping).
<del> def escape(s)
<del> s.to_s.gsub(/([^ a-zA-Z0-9_.-]+)/n) {
<del> '%'+$1.unpack('H2'*$1.size).join('%').upcase
<del> }.tr(' ', '+')
<del> end
<del> module_function :escape
<del>
<del> # Unescapes a URI escaped string. (Stolen from Camping).
<del> def unescape(s)
<del> s.tr('+', ' ').gsub(/((?:%[0-9a-fA-F]{2})+)/n){
<del> [$1.delete('%')].pack('H*')
<del> }
<del> end
<del> module_function :unescape
<del>
<del> # Stolen from Mongrel, with some small modifications:
<del> # Parses a query string by breaking it up at the '&'
<del> # and ';' characters. You can also use this to parse
<del> # cookies by changing the characters used in the second
<del> # parameter (which defaults to '&;').
<del> def parse_query(qs, d = '&;')
<del> params = {}
<del>
<del> (qs || '').split(/[#{d}] */n).each do |p|
<del> k, v = unescape(p).split('=', 2)
<del>
<del> if cur = params[k]
<del> if cur.class == Array
<del> params[k] << v
<del> else
<del> params[k] = [cur, v]
<del> end
<del> else
<del> params[k] = v
<del> end
<del> end
<del>
<del> return params
<del> end
<del> module_function :parse_query
<del>
<del> def parse_nested_query(qs, d = '&;')
<del> params = {}
<del>
<del> (qs || '').split(/[#{d}] */n).each do |p|
<del> k, v = unescape(p).split('=', 2)
<del> normalize_params(params, k, v)
<del> end
<del>
<del> return params
<del> end
<del> module_function :parse_nested_query
<del>
<del> def normalize_params(params, name, v = nil)
<del> name =~ %r([\[\]]*([^\[\]]+)\]*)
<del> k = $1 || ''
<del> after = $' || ''
<del>
<del> return if k.empty?
<del>
<del> if after == ""
<del> params[k] = v
<del> elsif after == "[]"
<del> params[k] ||= []
<del> raise TypeError unless params[k].is_a?(Array)
<del> params[k] << v
<del> elsif after =~ %r(^\[\]\[([^\[\]]+)\]$) || after =~ %r(^\[\](.+)$)
<del> child_key = $1
<del> params[k] ||= []
<del> raise TypeError unless params[k].is_a?(Array)
<del> if params[k].last.is_a?(Hash) && !params[k].last.key?(child_key)
<del> normalize_params(params[k].last, child_key, v)
<del> else
<del> params[k] << normalize_params({}, child_key, v)
<del> end
<del> else
<del> params[k] ||= {}
<del> params[k] = normalize_params(params[k], after, v)
<del> end
<del>
<del> return params
<del> end
<del> module_function :normalize_params
<del>
<del> def build_query(params)
<del> params.map { |k, v|
<del> if v.class == Array
<del> build_query(v.map { |x| [k, x] })
<del> else
<del> escape(k) + "=" + escape(v)
<del> end
<del> }.join("&")
<del> end
<del> module_function :build_query
<del>
<del> # Escape ampersands, brackets and quotes to their HTML/XML entities.
<del> def escape_html(string)
<del> string.to_s.gsub("&", "&amp;").
<del> gsub("<", "&lt;").
<del> gsub(">", "&gt;").
<del> gsub("'", "&#39;").
<del> gsub('"', "&quot;")
<del> end
<del> module_function :escape_html
<del>
<del> def select_best_encoding(available_encodings, accept_encoding)
<del> # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
<del>
<del> expanded_accept_encoding =
<del> accept_encoding.map { |m, q|
<del> if m == "*"
<del> (available_encodings - accept_encoding.map { |m2, _| m2 }).map { |m2| [m2, q] }
<del> else
<del> [[m, q]]
<del> end
<del> }.inject([]) { |mem, list|
<del> mem + list
<del> }
<del>
<del> encoding_candidates = expanded_accept_encoding.sort_by { |_, q| -q }.map { |m, _| m }
<del>
<del> unless encoding_candidates.include?("identity")
<del> encoding_candidates.push("identity")
<del> end
<del>
<del> expanded_accept_encoding.find_all { |m, q|
<del> q == 0.0
<del> }.each { |m, _|
<del> encoding_candidates.delete(m)
<del> }
<del>
<del> return (encoding_candidates & available_encodings)[0]
<del> end
<del> module_function :select_best_encoding
<del>
<del> # Return the bytesize of String; uses String#length under Ruby 1.8 and
<del> # String#bytesize under 1.9.
<del> if ''.respond_to?(:bytesize)
<del> def bytesize(string)
<del> string.bytesize
<del> end
<del> else
<del> def bytesize(string)
<del> string.size
<del> end
<del> end
<del> module_function :bytesize
<del>
<del> # Context allows the use of a compatible middleware at different points
<del> # in a request handling stack. A compatible middleware must define
<del> # #context which should take the arguments env and app. The first of which
<del> # would be the request environment. The second of which would be the rack
<del> # application that the request would be forwarded to.
<del> class Context
<del> attr_reader :for, :app
<del>
<del> def initialize(app_f, app_r)
<del> raise 'running context does not respond to #context' unless app_f.respond_to? :context
<del> @for, @app = app_f, app_r
<del> end
<del>
<del> def call(env)
<del> @for.context(env, @app)
<del> end
<del>
<del> def recontext(app)
<del> self.class.new(@for, app)
<del> end
<del>
<del> def context(env, app=@app)
<del> recontext(app).call(env)
<del> end
<del> end
<del>
<del> # A case-insensitive Hash that preserves the original case of a
<del> # header when set.
<del> class HeaderHash < Hash
<del> def initialize(hash={})
<del> @names = {}
<del> hash.each { |k, v| self[k] = v }
<del> end
<del>
<del> def to_hash
<del> inject({}) do |hash, (k,v)|
<del> if v.respond_to? :to_ary
<del> hash[k] = v.to_ary.join("\n")
<del> else
<del> hash[k] = v
<del> end
<del> hash
<del> end
<del> end
<del>
<del> def [](k)
<del> super @names[k.downcase]
<del> end
<del>
<del> def []=(k, v)
<del> delete k
<del> @names[k.downcase] = k
<del> super k, v
<del> end
<del>
<del> def delete(k)
<del> super @names.delete(k.downcase)
<del> end
<del>
<del> def include?(k)
<del> @names.has_key? k.downcase
<del> end
<del>
<del> alias_method :has_key?, :include?
<del> alias_method :member?, :include?
<del> alias_method :key?, :include?
<del>
<del> def merge!(other)
<del> other.each { |k, v| self[k] = v }
<del> self
<del> end
<del>
<del> def merge(other)
<del> hash = dup
<del> hash.merge! other
<del> end
<del> end
<del>
<del> # Every standard HTTP code mapped to the appropriate message.
<del> # Stolen from Mongrel.
<del> HTTP_STATUS_CODES = {
<del> 100 => 'Continue',
<del> 101 => 'Switching Protocols',
<del> 200 => 'OK',
<del> 201 => 'Created',
<del> 202 => 'Accepted',
<del> 203 => 'Non-Authoritative Information',
<del> 204 => 'No Content',
<del> 205 => 'Reset Content',
<del> 206 => 'Partial Content',
<del> 300 => 'Multiple Choices',
<del> 301 => 'Moved Permanently',
<del> 302 => 'Found',
<del> 303 => 'See Other',
<del> 304 => 'Not Modified',
<del> 305 => 'Use Proxy',
<del> 307 => 'Temporary Redirect',
<del> 400 => 'Bad Request',
<del> 401 => 'Unauthorized',
<del> 402 => 'Payment Required',
<del> 403 => 'Forbidden',
<del> 404 => 'Not Found',
<del> 405 => 'Method Not Allowed',
<del> 406 => 'Not Acceptable',
<del> 407 => 'Proxy Authentication Required',
<del> 408 => 'Request Timeout',
<del> 409 => 'Conflict',
<del> 410 => 'Gone',
<del> 411 => 'Length Required',
<del> 412 => 'Precondition Failed',
<del> 413 => 'Request Entity Too Large',
<del> 414 => 'Request-URI Too Large',
<del> 415 => 'Unsupported Media Type',
<del> 416 => 'Requested Range Not Satisfiable',
<del> 417 => 'Expectation Failed',
<del> 500 => 'Internal Server Error',
<del> 501 => 'Not Implemented',
<del> 502 => 'Bad Gateway',
<del> 503 => 'Service Unavailable',
<del> 504 => 'Gateway Timeout',
<del> 505 => 'HTTP Version Not Supported'
<del> }
<del>
<del> # Responses with HTTP status codes that should not have an entity body
<del> STATUS_WITH_NO_ENTITY_BODY = Set.new((100..199).to_a << 204 << 304)
<del>
<del> # A multipart form data parser, adapted from IOWA.
<del> #
<del> # Usually, Rack::Request#POST takes care of calling this.
<del>
<del> module Multipart
<del> EOL = "\r\n"
<del>
<del> def self.parse_multipart(env)
<del> unless env['CONTENT_TYPE'] =~
<del> %r|\Amultipart/form-data.*boundary=\"?([^\";,]+)\"?|n
<del> nil
<del> else
<del> boundary = "--#{$1}"
<del>
<del> params = {}
<del> buf = ""
<del> content_length = env['CONTENT_LENGTH'].to_i
<del> input = env['rack.input']
<del>
<del> boundary_size = boundary.size + EOL.size
<del> bufsize = 16384
<del>
<del> content_length -= boundary_size
<del>
<del> status = input.read(boundary_size)
<del> raise EOFError, "bad content body" unless status == boundary + EOL
<del>
<del> rx = /(?:#{EOL})?#{Regexp.quote boundary}(#{EOL}|--)/n
<del>
<del> loop {
<del> head = nil
<del> body = ''
<del> filename = content_type = name = nil
<del>
<del> until head && buf =~ rx
<del> if !head && i = buf.index("\r\n\r\n")
<del> head = buf.slice!(0, i+2) # First \r\n
<del> buf.slice!(0, 2) # Second \r\n
<del>
<del> filename = head[/Content-Disposition:.* filename="?([^\";]*)"?/ni, 1]
<del> content_type = head[/Content-Type: (.*)\r\n/ni, 1]
<del> name = head[/Content-Disposition:.* name="?([^\";]*)"?/ni, 1]
<del>
<del> if filename
<del> body = Tempfile.new("RackMultipart")
<del> body.binmode if body.respond_to?(:binmode)
<del> end
<del>
<del> next
<del> end
<del>
<del> # Save the read body part.
<del> if head && (boundary_size+4 < buf.size)
<del> body << buf.slice!(0, buf.size - (boundary_size+4))
<del> end
<del>
<del> c = input.read(bufsize < content_length ? bufsize : content_length)
<del> raise EOFError, "bad content body" if c.nil? || c.empty?
<del> buf << c
<del> content_length -= c.size
<del> end
<del>
<del> # Save the rest.
<del> if i = buf.index(rx)
<del> body << buf.slice!(0, i)
<del> buf.slice!(0, boundary_size+2)
<del>
<del> content_length = -1 if $1 == "--"
<del> end
<del>
<del> if filename == ""
<del> # filename is blank which means no file has been selected
<del> data = nil
<del> elsif filename
<del> body.rewind
<del>
<del> # Take the basename of the upload's original filename.
<del> # This handles the full Windows paths given by Internet Explorer
<del> # (and perhaps other broken user agents) without affecting
<del> # those which give the lone filename.
<del> filename =~ /^(?:.*[:\\\/])?(.*)/m
<del> filename = $1
<del>
<del> data = {:filename => filename, :type => content_type,
<del> :name => name, :tempfile => body, :head => head}
<del> else
<del> data = body
<del> end
<del>
<del> Utils.normalize_params(params, name, data) unless data.nil?
<del>
<del> break if buf.empty? || content_length == -1
<del> }
<del>
<del> begin
<del> input.rewind if input.respond_to?(:rewind)
<del> rescue Errno::ESPIPE
<del> # Handles exceptions raised by input streams that cannot be rewound
<del> # such as when using plain CGI under Apache
<del> end
<del>
<del> params
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/test/dispatch/request/multipart_params_parsing_test.rb
<ide> def teardown
<ide>
<ide> # Ruby CGI doesn't handle multipart/mixed for us.
<ide> files = params['files']
<del> assert_kind_of String, files
<add> assert_kind_of Tempfile, files
<ide> files.force_encoding('ASCII-8BIT') if files.respond_to?(:force_encoding)
<ide> assert_equal 19756, files.size
<ide> end | 53 |
Text | Text | update azure pipelines status badge url | a3c320dd707b915da2192427bcceea166edbd6d4 | <ide><path>README.md
<ide> 
<ide>
<del>[](https://github.visualstudio.com/Atom/_build/latest?definitionId=32&branchName=master) [](https://travis-ci.org/atom/atom) [](https://ci.appveyor.com/project/Atom/atom)
<add>[](https://dev.azure.com/github/Atom/_build/latest?definitionId=32&branchName=master) [](https://travis-ci.org/atom/atom) [](https://ci.appveyor.com/project/Atom/atom)
<ide> [](https://david-dm.org/atom/atom)
<ide> [](https://atom-slack.herokuapp.com)
<ide> | 1 |
Python | Python | add service_name attribute to elbconnection class | b034b2f9b1b1c4250274405dc1c0fa36456e8bcf | <ide><path>libcloud/loadbalancer/drivers/elb.py
<ide> class ELBConnection(SignedAWSConnection):
<ide> version = VERSION
<ide> host = HOST
<ide> responseCls = ELBResponse
<add> service_name = 'elb'
<ide>
<ide>
<ide> class ElasticLBDriver(Driver): | 1 |
Java | Java | refine typehint.builder#onreachabletype javadoc | 3e9b57106e74b09265f247cbcd6853078c32ae49 | <ide><path>spring-core/src/main/java/org/springframework/aot/hint/TypeHint.java
<ide> public Builder(TypeReference type) {
<ide>
<ide> /**
<ide> * Make this hint conditional on the fact that the specified type
<del> * can be resolved.
<add> * is in a reachable code path from a static analysis point of view.
<ide> * @param reachableType the type that should be reachable for this
<ide> * hint to apply
<ide> * @return {@code this}, to facilitate method chaining
<ide> public Builder onReachableType(TypeReference reachableType) {
<ide>
<ide> /**
<ide> * Make this hint conditional on the fact that the specified type
<del> * can be resolved.
<add> * is in a reachable code path from a static analysis point of view.
<ide> * @param reachableType the type that should be reachable for this
<ide> * hint to apply
<ide> * @return {@code this}, to facilitate method chaining | 1 |
Javascript | Javascript | fix development test cases for changed defaults | e7d246eaa153daa6639a4ec06e6bd9d478bba16d | <ide><path>test/cases/side-effects/empty-modules/index.js
<ide> import "./pure";
<ide> import "./referenced";
<ide> import "./side-referenced";
<ide>
<del>it("should skip imports to empty modules", () => {
<del> expect(require.resolveWeak("./cjs")).toBe(null);
<del> expect(require.resolveWeak("./module")).toBe(null);
<del> expect(require.resolveWeak("./pure")).toBe(null);
<del> expect(require.resolveWeak("./referenced")).toBe(null);
<del>});
<add>if (process.env.NODE_ENV === "production") {
<add> it("should skip imports to empty modules", () => {
<add> expect(require.resolveWeak("./cjs")).toBe(null);
<add> expect(require.resolveWeak("./module")).toBe(null);
<add> expect(require.resolveWeak("./pure")).toBe(null);
<add> expect(require.resolveWeak("./referenced")).toBe(null);
<add> });
<add>}
<ide>
<ide> it("should not skip transitive side effects", () => {
<ide> expect(global.value).toBe(true);
<ide><path>test/cases/side-effects/empty-modules/pure.js
<ide> // comment
<del>export default /*#__PURE__*/ console.log("test");
<del>const unused = /*#__PURE__*/ console.log("test");
<del>const unusedExport = /*#__PURE__*/ console.log("test");
<add>export default /*#__PURE__*/ console.log.bind(null, "test");
<add>const unused = /*#__PURE__*/ console.log.bind(null, "test");
<add>const unusedExport = /*#__PURE__*/ console.log.bind(null, "test");
<ide> export { unusedExport, class1, class2, fun1, fun2, fun3 };
<ide> function fun1() {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> return unused;
<ide> }
<ide> const fun2 = function () {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> };
<ide> const fun3 = () => {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> };
<ide> class class1 {
<ide> constructor() {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> }
<ide> }
<ide> const class2 = class {
<ide> constructor() {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> }
<ide> };
<ide> if ("") {
<del> console.log("test");
<add> console.log.bind(null, "test");
<ide> }
<ide> for (; false; ) {}
<ide> for (var i = 0; false; ) {} | 2 |
PHP | PHP | fix failing tests | 16c84176408f91a123c8b6d0d57d678b0fb21065 | <ide><path>tests/TestCase/Database/QueryTest.php
<ide> public function testDebugInfo() {
<ide> ->where(['id' => '1']);
<ide>
<ide> $expected = [
<del> 'sql' => $query->sql(),
<del> 'params' => [
<del> ':c0' => ['value' => '1', 'type' => 'integer', 'placeholder' => 'c0']
<del> ],
<add> 'params' => [],
<ide> 'defaultTypes' => ['id' => 'integer'],
<ide> 'decorators' => 0,
<ide> 'executed' => false
<ide> public function testDebugInfo() {
<ide>
<ide> $query->execute();
<ide> $expected = [
<del> 'sql' => $query->sql(),
<ide> 'params' => [
<ide> ':c0' => ['value' => '1', 'type' => 'integer', 'placeholder' => 'c0']
<ide> ],
<ide> public function testDebugInfo() {
<ide> 'executed' => true
<ide> ];
<ide> $result = $query->__debugInfo();
<add> $this->assertEquals($result, $expected);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/ORM/QueryTest.php
<ide> public function testDebugInfo() {
<ide> });
<ide>
<ide> $expected = [
<del> 'sql' => $query->sql(),
<ide> 'params' => $query->valueBinder()->bindings(),
<ide> 'defaultTypes' => [
<ide> 'authors.id' => 'integer', | 2 |
Python | Python | add type annotations for imagegpt | 7f3d4440d63786b0544de73650707adb72f02c12 | <ide><path>src/transformers/models/imagegpt/modeling_imagegpt.py
<ide> import math
<ide> import os
<ide> import warnings
<del>from typing import Tuple
<add>from typing import Any, Optional, Tuple, Union
<ide>
<ide> import torch
<ide> import torch.utils.checkpoint
<ide> def load_tf_weights_in_imagegpt(model, config, imagegpt_checkpoint_path):
<ide>
<ide>
<ide> class ImageGPTLayerNorm(nn.Module):
<del> def __init__(self, hidden_size, eps=1e-5):
<add> def __init__(self, hidden_size: Tuple[int], eps: float = 1e-5):
<ide> super().__init__()
<ide> self.eps = eps
<ide> self.weight = nn.Parameter(torch.Tensor(hidden_size))
<ide>
<del> def forward(self, tensor):
<add> def forward(self, tensor: torch.Tensor) -> tuple:
<ide> # input is not mean centered
<ide> return (
<ide> tensor
<ide> def forward(self, tensor):
<ide>
<ide>
<ide> class ImageGPTAttention(nn.Module):
<del> def __init__(self, config, is_cross_attention=False, layer_idx=None):
<add> def __init__(self, config, is_cross_attention: Optional[bool] = False, layer_idx: Optional[int] = None):
<ide> super().__init__()
<ide>
<ide> max_positions = config.max_position_embeddings
<ide> def _merge_heads(self, tensor, num_heads, attn_head_size):
<ide>
<ide> def forward(
<ide> self,
<del> hidden_states,
<del> layer_past=None,
<del> attention_mask=None,
<del> head_mask=None,
<del> encoder_hidden_states=None,
<del> encoder_attention_mask=None,
<del> use_cache=False,
<del> output_attentions=False,
<del> ):
<add> hidden_states: torch.Tensor,
<add> layer_past: Optional[bool] = None,
<add> attention_mask: Optional[torch.Tensor] = None,
<add> head_mask: Optional[torch.Tensor] = None,
<add> encoder_hidden_states: Optional[torch.Tensor] = None,
<add> encoder_attention_mask: Optional[torch.Tensor] = None,
<add> use_cache: Optional[bool] = False,
<add> output_attentions: Optional[bool] = False,
<add> ) -> tuple:
<ide> if encoder_hidden_states is not None:
<ide> if not hasattr(self, "q_attn"):
<ide> raise ValueError(
<ide> def __init__(self, intermediate_size, config):
<ide> self.act = ACT2FN[config.activation_function]
<ide> self.dropout = nn.Dropout(config.resid_pdrop)
<ide>
<del> def forward(self, hidden_states):
<add> def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
<ide> hidden_states = self.c_fc(hidden_states)
<ide> hidden_states = self.act(hidden_states)
<ide> hidden_states = self.c_proj(hidden_states)
<ide> def __init__(self, config, layer_idx=None):
<ide>
<ide> def forward(
<ide> self,
<del> hidden_states,
<del> layer_past=None,
<del> attention_mask=None,
<del> head_mask=None,
<del> encoder_hidden_states=None,
<del> encoder_attention_mask=None,
<del> use_cache=False,
<del> output_attentions=False,
<del> ):
<add> hidden_states: torch.Tensor,
<add> layer_past: Optional[bool] = None,
<add> attention_mask: Optional[torch.Tensor] = None,
<add> head_mask: Optional[torch.Tensor] = None,
<add> encoder_hidden_states: Optional[torch.Tensor] = None,
<add> encoder_attention_mask: Optional[torch.Tensor] = None,
<add> use_cache: Optional[bool] = False,
<add> output_attentions: Optional[bool] = False,
<add> ) -> tuple:
<ide> residual = hidden_states
<ide> hidden_states = self.ln_1(hidden_states)
<ide> attn_outputs = self.attn(
<ide> def _set_gradient_checkpointing(self, module, value=False):
<ide> class ImageGPTModel(ImageGPTPreTrainedModel):
<ide> _keys_to_ignore_on_load_missing = ["attn.masked_bias"]
<ide>
<del> def __init__(self, config):
<add> def __init__(self, config: ImageGPTConfig):
<ide> super().__init__(config)
<ide>
<ide> self.embed_dim = config.hidden_size
<ide> def _prune_heads(self, heads_to_prune):
<ide> @replace_return_docstrings(output_type=BaseModelOutputWithPastAndCrossAttentions, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<del> input_ids=None,
<del> past_key_values=None,
<del> attention_mask=None,
<del> token_type_ids=None,
<del> position_ids=None,
<del> head_mask=None,
<del> inputs_embeds=None,
<del> encoder_hidden_states=None,
<del> encoder_attention_mask=None,
<del> use_cache=None,
<del> output_attentions=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> **kwargs,
<del> ):
<add> input_ids: Optional[torch.Tensor] = None,
<add> past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
<add> attention_mask: Optional[torch.Tensor] = None,
<add> token_type_ids: Optional[torch.Tensor] = None,
<add> position_ids: Optional[torch.Tensor] = None,
<add> head_mask: Optional[torch.Tensor] = None,
<add> inputs_embeds: Optional[torch.Tensor] = None,
<add> encoder_hidden_states: Optional[torch.Tensor] = None,
<add> encoder_attention_mask: Optional[torch.Tensor] = None,
<add> use_cache: Optional[bool] = None,
<add> output_attentions: Optional[bool] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> **kwargs: Any,
<add> ) -> Union[Tuple, BaseModelOutputWithPastAndCrossAttentions]:
<ide> r"""
<ide> labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
<ide> Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
<ide> def custom_forward(*inputs):
<ide> class ImageGPTForCausalImageModeling(ImageGPTPreTrainedModel):
<ide> _keys_to_ignore_on_load_missing = [r"attn.masked_bias", r"attn.bias", r"lm_head.weight"]
<ide>
<del> def __init__(self, config):
<add> def __init__(self, config: ImageGPTConfig):
<ide> super().__init__(config)
<ide> self.transformer = ImageGPTModel(config)
<ide> self.lm_head = nn.Linear(config.n_embd, config.vocab_size - 1, bias=False)
<ide> def get_output_embeddings(self):
<ide> def set_output_embeddings(self, new_embeddings):
<ide> self.lm_head = new_embeddings
<ide>
<del> def prepare_inputs_for_generation(self, input_ids, past=None, **kwargs):
<add> def prepare_inputs_for_generation(self, input_ids: torch.Tensor, past: Optional[bool] = None, **kwargs):
<ide> token_type_ids = kwargs.get("token_type_ids", None)
<ide> # only last token for inputs_ids if past is defined in kwargs
<ide> if past:
<ide> def prepare_inputs_for_generation(self, input_ids, past=None, **kwargs):
<ide> @replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<del> input_ids=None,
<del> past_key_values=None,
<del> attention_mask=None,
<del> token_type_ids=None,
<del> position_ids=None,
<del> head_mask=None,
<del> inputs_embeds=None,
<del> encoder_hidden_states=None,
<del> encoder_attention_mask=None,
<del> labels=None,
<del> use_cache=None,
<del> output_attentions=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> **kwargs,
<del> ):
<add> input_ids: Optional[torch.Tensor] = None,
<add> past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
<add> attention_mask: Optional[torch.Tensor] = None,
<add> token_type_ids: Optional[torch.Tensor] = None,
<add> position_ids: Optional[torch.Tensor] = None,
<add> head_mask: Optional[torch.Tensor] = None,
<add> inputs_embeds: Optional[torch.Tensor] = None,
<add> encoder_hidden_states: Optional[torch.Tensor] = None,
<add> encoder_attention_mask: Optional[torch.Tensor] = None,
<add> labels: Optional[torch.Tensor] = None,
<add> use_cache: Optional[bool] = None,
<add> output_attentions: Optional[bool] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> **kwargs: Any,
<add> ) -> Union[Tuple, CausalLMOutputWithCrossAttentions]:
<ide> r"""
<ide> labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
<ide> Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
<ide> def _reorder_cache(past: Tuple[Tuple[torch.Tensor]], beam_idx: torch.Tensor) ->
<ide> class ImageGPTForImageClassification(ImageGPTPreTrainedModel):
<ide> _keys_to_ignore_on_load_missing = [r"h\.\d+\.attn\.masked_bias", r"lm_head\.weight"]
<ide>
<del> def __init__(self, config):
<add> def __init__(self, config: ImageGPTConfig):
<ide> super().__init__(config)
<ide> self.num_labels = config.num_labels
<ide> self.transformer = ImageGPTModel(config)
<ide> def __init__(self, config):
<ide> @replace_return_docstrings(output_type=SequenceClassifierOutputWithPast, config_class=_CONFIG_FOR_DOC)
<ide> def forward(
<ide> self,
<del> input_ids=None,
<del> past_key_values=None,
<del> attention_mask=None,
<del> token_type_ids=None,
<del> position_ids=None,
<del> head_mask=None,
<del> inputs_embeds=None,
<del> labels=None,
<del> use_cache=None,
<del> output_attentions=None,
<del> output_hidden_states=None,
<del> return_dict=None,
<del> **kwargs,
<del> ):
<add> input_ids: Optional[torch.Tensor] = None,
<add> past_key_values: Optional[Tuple[Tuple[torch.Tensor]]] = None,
<add> attention_mask: Optional[torch.Tensor] = None,
<add> token_type_ids: Optional[torch.Tensor] = None,
<add> position_ids: Optional[torch.Tensor] = None,
<add> head_mask: Optional[torch.Tensor] = None,
<add> inputs_embeds: Optional[torch.Tensor] = None,
<add> labels: Optional[torch.Tensor] = None,
<add> use_cache: Optional[bool] = None,
<add> output_attentions: Optional[bool] = None,
<add> output_hidden_states: Optional[bool] = None,
<add> return_dict: Optional[bool] = None,
<add> **kwargs: Any,
<add> ) -> Union[Tuple, SequenceClassifierOutputWithPast]:
<ide> r"""
<ide> labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
<ide> Labels for computing the sequence classification/regression loss. Indices should be in `[0, ..., | 1 |
Javascript | Javascript | add wallaby.js support | 4f1ba6ac78253a815c843e863596fd8403be7392 | <ide><path>wallaby.js
<add>module.exports = function (wallaby) {
<add> return {
<add> files: [
<add> 'server/**/*.js',
<add> 'client/**/*.js',
<add> 'lib/**/*.js',
<add> 'dist/**/*.js',
<add> 'test/**/*.*',
<add> '!test/**/*.test.js'
<add> ],
<add>
<add> tests: [
<add> 'test/**/*.test.js',
<add> '!test/integration/**/*.test.js'
<add> ],
<add>
<add> compilers: {
<add> '**/*.js': wallaby.compilers.babel()
<add> },
<add>
<add> env: {
<add> type: 'node',
<add> runner: 'node',
<add> params: {
<add> env: 'NODE_PATH=test/lib'
<add> }
<add> },
<add>
<add> testFramework: 'jest'
<add> }
<add>} | 1 |
Mixed | Text | pass context parameter to custom tooltip | 6c38c31a0a29a36ec2a4f31cd8cac5aa16b79c86 | <ide><path>docs/docs/configuration/tooltip.md
<ide> The tooltip items passed to the tooltip callbacks implement the following interf
<ide>
<ide> ## External (Custom) Tooltips
<ide>
<del>Custom tooltips allow you to hook into the tooltip rendering process so that you can render the tooltip in your own custom way. Generally this is used to create an HTML tooltip instead of an oncanvas one. You can enable custom tooltips in the global or chart configuration like so:
<add>Custom tooltips allow you to hook into the tooltip rendering process so that you can render the tooltip in your own custom way. Generally this is used to create an HTML tooltip instead of an on-canvas tooltip. The `custom` option takes a function which is passed a context parameter containing the `chart` and `tooltip`. You can enable custom tooltips in the global or chart configuration like so:
<ide>
<ide> ```javascript
<ide> var myPieChart = new Chart(ctx, {
<ide> var myPieChart = new Chart(ctx, {
<ide> // Disable the on-canvas tooltip
<ide> enabled: false,
<ide>
<del> custom: function(tooltipModel) {
<add> custom: function(context) {
<ide> // Tooltip Element
<ide> var tooltipEl = document.getElementById('chartjs-tooltip');
<ide>
<ide> var myPieChart = new Chart(ctx, {
<ide> }
<ide>
<ide> // Hide if no tooltip
<add> var tooltipModel = context.tooltip;
<ide> if (tooltipModel.opacity === 0) {
<ide> tooltipEl.style.opacity = 0;
<ide> return;
<ide><path>docs/docs/getting-started/v3-migration.md
<ide> Animation system was completely rewritten in Chart.js v3. Each property can now
<ide>
<ide> * `xLabel` and `yLabel` were removed. Please use `index` and `value`
<ide> * The `filter` option will now be passed additional parameters when called and should have the method signature `function(tooltipItem, index, tooltipItems, data)`
<add>* The `custom` callback now takes a context object that has `tooltip` and `chart` properties
<ide>
<ide> ## Developer migration
<ide>
<ide><path>src/plugins/plugin.tooltip.js
<ide> export class Tooltip extends Element {
<ide> }
<ide>
<ide> if (changed && options.custom) {
<del> options.custom.call(me, [me]);
<add> options.custom.call(me, {chart: me._chart, tooltip: me});
<ide> }
<ide> }
<ide> | 3 |
Javascript | Javascript | pull refresh outside the work function | 6ba2f6d4b8da38b60399fd4a7d14df109dd3e41a | <ide><path>src/git-repository-async.js
<ide> export default class GitRepositoryAsync {
<ide> checkoutOptions.checkoutStrategy = Git.Checkout.STRATEGY.FORCE | Git.Checkout.STRATEGY.DISABLE_PATHSPEC_MATCH
<ide> return Git.Checkout.head(repo, checkoutOptions)
<ide> })
<del> .then(() => this.refreshStatusForPath(_path))
<ide> })
<ide> })
<add> .then(() => this.refreshStatusForPath(_path))
<ide> }
<ide>
<ide> // Public: Checks out a branch in your repository. | 1 |
Python | Python | fix error types | de0bbe0ac09988cfe84442855183577315a0d80a | <ide><path>glances/glances.py
<ide> def __get_process_stats(self, proc):
<ide> # Process name (cached by PSUtil)
<ide> try:
<ide> procstat['name'] = proc.name()
<del> except AttributeError:
<add> except TypeError:
<ide> procstat['name'] = proc.name
<ide>
<ide>
<ide> def __get_process_stats(self, proc):
<ide> except:
<ide> try:
<ide> self.username_cache[procstat['pid']] = proc.username()
<del> except AttributeError:
<add> except TypeError:
<ide> self.username_cache[procstat['pid']] = proc.username
<ide> except KeyError:
<ide> try:
<ide> def __get_process_stats(self, proc):
<ide> # Process status
<ide> try:
<ide> procstat['status'] = str(proc.status())[:1].upper()
<del> except AttributeError:
<add> except TypeError:
<ide> procstat['status'] = str(proc.status)[:1].upper()
<ide>
<ide> # Process nice | 1 |
Ruby | Ruby | use default location when possible | a17f38dd364af67d5965aaa58438cb768c6d057a | <ide><path>Library/Homebrew/os/mac/xcode.rb
<ide> module Mac
<ide> module Xcode
<ide> module_function
<ide>
<add> DEFAULT_BUNDLE_PATH = Pathname.new("/Applications/Xcode.app").freeze
<ide> BUNDLE_ID = "com.apple.dt.Xcode".freeze
<ide> OLD_BUNDLE_ID = "com.apple.Xcode".freeze
<ide>
<ide> def toolchain_path
<ide> Pathname.new("#{prefix}/Toolchains/XcodeDefault.xctoolchain")
<ide> end
<ide>
<del> # Ask Spotlight where Xcode is. If the user didn't install the
<del> # helper tools and installed Xcode in a non-conventional place, this
<del> # is our only option. See: https://superuser.com/questions/390757
<ide> def bundle_path
<del> MacOS.app_with_bundle_id(V4_BUNDLE_ID, V3_BUNDLE_ID)
<add> # Use the default location if it exists.
<add> return DEFAULT_BUNDLE_PATH if DEFAULT_BUNDLE_PATH.exist?
<add>
<add> # Ask Spotlight where Xcode is. If the user didn't install the
<add> # helper tools and installed Xcode in a non-conventional place, this
<add> # is our only option. See: https://superuser.com/questions/390757
<add> MacOS.app_with_bundle_id(BUNDLE_ID, OLD_BUNDLE_ID)
<ide> end
<ide>
<ide> def installed? | 1 |
PHP | PHP | add email options closure | 052c4c6f0c0483909a5bb2aa814c86897bf33892 | <ide><path>src/Illuminate/Foundation/Auth/ResetsPasswords.php
<ide> public function sendResetLinkEmail(Request $request)
<ide>
<ide> $broker = $this->getBroker();
<ide>
<del> $response = Password::broker($broker)->sendResetLink($request->only('email'), function (Message $message) {
<del> $message->subject($this->getEmailSubject());
<del> });
<add> $response = Password::broker($broker)->sendResetLink($request->only('email'), $this->getEmailOptionsClosure());
<ide>
<ide> switch ($response) {
<ide> case Password::RESET_LINK_SENT:
<ide> public function sendResetLinkEmail(Request $request)
<ide> }
<ide> }
<ide>
<add> /**
<add> * Get the closure which is used to configure email options.
<add> *
<add> * @return \Closure
<add> */
<add> protected function getEmailOptionsClosure()
<add> {
<add> return function (Message $message) {
<add> $message->subject($this->getEmailSubject());
<add> };
<add> }
<add>
<ide> /**
<ide> * Get the e-mail subject line to be used for the reset link email.
<ide> * | 1 |
Python | Python | introduce a final methods in view class | 94f24e2e2f6bff77fcc6ee23f5c90716becab192 | <ide><path>djangorestframework/views.py
<ide> def dispatch(self, request, *args, **kwargs):
<ide> except ErrorResponse, exc:
<ide> response = exc.response
<ide>
<del> # Always add these headers.
<del> #
<del> # TODO - this isn't actually the correct way to set the vary header,
<del> # also it's currently sub-optimal for HTTP caching - need to sort that out.
<del> response.headers['Allow'] = ', '.join(self.allowed_methods)
<del> response.headers['Vary'] = 'Authenticate, Accept'
<del>
<del> # merge with headers possibly set at some point in the view
<del> response.headers.update(self.headers)
<del>
<ide> set_script_prefix(orig_prefix)
<del>
<del> return self.render(response)
<add> return self.final(request, response)
<ide>
<ide> def options(self, request, *args, **kwargs):
<ide> response_obj = {
<ide> def options(self, request, *args, **kwargs):
<ide> response_obj['fields'] = field_name_types
<ide> return response_obj
<ide>
<add> def final(self, request, response, *args, **kargs):
<add> """
<add> As initial, final can be overriden to add code that must be set after the render
<add> """
<add> # Always add these headers.
<add> #
<add> # TODO - this isn't actually the correct way to set the vary header,
<add> # also it's currently sub-optimal for HTTP caching - need to sort that out.
<add> response.headers['Allow'] = ', '.join(self.allowed_methods)
<add> response.headers['Vary'] = 'Authenticate, Accept'
<add>
<add> # merge with headers possibly set at some point in the view
<add> response.headers.update(self.headers)
<add> return self.render(response)
<add>
<ide>
<ide> class ModelView(View):
<ide> """ | 1 |
Go | Go | remove extra locking | f93750b2c4d5f6144f0790ffa89291da3c097b80 | <ide><path>daemon/graphdriver/aufs/aufs.go
<ide> func init() {
<ide>
<ide> // Driver contains information about the filesystem mounted.
<ide> type Driver struct {
<del> sync.Mutex
<ide> root string
<ide> uidMaps []idtools.IDMap
<ide> gidMaps []idtools.IDMap
<ide> func (a *Driver) getParentLayerPaths(id string) ([]string, error) {
<ide> }
<ide>
<ide> func (a *Driver) mount(id string, target string, mountLabel string, layers []string) error {
<del> a.Lock()
<del> defer a.Unlock()
<del>
<ide> // If the id is mounted or we get an error return
<ide> if mounted, err := a.mounted(target); err != nil || mounted {
<ide> return err
<ide> func (a *Driver) mount(id string, target string, mountLabel string, layers []str
<ide> }
<ide>
<ide> func (a *Driver) unmount(mountPath string) error {
<del> a.Lock()
<del> defer a.Unlock()
<del>
<ide> if mounted, err := a.mounted(mountPath); err != nil || !mounted {
<ide> return err
<ide> } | 1 |
Javascript | Javascript | add bench for zlib gzip + gunzip cycle | e797d5babd119900034953a65efe90ed06ed97b9 | <ide><path>benchmark/zlib/pipe.js
<add>'use strict';
<add>const common = require('../common.js');
<add>const fs = require('fs');
<add>const zlib = require('zlib');
<add>
<add>const bench = common.createBenchmark(main, {
<add> inputLen: [1024],
<add> duration: [5],
<add> type: ['string', 'buffer']
<add>});
<add>
<add>function main({ inputLen, duration, type }) {
<add> const buffer = Buffer.alloc(inputLen, fs.readFileSync(__filename));
<add> const chunk = type === 'buffer' ? buffer : buffer.toString('utf8');
<add>
<add> const input = zlib.createGzip();
<add> const output = zlib.createGunzip();
<add>
<add> let readFromOutput = 0;
<add> input.pipe(output);
<add> if (type === 'string')
<add> output.setEncoding('utf8');
<add> output.on('data', (chunk) => readFromOutput += chunk.length);
<add>
<add> function write() {
<add> input.write(chunk, write);
<add> }
<add>
<add> bench.start();
<add> write();
<add>
<add> setTimeout(() => {
<add> // Give result in GBit/s, like the net benchmarks do
<add> bench.end(readFromOutput * 8 / (1024 ** 3));
<add>
<add> // Cut off writing the easy way.
<add> input.write = () => {};
<add> }, duration * 1000);
<add>}
<ide><path>test/parallel/test-benchmark-zlib.js
<ide> runBenchmark('zlib',
<ide> 'method=deflate',
<ide> 'n=1',
<ide> 'options=true',
<del> 'type=Deflate'
<del> ]);
<add> 'type=Deflate',
<add> 'inputLen=1024',
<add> 'duration=0.001'
<add> ],
<add> {
<add> 'NODEJS_BENCHMARK_ZERO_ALLOWED': 1
<add> }); | 2 |
Ruby | Ruby | fix a small typo [ci skip] | a8da5d82fd2b87c1a34d8ede5343219a82be40bc | <ide><path>railties/test/application/configuration_test.rb
<ide> def index
<ide> assert_equal app.env_config['action_dispatch.backtrace_cleaner'], Rails.backtrace_cleaner
<ide> end
<ide>
<del> test "config.colorize_logging defaul is true" do
<add> test "config.colorize_logging default is true" do
<ide> make_basic_app
<ide> assert app.config.colorize_logging
<ide> end | 1 |
Ruby | Ruby | limit => 19` for oracle | a9e6e6e07aa5f433e0f66179b22c1c9575685727 | <ide><path>activerecord/test/cases/connection_adapters/type_lookup_test.rb
<ide> def test_integer_types
<ide>
<ide> def test_bigint_limit
<ide> cast_type = @connection.type_map.lookup("bigint")
<del> assert_equal 8, cast_type.limit
<add> if current_adapter?(:OracleAdapter)
<add> assert_equal 19, cast_type.limit
<add> else
<add> assert_equal 8, cast_type.limit
<add> end
<ide> end
<ide>
<ide> def test_decimal_without_scale
<ide><path>activerecord/test/cases/migration/change_schema_test.rb
<ide> def test_create_table_with_bigint
<ide> eight = columns.detect { |c| c.name == "eight_int" }
<ide>
<ide> if current_adapter?(:OracleAdapter)
<del> assert_equal 'NUMBER(8)', eight.sql_type
<add> assert_equal 'NUMBER(19)', eight.sql_type
<ide> elsif current_adapter?(:SQLite3Adapter)
<ide> assert_equal 'bigint', eight.sql_type
<ide> else | 2 |
Text | Text | translate 04 to korean | 7eabccc214c96e74ee049a0f026f43be403e86bb | <ide><path>docs/docs/04-multiple-components.ko-KR.md
<add>---
<add>id: multiple-components-ko-KR
<add>title: 복합 컴포넌트
<add>permalink: multiple-components.ko-KR.html
<add>prev: interactivity-and-dynamic-uis.ko-KR.html
<add>next: reusable-components.html
<add>---
<add>
<add>지금까지, 단일 컴포넌트에서 데이터를 표시하고 유저 입력을 다루는 것을 살펴보았습니다. 다음엔 React의 최고의 기능 중 하나인 조합가능성(composability)을 살펴봅시다.
<add>
<add>
<add>## 동기: 관심의 분리
<add>
<add>명확히 정의된 인터페이스와 다른 컴포넌트를 재사용해 모듈러 컴포넌트를 구축하면, 함수와 클래스를 이용했을 때 얻을 수 있는 이점 대부분을 얻을 수 있습니다. 특히 앱에서 *다른 관심을 분리*할 수 있습니다.아무리 간단히 새 컴포넌트를 만들었다고 해도 말이죠. 당신의 애플리케이션에서 쓸 커스텀 컴포넌트 라이브러리를 만들어서, 당신의 도메인에 최적화된 방법으로 UI를 표현할 수 있게 됩니다.
<add>
<add>
<add>## 조합(Composition) 예제
<add>
<add>간단히 페이스북 그래프 API를 사용해 프로필 사진과 유저이름을 보여주는 아바타 컴포넌트를 만든다고 합시다.
<add>
<add>```javascript
<add>var Avatar = React.createClass({
<add> render: function() {
<add> return (
<add> <div>
<add> <ProfilePic username={this.props.username} />
<add> <ProfileLink username={this.props.username} />
<add> </div>
<add> );
<add> }
<add>});
<add>
<add>var ProfilePic = React.createClass({
<add> render: function() {
<add> return (
<add> <img src={'http://graph.facebook.com/' + this.props.username + '/picture'} />
<add> );
<add> }
<add>});
<add>
<add>var ProfileLink = React.createClass({
<add> render: function() {
<add> return (
<add> <a href={'http://www.facebook.com/' + this.props.username}>
<add> {this.props.username}
<add> </a>
<add> );
<add> }
<add>});
<add>
<add>React.render(
<add> <Avatar username="pwh" />,
<add> document.getElementById('example')
<add>);
<add>```
<add>
<add>
<add>## 소유권(Ownership)
<add>
<add>위의 예제에서, `Avatar` 인스턴스는 `ProfilePic`과 `ProfileLink`인스턴스를 *가지고* 있습니다. React에서 **소유자는 다른 컴포넌트의 `props`를 설정하는 컴포넌트입니다**. 더 정식으로 말하면, `X` 컴포넌트가 `Y` 컴포넌트의 `render()` 메서드 안에서 만들어졌다면, `Y`가 `X`를 *소유하고* 있다고 합니다. 앞에서 설명한 바와 같이, 컴포넌트는 자신의 `props`를 변경할 수 없습니다. `props`는 언제나 소유자가 설정한 것과 일치합니다. 이와 같은 중요한 성질이 UI가 일관성 있도록 해줍니다.
<add>
<add>소유(owner-ownee)관계와 부모·자식 관계를 구별하는 것은 중요합니다. 부모·자식 관계가 DOM에서부터 쓰던 익숙하고 이미 알고있던 단순한 것인 한편, 소유관계는 React 고유의 것입니다. 위의 예제에서, `Avatar`는 `div`, `ProfilePic`, `ProfileLink`인스턴스를 소유하고, `div`는 `ProfilePic`과 `ProfileLink`인스턴스의 (소유자가 아닌) **부모**입니다.
<add>
<add>
<add>## 자식
<add>
<add>React 컴포넌트 인스턴스를 만들 때, 추가적인 React 컴포넌트나 JavaScript 표현식을 시작과 끝 태그 사이에 넣을 수 있습니다. 이렇게 말이죠.
<add>
<add>```javascript
<add><Parent><Child /></Parent>
<add>```
<add>
<add>`Parent`는 `this.props.children`라는 특수 prop으로 자식들을 읽을 수 있습니다. **`this.props.children` 는 불투명한 데이터 구조이며,** [React.Children 유틸리티](/react/docs/top-level-api.html#react.children)를 사용해 자식들을 관리합니다.
<add>
<add>
<add>### 자식 Reconciliation
<add>
<add>**Reconciliation은 React가 DOM을 각각 새로운 렌더 패스에 업데이트하는 과정입니다.** 일반적으로, 자식은 렌더하는 순서에 따라 비교조정됩니다. 예를 들어, 각각의 마크업을 생성하는 두 랜더 패스가 있다고 해봅시다.
<add>
<add>```html
<add>// Render Pass 1
<add><Card>
<add> <p>Paragraph 1</p>
<add> <p>Paragraph 2</p>
<add></Card>
<add>// Render Pass 2
<add><Card>
<add> <p>Paragraph 2</p>
<add></Card>
<add>```
<add>
<add>직관적으로 보면, `<p>Paragraph 1</p>`가 없어졌습니다만 그러는 대신에, React는 첫 번째 자식의 텍스트를 비교조정하고 마지막 자식을 파괴하도록 DOM을 비교조정할 것입니다. React는 자식들의 *순서*에 따라 비교조정합니다.
<add>
<add>
<add>### 상태기반(Stateful) 자식
<add>
<add>대부분의 컴포넌트에서는, 이것은 큰 문제가 아닙니다. 하지만 렌더 패스 간에 `this.state`를 유지하는 상태기반의 컴포넌트에서는 매우 문제가 될 수 있습니다.
<add>
<add>대부분의 경우, 이 문제는 엘리먼트를 파괴하지 않고 숨김으로써 피해갈 수 있습니다.
<add>
<add>```html
<add>// Render Pass 1
<add><Card>
<add> <p>Paragraph 1</p>
<add> <p>Paragraph 2</p>
<add></Card>
<add>// Render Pass 2
<add><Card>
<add> <p style={{'{{'}}display: 'none'}}>Paragraph 1</p>
<add> <p>Paragraph 2</p>
<add></Card>
<add>```
<add>
<add>
<add>### 동적 자식
<add>
<add>자식들이 섞이거나(검색의 결과같은 경우) 새로운 컴포넌트가 리스트의 앞에 추가(스트림같은 경우)된다면 상황은 점점 더 까다로워집니다. 이런 때에의 동일성과 각 자식의 상태는 반드시 랜더 패스 간에 유지돼야 합니다. 각 자식에 `key`를 할당 함으로써 독자적으로 식별할 수 있습니다.
<add>
<add>```javascript
<add> render: function() {
<add> var results = this.props.results;
<add> return (
<add> <ol>
<add> {results.map(function(result) {
<add> return <li key={result.id}>{result.text}</li>;
<add> })}
<add> </ol>
<add> );
<add> }
<add>```
<add>
<add>React가 키가 있는 자식들을 비교조정할 때, React는 `key`가 있는 자식이 (오염(clobbered)되는 대신) 재배치되고 (재사용되는 대신) 파괴되도록 보장할 것입니다.
<add>
<add>`key`는 *항상* 배열 안의 각 컴포넌트의 컨테이너 HTML 자식이 아닌 컴포넌트에게 직접 주어져야 합니다.
<add>
<add>```javascript
<add>// 틀림!
<add>var ListItemWrapper = React.createClass({
<add> render: function() {
<add> return <li key={this.props.data.id}>{this.props.data.text}</li>;
<add> }
<add>});
<add>var MyComponent = React.createClass({
<add> render: function() {
<add> return (
<add> <ul>
<add> {this.props.results.map(function(result) {
<add> return <ListItemWrapper data={result}/>;
<add> })}
<add> </ul>
<add> );
<add> }
<add>});
<add>
<add>// 맞음 :)
<add>var ListItemWrapper = React.createClass({
<add> render: function() {
<add> return <li>{this.props.data.text}</li>;
<add> }
<add>});
<add>var MyComponent = React.createClass({
<add> render: function() {
<add> return (
<add> <ul>
<add> {this.props.results.map(function(result) {
<add> return <ListItemWrapper key={result.id} data={result}/>;
<add> })}
<add> </ul>
<add> );
<add> }
<add>});
<add>```
<add>
<add>객체를 넘기는 것으로 자식에 키를 할당할 수도 있습니다. 객체 키는 각 값의 `key`로 사용될 것입니다. 하지만 JavaScript가 프로퍼티의 순서의 유지를 보장하지 않는 것을 기억해 두셔야 합니다. 실제 브라우저에서는 32비트의 양의 정수로 해석할 수 있는 프로퍼티를 **제외**하고 프로퍼티의 순서를 유지합니다. 숫자 프로퍼티는 다른 프로퍼티보다 먼저 순차정렬 됩니다. 이런 경우 React는 순서없이 컴포넌트를 렌더합니다. 키에 스트링 접두사를 붙여서 이를 막을 수 있습니다.
<add>
<add>```javascript
<add> render: function() {
<add> var items = {};
<add>
<add> this.props.results.forEach(function(result) {
<add> // result.id가 (짧은 해시처럼) 숫자로 보일 수 있다면,
<add> // 객체의 반복순서는 보장되지 않습니다. 이 경우, 프리픽스를 넣어
<add> // 키가 스트링임을 보장합니다.
<add> items['result-' + result.id] = <li>{result.text}</li>;
<add> });
<add>
<add> return (
<add> <ol>
<add> {items}
<add> </ol>
<add> );
<add> }
<add>```
<add>
<add>## 데이터 흐름
<add>
<add>React에서 데이터는 위에서 말한 것처럼 `props`를 통해 소유자로부터 소유한 컴포넌트로 흐릅니다. 이것은 사실상 단방향 데이터 바인딩입니다. 소유자는 `props`나 `state`를 기준으로 계산한 어떤 값으로 소유한 컴포넌트의 props를 바인드합니다. 이 과정은 재귀적으로 발생하므로, 데이터의 변경은 자동으로 모든 곳에 반영됩니다.
<add>
<add>
<add>## 성능의 주의점
<add>
<add>소유자가 가지고 있는 노드의 수가 많아지면 데이터의 변화에 반응하는 비용이 증가할 것으로 생각할 수도 있습니다. 좋은 소식은 JavaScript가 빠르고 `render()` 메서드는 꽤 간단한 경향이 있어, 대부분 애플리케이션에서 매우 빠르다는 점입니다. 덧붙여, 대부분의 병목 현상은 JS 실행이 아닌 DOM 변경에서 일어나고, React는 배치와 탐지 변경을 이용해 최적화해 줍니다.
<add>
<add>하지만, 가끔 성능을 위해 정교하게 제어해야 할 때도 있습니다. 이런 경우,React가 서브트리의 처리를 건너 뛰도록 간단히 `shouldComponentUpdate()`를 오버라이드해 false를 리턴하게 할 수 있습니다. 좀 더 자세한 정보는 [React 참조 문서](/react/docs/component-specs.html)를 보세요.
<add>
<add>> 주의:
<add>>
<add>> 데이터가 실제로는 변경되었지만 `shouldComponentUpdate()`가 false를 리턴한다면 React는 UI를 싱크시킬수 없습니다. 이 기능을 사용할 때에는 지금 무엇을 하고 있는지 알고있고, 눈에 띄는 성능 문제가 있을경우에만 사용하세요. JavaScript는 DOM에 비교하여 빠릅니다. 과소평가하지 마세요. | 1 |
Text | Text | fix a typo in the assert.md | 74f0e51b4df3eccc3bf1f61ab2922b35da6adab1 | <ide><path>doc/api/assert.md
<ide> assert.notDeepEqual(obj1, obj3);
<ide> // AssertionError: { a: { b: 1 } } notDeepEqual { a: { b: 1 } }
<ide>
<ide> assert.notDeepEqual(obj1, obj4);
<del> // OK, obj1 and obj2 are not deeply equal
<add> // OK, obj1 and obj4 are not deeply equal
<ide> ```
<ide>
<ide> If the values are deeply equal, an `AssertionError` is thrown with a `message` | 1 |
Ruby | Ruby | improve pronoun for multiple version message | 66ca9e79fc14f28e54f259bba94a52922728bde8 | <ide><path>Library/Homebrew/cmd/uninstall.rb
<ide> def uninstall
<ide> versions = rack.subdirs.map(&:basename)
<ide> verb = versions.length == 1 ? "is" : "are"
<ide> puts "#{keg.name} #{versions.join(", ")} #{verb} still installed."
<del> puts "Remove them all with `brew uninstall --force #{keg.name}`."
<add> puts "Remove all versions with `brew uninstall --force #{keg.name}`."
<ide> end
<ide> end
<ide> end | 1 |
Python | Python | remove unnecessary use of fusedlayernorm | 98dd19b96b351f481e1268ab6c7b035bb21d106e | <ide><path>pytorch_transformers/modeling_bert.py
<ide> def swish(x):
<ide> ACT2FN = {"gelu": gelu, "relu": torch.nn.functional.relu, "swish": swish}
<ide>
<ide>
<del>try:
<del> from apex.normalization.fused_layer_norm import FusedLayerNorm as BertLayerNorm
<del>except (ImportError, AttributeError) as e:
<del> logger.info("Better speed can be achieved with apex installed from https://www.github.com/nvidia/apex .")
<del> BertLayerNorm = torch.nn.LayerNorm
<add>BertLayerNorm = torch.nn.LayerNorm
<ide>
<ide> class BertEmbeddings(nn.Module):
<ide> """Construct the embeddings from word, position and token_type embeddings. | 1 |
Javascript | Javascript | get all promises when using multiple test files | c50b2ad16c8251a8bacfec0d9f344dad0bedad7c | <ide><path>test/ConfigTestCases.template.js
<ide> const describeCases = config => {
<ide> }
<ide> };
<ide>
<del> results.push(
<del> _require(outputDirectory, optionsArr[i], bundlePath)
<del> );
<add> if (Array.isArray(bundlePath)) {
<add> for (const bundlePathItem of bundlePath) {
<add> results.push(
<add> _require(
<add> outputDirectory,
<add> optionsArr[i],
<add> "./" + bundlePathItem
<add> )
<add> );
<add> }
<add> } else {
<add> results.push(
<add> _require(outputDirectory, optionsArr[i], bundlePath)
<add> );
<add> }
<ide> }
<ide> }
<ide> // give a free pass to compilation that generated an error | 1 |
Go | Go | fix a comment error | 19e2c9177a111758e304ad9dbd0835cddbd840ab | <ide><path>volume/store/store.go
<ide> func (s *VolumeStore) Purge(name string) {
<ide> type VolumeStore struct {
<ide> locks *locker.Locker
<ide> globalLock sync.Mutex
<del> // names stores the volume name -> driver name relationship.
<add> // names stores the volume name -> volume relationship.
<ide> // This is used for making lookups faster so we don't have to probe all drivers
<ide> names map[string]volume.Volume
<ide> // refs stores the volume name and the list of things referencing it | 1 |
Javascript | Javascript | increase delay of data-uri test | 4fae91141c04dffb5094b5961eb5d7b2a1f3f423 | <ide><path>test/unit/manipulation.js
<ide> asyncTest( "Insert script with data-URI (gh-1887)", 1, function() {
<ide> }
<ide>
<ide> start();
<del> });
<add> }, 100 );
<ide> }); | 1 |
PHP | PHP | move string target parsing into routebuilder | 74ee0913cb46812577a8dfb7a33833dc2fb7dda1 | <ide><path>src/Routing/RouteBuilder.php
<ide> protected function _methodRoute($method, $template, $target, $name)
<ide> 'routeClass' => $this->_routeClass,
<ide> ];
<ide>
<add> $target = $this->parseDefaults($target);
<ide> $target['_method'] = $method;
<ide>
<ide> $route = $this->_makeRoute($template, $target, $options);
<ide> public function loadPlugin($name, $file = 'routes.php')
<ide> * @throws \InvalidArgumentException
<ide> * @throws \BadMethodCallException
<ide> */
<del> public function connect($route, array $defaults = [], array $options = [])
<add> public function connect($route, $defaults = [], array $options = [])
<ide> {
<del> if ($defaults === null) {
<del> $defaults = [];
<del> }
<del>
<add> $defaults = $this->parseDefaults($defaults);
<ide> if (!isset($options['action']) && !isset($defaults['action'])) {
<ide> $defaults['action'] = 'index';
<ide> }
<ide> public function connect($route, array $defaults = [], array $options = [])
<ide> return $route;
<ide> }
<ide>
<add> /**
<add> * Parse the defaults if they're a string
<add> *
<add> * @param string|array $defaults Defaults array from the connect() method.
<add> * @return string|array
<add> */
<add> protected static function parseDefaults($defaults)
<add> {
<add> if (!is_string($defaults)) {
<add> return $defaults;
<add> }
<add>
<add> $regex = '/(?:([a-zA-Z0-9\/]*)\.)?([a-zA-Z0-9\/]*?)(?:\/)?([a-zA-Z0-9]*):{2}([a-zA-Z0-9_]*)/i';
<add> if (preg_match($regex, $defaults, $matches)) {
<add> unset($matches[0]);
<add> $matches = array_filter($matches, function ($value) {
<add> return $value !== '' && $value !== '::';
<add> });
<add>
<add> // Intentionally incomplete switch
<add> switch (count($matches)) {
<add> case 2:
<add> return [
<add> 'controller' => $matches[3],
<add> 'action' => $matches[4]
<add> ];
<add> case 3:
<add> return [
<add> 'prefix' => strtolower($matches[2]),
<add> 'controller' => $matches[3],
<add> 'action' => $matches[4]
<add> ];
<add> case 4:
<add> return [
<add> 'plugin' => $matches[1],
<add> 'prefix' => strtolower($matches[2]),
<add> 'controller' => $matches[3],
<add> 'action' => $matches[4]
<add> ];
<add> }
<add> }
<add> throw new RuntimeException("Could not parse `{$defaults}` route destination string.");
<add> }
<add>
<ide> /**
<ide> * Create a route object, or return the provided object.
<ide> *
<ide><path>src/Routing/Router.php
<ide> public static function getNamedExpressions()
<ide> */
<ide> public static function connect($route, $defaults = [], $options = [])
<ide> {
<del> $defaults = static::parseDefaults($defaults);
<del>
<ide> static::$initialized = true;
<ide> static::scope('/', function ($routes) use ($route, $defaults, $options) {
<ide> $routes->connect($route, $defaults, $options);
<ide> });
<ide> }
<ide>
<del> /**
<del> * Parse the defaults if they're a string
<del> *
<del> * @param string|array Defaults array from the connect() method.
<del> * @return string|array
<del> */
<del> protected static function parseDefaults($defaults)
<del> {
<del> if (!is_string($defaults)) {
<del> return $defaults;
<del> }
<del>
<del> $regex = '/(?:([a-zA-Z0-9]*)\.)?([a-zA-Z0-9]*)(?:\\\\)?([a-zA-Z0-9]*):{2}([a-zA-Z0-9]*)/i';
<del>
<del> if (preg_match($regex, $defaults, $matches)) {
<del> unset($matches[0]);
<del> $matches = array_filter($matches, function ($value) {
<del> return $value !== '' && $value !== '::';
<del> });
<del>
<del> switch (count($matches)) {
<del> case 2:
<del> return [
<del> 'controller' => $matches[2],
<del> 'view' => $matches[4]
<del> ];
<del> case 3:
<del> return [
<del> 'prefix' => $matches[2],
<del> 'controller' => $matches[3],
<del> 'view' => $matches[4]
<del> ];
<del> case 4:
<del> return [
<del> 'plugin' => $matches[1],
<del> 'prefix' => $matches[2],
<del> 'controller' => $matches[3],
<del> 'view' => $matches[4]
<del> ];
<del> default:
<del> throw new RuntimeException('Could not parse the string syntax for Router::connect() defaults.');
<del> }
<del> }
<del> }
<del>
<ide> /**
<ide> * Connects a new redirection Route in the router.
<ide> *
<ide><path>tests/TestCase/Routing/RouteBuilderTest.php
<ide> use Cake\Routing\Route\RedirectRoute;
<ide> use Cake\Routing\Route\Route;
<ide> use Cake\TestSuite\TestCase;
<add>use RuntimeException;
<ide>
<ide> /**
<ide> * RouteBuilder test case
<ide> public function testConnectTrimTrailingSlash()
<ide> $this->assertEquals($expected, $this->collection->parse('/articles/'));
<ide> }
<ide>
<add> /**
<add> * Test connect() with short string syntax
<add> *
<add> * @return void
<add> */
<add> public function testConnectShortStringInvalid()
<add> {
<add> $this->expectException(RuntimeException::class);
<add> $routes = new RouteBuilder($this->collection, '/');
<add> $routes->connect('/my-articles/view', 'Articles:no');
<add> }
<add>
<add> /**
<add> * Test connect() with short string syntax
<add> *
<add> * @return void
<add> */
<add> public function testConnectShortString()
<add> {
<add> $routes = new RouteBuilder($this->collection, '/');
<add> $routes->connect('/my-articles/view', 'Articles::view');
<add> $expected = [
<add> 'pass' => [],
<add> 'controller' => 'Articles',
<add> 'action' => 'view',
<add> 'plugin' => null,
<add> '_matchedRoute' => '/my-articles/view'
<add> ];
<add> $this->assertEquals($expected, $this->collection->parse('/my-articles/view'));
<add>
<add> $url = $expected['_matchedRoute'];
<add> unset($expected['_matchedRoute']);
<add> $this->assertEquals($url, '/' . $this->collection->match($expected, []));
<add> }
<add>
<add> /**
<add> * Test connect() with short string syntax
<add> *
<add> * @return void
<add> */
<add> public function testConnectShortStringPluginPrefix()
<add> {
<add> $routes = new RouteBuilder($this->collection, '/');
<add> $routes->connect('/admin/blog/articles/view', 'Vendor/Blog.Management/Admin/Articles::view');
<add> $expected = [
<add> 'pass' => [],
<add> 'plugin' => 'Vendor/Blog',
<add> 'prefix' => 'management/admin',
<add> 'controller' => 'Articles',
<add> 'action' => 'view',
<add> '_matchedRoute' => '/admin/blog/articles/view'
<add> ];
<add> $this->assertEquals($expected, $this->collection->parse('/admin/blog/articles/view'));
<add>
<add> $url = $expected['_matchedRoute'];
<add> unset($expected['_matchedRoute']);
<add> $this->assertEquals($url, '/' . $this->collection->match($expected, []));
<add> }
<add>
<ide> /**
<ide> * Test if a route name already exist
<ide> *
<ide> public function testConnectTrimTrailingSlash()
<ide> public function testNameExists()
<ide> {
<ide> $routes = new RouteBuilder($this->collection, '/l', ['prefix' => 'api']);
<del>
<ide> $this->assertFalse($routes->nameExists('myRouteName'));
<ide>
<ide> $routes->connect('myRouteUrl', ['action' => 'index'], ['_name' => 'myRouteName']);
<del>
<ide> $this->assertTrue($routes->nameExists('myRouteName'));
<ide> }
<ide>
<ide> public function testHttpMethods($method)
<ide> );
<ide> }
<ide>
<add> /**
<add> * Test that the HTTP method helpers create the right kind of routes.
<add> *
<add> * @dataProvider httpMethodProvider
<add> * @return void
<add> */
<add> public function testHttpMethodsStringTarget($method)
<add> {
<add> $routes = new RouteBuilder($this->collection, '/', [], ['namePrefix' => 'app:']);
<add> $route = $routes->{strtolower($method)}(
<add> '/bookmarks/:id',
<add> 'Bookmarks::view',
<add> 'route-name'
<add> );
<add> $this->assertInstanceOf(Route::class, $route, 'Should return a route');
<add> $this->assertSame($method, $route->defaults['_method']);
<add> $this->assertSame('app:route-name', $route->options['_name']);
<add> $this->assertSame('/bookmarks/:id', $route->template);
<add> $this->assertEquals(
<add> ['plugin' => null, 'controller' => 'Bookmarks', 'action' => 'view', '_method' => $method],
<add> $route->defaults
<add> );
<add> }
<add>
<ide> /**
<ide> * Integration test for http method helpers and route fluent method
<ide> *
<ide><path>tests/TestCase/Routing/RouterTest.php
<ide> public function testCreateRouteBuilder()
<ide> }
<ide>
<ide> /**
<del> * testShortStringSyntax
<add> * test connect() with short string syntax
<ide> *
<ide> * @return void
<ide> */
<del> public function testShortStringSyntax()
<add> public function testConnectShortStringSyntax()
<ide> {
<del> Router::connect('/admin/articles/view', 'Admin\Articles::view');
<add> Router::connect('/admin/articles/view', 'Admin/Articles::view');
<ide> $result = Router::parseRequest($this->makeRequest('/admin/articles/view', 'GET'));
<ide> $expected = [
<ide> 'pass' => [],
<del> 'prefix' => 'Admin',
<add> 'prefix' => 'admin',
<ide> 'controller' => 'Articles',
<del> 'view' => 'view',
<del> 'action' => 'index',
<add> 'action' => 'view',
<ide> 'plugin' => null,
<ide> '_matchedRoute' => '/admin/articles/view'
<ide>
<ide> ];
<ide> $this->assertEquals($result, $expected);
<del>
<del> Router::connect('/admin/blog/articles/view', 'Blog.Admin\Articles::view');
<del> $result = Router::parseRequest($this->makeRequest('/admin/blog/articles/view', 'GET'));
<del> $expected = [
<del> 'pass' => [],
<del> 'plugin' => 'Blog',
<del> 'prefix' => 'Admin',
<del> 'controller' => 'Articles',
<del> 'view' => 'view',
<del> 'action' => 'index',
<del> '_matchedRoute' => '/admin/blog/articles/view'
<del> ];
<del> $this->assertEquals($result, $expected);
<del>
<del> Router::connect('/my-articles/view', 'Articles::view');
<del> $result = Router::parseRequest($this->makeRequest('/my-articles/view', 'GET'));
<del> $expected = [
<del> 'pass' => [],
<del> 'controller' => 'Articles',
<del> 'view' => 'view',
<del> 'action' => 'index',
<del> 'plugin' => null,
<del> '_matchedRoute' => '/my-articles/view'
<del> ];
<del> $this->assertEquals($result, $expected);
<ide> }
<ide>
<ide> /** | 4 |
Ruby | Ruby | replace map.flatten with flat_map in railties | cff340f1eda96be952437abeed80591a85ef0194 | <ide><path>railties/lib/rails/commands/plugin.rb
<ide> end
<ide> if File.exist?(railsrc)
<ide> extra_args_string = File.read(railsrc)
<del> extra_args = extra_args_string.split(/\n+/).map {|l| l.split}.flatten
<add> extra_args = extra_args_string.split(/\n+/).flat_map {|l| l.split}
<ide> puts "Using #{extra_args.join(" ")} from #{railsrc}"
<ide> ARGV.insert(1, *extra_args)
<ide> end
<ide><path>railties/lib/rails/generators/actions.rb
<ide> def initializer(filename, data=nil, &block)
<ide> # generate(:authenticated, "user session")
<ide> def generate(what, *args)
<ide> log :generate, what
<del> argument = args.map {|arg| arg.to_s }.flatten.join(" ")
<add> argument = args.flat_map {|arg| arg.to_s }.join(" ")
<ide>
<ide> in_root { run_ruby_script("bin/rails generate #{what} #{argument}", verbose: false) }
<ide> end
<ide><path>railties/lib/rails/paths.rb
<ide> def load_paths
<ide> def filter_by(&block)
<ide> all_paths.find_all(&block).flat_map { |path|
<ide> paths = path.existent
<del> paths - path.children.map { |p| yield(p) ? [] : p.existent }.flatten
<add> paths - path.children.flat_map { |p| yield(p) ? [] : p.existent }
<ide> }.uniq
<ide> end
<ide> end
<ide><path>railties/lib/rails/source_annotation_extractor.rb
<ide> def extract_annotations_from(file, pattern)
<ide> # Prints the mapping from filenames to annotations in +results+ ordered by filename.
<ide> # The +options+ hash is passed to each annotation's +to_s+.
<ide> def display(results, options={})
<del> options[:indent] = results.map { |f, a| a.map(&:line) }.flatten.max.to_s.size
<add> options[:indent] = results.flat_map { |f, a| a.map(&:line) }.max.to_s.size
<ide> results.keys.sort.each do |file|
<ide> puts "#{file}:"
<ide> results[file].each do |note| | 4 |
Text | Text | add periods at the end of hints | 857e29258da36bebfcff74034062fc9f8cf9f5fd | <ide><path>curriculum/challenges/english/10-coding-interview-prep/data-structures/perform-a-subset-check-on-two-sets-of-data.md
<ide> assert(
<ide> );
<ide> ```
<ide>
<del>The first `Set` should be contained in the second `Set`
<add>The first `Set` should be contained in the second `Set`.
<ide>
<ide> ```js
<ide> assert(
<ide> assert(
<ide> );
<ide> ```
<ide>
<del>`['a', 'b'].isSubsetOf(['a', 'b', 'c', 'd'])` should return `true`
<add>`['a', 'b'].isSubsetOf(['a', 'b', 'c', 'd'])` should return `true`.
<ide>
<ide> ```js
<ide> assert(
<ide> assert(
<ide> );
<ide> ```
<ide>
<del>`['a', 'b', 'c'].isSubsetOf(['a', 'b'])` should return `false`
<add>`['a', 'b', 'c'].isSubsetOf(['a', 'b'])` should return `false`.
<ide>
<ide> ```js
<ide> assert(
<ide> assert(
<ide> );
<ide> ```
<ide>
<del>`[].isSubsetOf([])` should return `true`
<add>`[].isSubsetOf([])` should return `true`.
<ide>
<ide> ```js
<ide> assert(
<ide> assert(
<ide> );
<ide> ```
<ide>
<del>`['a', 'b'].isSubsetOf(['c', 'd'])` should return `false`
<add>`['a', 'b'].isSubsetOf(['c', 'd'])` should return `false`.
<ide>
<ide> ```js
<ide> assert( | 1 |
Text | Text | use serial comma in events docs | 82861e9446af8ad12d2c2a0967e331a4333440d6 | <ide><path>doc/api/events.md
<ide> The [`--trace-warnings`][] command-line flag can be used to display the
<ide> stack trace for such warnings.
<ide>
<ide> The emitted warning can be inspected with [`process.on('warning')`][] and will
<del>have the additional `emitter`, `type` and `count` properties, referring to
<add>have the additional `emitter`, `type`, and `count` properties, referring to
<ide> the event emitter instance, the event’s name and the number of attached
<ide> listeners, respectively.
<ide> Its `name` property is set to `'MaxListenersExceededWarning'`. | 1 |
Javascript | Javascript | remove duplicate test | a7e40622157fad6822d5f734c6325b99de811211 | <ide><path>packages/ember-routing-htmlbars/tests/helpers/link-to_test.js
<ide> QUnit.module("Handlebars {{link-to}} helper", {
<ide> });
<ide>
<ide>
<del>test("should be able to be inserted in DOM when the router is not present", function() {
<del> var template = "{{#link-to 'index'}}Go to Index{{/link-to}}";
<del> view = EmberView.create({
<del> template: compile(template)
<del> });
<del>
<del> appendView(view);
<del>
<del> equal(view.$().text(), 'Go to Index');
<del>});
<del>
<ide> test("should be able to be inserted in DOM when the router is not present", function() {
<ide> var template = "{{#link-to 'index'}}Go to Index{{/link-to}}";
<ide> view = EmberView.create({ | 1 |
Python | Python | remove a bunch of unnescesarry iterkeys() calls | 4c97101b1f0815a3f311fc77483b935fe62966bb | <ide><path>django/conf/__init__.py
<ide> def __getattr__(self, name):
<ide> return getattr(self.default_settings, name)
<ide>
<ide> def __dir__(self):
<del> return list(six.iterkeys(self.__dict__)) + dir(self.default_settings)
<add> return list(self.__dict__) + dir(self.default_settings)
<ide>
<ide> # For Python < 2.6:
<ide> __members__ = property(lambda self: self.__dir__())
<ide><path>django/contrib/admin/options.py
<ide> def get_fieldsets(self, request, obj=None):
<ide> if self.declared_fieldsets:
<ide> return self.declared_fieldsets
<ide> form = self.get_form(request, obj)
<del> fields = list(six.iterkeys(form.base_fields)) + list(self.get_readonly_fields(request, obj))
<add> fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
<ide> return [(None, {'fields': fields})]
<ide>
<ide> def get_form(self, request, obj=None, **kwargs):
<ide> def get_fieldsets(self, request, obj=None):
<ide> if self.declared_fieldsets:
<ide> return self.declared_fieldsets
<ide> form = self.get_formset(request, obj).form
<del> fields = list(six.iterkeys(form.base_fields)) + list(self.get_readonly_fields(request, obj))
<add> fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
<ide> return [(None, {'fields': fields})]
<ide>
<ide> def queryset(self, request):
<ide><path>django/contrib/admin/templatetags/admin_list.py
<ide> def result_headers(cl):
<ide> if i in ordering_field_columns:
<ide> sorted = True
<ide> order_type = ordering_field_columns.get(i).lower()
<del> sort_priority = list(six.iterkeys(ordering_field_columns)).index(i) + 1
<add> sort_priority = list(ordering_field_columns).index(i) + 1
<ide> th_classes.append('sorted %sending' % order_type)
<ide> new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]
<ide>
<ide><path>django/contrib/auth/admin.py
<ide> def user_change_password(self, request, id, form_url=''):
<ide> else:
<ide> form = self.change_password_form(user)
<ide>
<del> fieldsets = [(None, {'fields': list(six.iterkeys(form.base_fields))})]
<add> fieldsets = [(None, {'fields': list(form.base_fields)})]
<ide> adminForm = admin.helpers.AdminForm(form, fieldsets, {})
<ide>
<ide> context = {
<ide><path>django/contrib/auth/tests/forms.py
<ide> def test_success(self):
<ide> def test_field_order(self):
<ide> # Regression test - check the order of fields:
<ide> user = User.objects.get(username='testclient')
<del> self.assertEqual(list(six.iterkeys(PasswordChangeForm(user, {}).fields)),
<add> self.assertEqual(list(PasswordChangeForm(user, {}).fields),
<ide> ['old_password', 'new_password1', 'new_password2'])
<ide>
<ide>
<ide><path>django/contrib/formtools/wizard/views.py
<ide> def __repr__(self):
<ide> @property
<ide> def all(self):
<ide> "Returns the names of all steps/forms."
<del> return list(six.iterkeys(self._wizard.get_form_list()))
<add> return list(self._wizard.get_form_list())
<ide>
<ide> @property
<ide> def count(self):
<ide><path>django/contrib/gis/db/backends/mysql/operations.py
<ide> class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
<ide> 'within' : 'MBRWithin',
<ide> }
<ide>
<del> gis_terms = dict([(term, None) for term in list(six.iterkeys(geometry_functions)) + ['isnull']])
<add> gis_terms = dict([(term, None) for term in list(geometry_functions) + ['isnull']])
<ide>
<ide> def geo_db_type(self, f):
<ide> return f.geom_type
<ide><path>django/contrib/gis/db/backends/oracle/operations.py
<ide> class OracleOperations(DatabaseOperations, BaseSpatialOperations):
<ide> geometry_functions.update(distance_functions)
<ide>
<ide> gis_terms = ['isnull']
<del> gis_terms += list(six.iterkeys(geometry_functions))
<add> gis_terms += list(geometry_functions)
<ide> gis_terms = dict([(term, None) for term in gis_terms])
<ide>
<ide> truncate_params = {'relate' : None}
<ide><path>django/contrib/gis/db/backends/postgis/operations.py
<ide> def get_dist_ops(operator):
<ide>
<ide> # Creating a dictionary lookup of all GIS terms for PostGIS.
<ide> gis_terms = ['isnull']
<del> gis_terms += list(six.iterkeys(self.geometry_operators))
<del> gis_terms += list(six.iterkeys(self.geometry_functions))
<add> gis_terms += list(self.geometry_operators)
<add> gis_terms += list(self.geometry_functions)
<ide> self.gis_terms = dict([(term, None) for term in gis_terms])
<ide>
<ide> self.area = prefix + 'Area'
<ide><path>django/contrib/gis/db/backends/spatialite/operations.py
<ide> def __init__(self, connection):
<ide>
<ide> # Creating the GIS terms dictionary.
<ide> gis_terms = ['isnull']
<del> gis_terms += list(six.iterkeys(self.geometry_functions))
<add> gis_terms += list(self.geometry_functions)
<ide> self.gis_terms = dict([(term, None) for term in gis_terms])
<ide>
<ide> if version >= (2, 4, 0):
<ide><path>django/contrib/gis/db/models/query.py
<ide> def values_list(self, *fields, **kwargs):
<ide> flat = kwargs.pop('flat', False)
<ide> if kwargs:
<ide> raise TypeError('Unexpected keyword arguments to values_list: %s'
<del> % (list(six.iterkeys(kwargs)),))
<add> % (list(kwargs),))
<ide> if flat and len(fields) > 1:
<ide> raise TypeError("'flat' is not valid when values_list is called with more than one field.")
<ide> return self._clone(klass=GeoValuesListQuerySet, setup=True, flat=flat,
<ide><path>django/contrib/gis/db/models/sql/compiler.py
<ide> def resolve_columns(self, row, fields=()):
<ide> objects.
<ide> """
<ide> values = []
<del> aliases = list(six.iterkeys(self.query.extra_select))
<add> aliases = list(self.query.extra_select)
<ide>
<ide> # Have to set a starting row number offset that is used for
<ide> # determining the correct starting row index -- needed for
<ide><path>django/core/management/__init__.py
<ide> def autocomplete(self):
<ide> except IndexError:
<ide> curr = ''
<ide>
<del> subcommands = list(six.iterkeys(get_commands())) + ['help']
<add> subcommands = list(get_commands()) + ['help']
<ide> options = [('--help', None)]
<ide>
<ide> # subcommand
<ide><path>django/core/serializers/__init__.py
<ide> def get_serializer(format):
<ide> def get_serializer_formats():
<ide> if not _serializers:
<ide> _load_serializers()
<del> return list(six.iterkeys(_serializers))
<add> return list(_serializers)
<ide>
<ide> def get_public_serializer_formats():
<ide> if not _serializers:
<ide><path>django/core/validators.py
<ide> def ip_address_validators(protocol, unpack_ipv4):
<ide> return ip_address_validator_map[protocol.lower()]
<ide> except KeyError:
<ide> raise ValueError("The protocol '%s' is unknown. Supported: %s"
<del> % (protocol, list(six.iterkeys(ip_address_validator_map))))
<add> % (protocol, list(ip_address_validator_map)))
<ide>
<ide> comma_separated_int_list_re = re.compile('^[\d,]+$')
<ide> validate_comma_separated_integer_list = RegexValidator(comma_separated_int_list_re, _('Enter only digits separated by commas.'), 'invalid')
<ide><path>django/db/models/base.py
<ide> def __init__(self, *args, **kwargs):
<ide> setattr(self, field.attname, val)
<ide>
<ide> if kwargs:
<del> for prop in list(six.iterkeys(kwargs)):
<add> for prop in list(kwargs):
<ide> try:
<ide> if isinstance(getattr(self.__class__, prop), property):
<ide> setattr(self, prop, kwargs.pop(prop))
<ide> except AttributeError:
<ide> pass
<ide> if kwargs:
<del> raise TypeError("'%s' is an invalid keyword argument for this function" % list(six.iterkeys(kwargs))[0])
<add> raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
<ide> super(Model, self).__init__()
<ide> signals.post_init.send(sender=self.__class__, instance=self)
<ide>
<ide><path>django/db/models/deletion.py
<ide> def instances_with_model(self):
<ide> def sort(self):
<ide> sorted_models = []
<ide> concrete_models = set()
<del> models = list(six.iterkeys(self.data))
<add> models = list(self.data)
<ide> while len(sorted_models) < len(models):
<ide> found = False
<ide> for model in models:
<ide><path>django/db/models/fields/__init__.py
<ide> def formfield(self, form_class=forms.CharField, **kwargs):
<ide> # Many of the subclass-specific formfield arguments (min_value,
<ide> # max_value) don't apply for choice fields, so be sure to only pass
<ide> # the values that TypedChoiceField will understand.
<del> for k in list(six.iterkeys(kwargs)):
<add> for k in list(kwargs):
<ide> if k not in ('coerce', 'empty_value', 'choices', 'required',
<ide> 'widget', 'label', 'initial', 'help_text',
<ide> 'error_messages', 'show_hidden_initial'):
<ide><path>django/db/models/fields/related.py
<ide> def get_prefetch_query_set(self, instances):
<ide> rel_obj_attr = attrgetter(self.related.field.attname)
<ide> instance_attr = lambda obj: obj._get_pk_val()
<ide> instances_dict = dict((instance_attr(inst), inst) for inst in instances)
<del> params = {'%s__pk__in' % self.related.field.name: list(six.iterkeys(instances_dict))}
<add> params = {'%s__pk__in' % self.related.field.name: list(instances_dict)}
<ide> qs = self.get_query_set(instance=instances[0]).filter(**params)
<ide> # Since we're going to assign directly in the cache,
<ide> # we must manage the reverse relation cache manually.
<ide> def get_prefetch_query_set(self, instances):
<ide> instance_attr = attrgetter(self.field.attname)
<ide> instances_dict = dict((instance_attr(inst), inst) for inst in instances)
<ide> if other_field.rel:
<del> params = {'%s__pk__in' % self.field.rel.field_name: list(six.iterkeys(instances_dict))}
<add> params = {'%s__pk__in' % self.field.rel.field_name: list(instances_dict)}
<ide> else:
<del> params = {'%s__in' % self.field.rel.field_name: list(six.iterkeys(instances_dict))}
<add> params = {'%s__in' % self.field.rel.field_name: list(instances_dict)}
<ide> qs = self.get_query_set(instance=instances[0]).filter(**params)
<ide> # Since we're going to assign directly in the cache,
<ide> # we must manage the reverse relation cache manually.
<ide> def get_prefetch_query_set(self, instances):
<ide> instance_attr = attrgetter(attname)
<ide> instances_dict = dict((instance_attr(inst), inst) for inst in instances)
<ide> db = self._db or router.db_for_read(self.model, instance=instances[0])
<del> query = {'%s__%s__in' % (rel_field.name, attname): list(six.iterkeys(instances_dict))}
<add> query = {'%s__%s__in' % (rel_field.name, attname): list(instances_dict)}
<ide> qs = super(RelatedManager, self).get_query_set().using(db).filter(**query)
<ide> # Since we just bypassed this class' get_query_set(), we must manage
<ide> # the reverse relation manually.
<ide><path>django/db/models/options.py
<ide> def _many_to_many(self):
<ide> self._m2m_cache
<ide> except AttributeError:
<ide> self._fill_m2m_cache()
<del> return list(six.iterkeys(self._m2m_cache))
<add> return list(self._m2m_cache)
<ide> many_to_many = property(_many_to_many)
<ide>
<ide> def get_m2m_with_model(self):
<ide> def get_all_related_many_to_many_objects(self, local_only=False):
<ide> cache = self._fill_related_many_to_many_cache()
<ide> if local_only:
<ide> return [k for k, v in cache.items() if not v]
<del> return list(six.iterkeys(cache))
<add> return list(cache)
<ide>
<ide> def get_all_related_m2m_objects_with_model(self):
<ide> """
<ide><path>django/db/models/query.py
<ide> def iterator(self):
<ide> requested = None
<ide> max_depth = self.query.max_depth
<ide>
<del> extra_select = list(six.iterkeys(self.query.extra_select))
<del> aggregate_select = list(six.iterkeys(self.query.aggregate_select))
<add> extra_select = list(self.query.extra_select)
<add> aggregate_select = list(self.query.aggregate_select)
<ide>
<ide> only_load = self.query.get_loaded_field_names()
<ide> if not fill_cache:
<ide> def values_list(self, *fields, **kwargs):
<ide> flat = kwargs.pop('flat', False)
<ide> if kwargs:
<ide> raise TypeError('Unexpected keyword arguments to values_list: %s'
<del> % (list(six.iterkeys(kwargs)),))
<add> % (list(kwargs),))
<ide> if flat and len(fields) > 1:
<ide> raise TypeError("'flat' is not valid when values_list is called with more than one field.")
<ide> return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
<ide> def select_related(self, *fields, **kwargs):
<ide> depth = kwargs.pop('depth', 0)
<ide> if kwargs:
<ide> raise TypeError('Unexpected keyword arguments to select_related: %s'
<del> % (list(six.iterkeys(kwargs)),))
<add> % (list(kwargs),))
<ide> obj = self._clone()
<ide> if fields:
<ide> if depth:
<ide> def annotate(self, *args, **kwargs):
<ide>
<ide> obj = self._clone()
<ide>
<del> obj._setup_aggregate_query(list(six.iterkeys(kwargs)))
<add> obj._setup_aggregate_query(list(kwargs))
<ide>
<ide> # Add the aggregates to the query
<ide> for (alias, aggregate_expr) in kwargs.items():
<ide> def __init__(self, *args, **kwargs):
<ide>
<ide> def iterator(self):
<ide> # Purge any extra columns that haven't been explicitly asked for
<del> extra_names = list(six.iterkeys(self.query.extra_select))
<add> extra_names = list(self.query.extra_select)
<ide> field_names = self.field_names
<del> aggregate_names = list(six.iterkeys(self.query.aggregate_select))
<add> aggregate_names = list(self.query.aggregate_select)
<ide>
<ide> names = extra_names + field_names + aggregate_names
<ide>
<ide> def iterator(self):
<ide> # When extra(select=...) or an annotation is involved, the extra
<ide> # cols are always at the start of the row, and we need to reorder
<ide> # the fields to match the order in self._fields.
<del> extra_names = list(six.iterkeys(self.query.extra_select))
<add> extra_names = list(self.query.extra_select)
<ide> field_names = self.field_names
<del> aggregate_names = list(six.iterkeys(self.query.aggregate_select))
<add> aggregate_names = list(self.query.aggregate_select)
<ide>
<ide> names = extra_names + field_names + aggregate_names
<ide>
<ide><path>django/db/models/sql/query.py
<ide> def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
<ide> field, model, direct, m2m = opts.get_field_by_name(f.name)
<ide> break
<ide> else:
<del> names = opts.get_all_field_names() + list(six.iterkeys(self.aggregate_select))
<add> names = opts.get_all_field_names() + list(self.aggregate_select)
<ide> raise FieldError("Cannot resolve keyword %r into field. "
<ide> "Choices are: %s" % (name, ", ".join(names)))
<ide>
<ide> def add_fields(self, field_names, allow_m2m=True):
<ide> # from the model on which the lookup failed.
<ide> raise
<ide> else:
<del> names = sorted(opts.get_all_field_names() + list(six.iterkeys(self.extra))
<del> + list(six.iterkeys(self.aggregate_select)))
<add> names = sorted(opts.get_all_field_names() + list(self.extra)
<add> + list(self.aggregate_select))
<ide> raise FieldError("Cannot resolve keyword %r into field. "
<ide> "Choices are: %s" % (name, ", ".join(names)))
<ide> self.remove_inherited_models()
<ide><path>django/template/defaulttags.py
<ide> def templatetag(parser, token):
<ide> if tag not in TemplateTagNode.mapping:
<ide> raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
<ide> " Must be one of: %s" %
<del> (tag, list(six.iterkeys(TemplateTagNode.mapping))))
<add> (tag, list(TemplateTagNode.mapping)))
<ide> return TemplateTagNode(tag)
<ide>
<ide> @register.tag
<ide><path>django/utils/datastructures.py
<ide> def __init__(self, data=None):
<ide> data = list(data)
<ide> super(SortedDict, self).__init__(data)
<ide> if isinstance(data, dict):
<del> self.keyOrder = list(six.iterkeys(data))
<add> self.keyOrder = list(data)
<ide> else:
<ide> self.keyOrder = []
<ide> seen = set()
<ide><path>django/utils/dictconfig.py
<ide> def configure(self):
<ide> #which were in the previous configuration but
<ide> #which are not in the new configuration.
<ide> root = logging.root
<del> existing = list(six.iterkeys(root.manager.loggerDict))
<add> existing = list(root.manager.loggerDict)
<ide> #The list needs to be sorted so that we can
<ide> #avoid disabling child loggers of explicitly
<ide> #named loggers. With a sorted list it is easier | 25 |
Ruby | Ruby | simplify attribute type decoration | c885086bd06f000390fda7f6a11362bbb8ca0798 | <ide><path>activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
<ide> def map_avoiding_infinite_recursion(value)
<ide> end
<ide>
<ide> module ClassMethods # :nodoc:
<del> private
<del> def inherited(subclass)
<del> super
<del> # We need to apply this decorator here, rather than on module inclusion. The closure
<del> # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
<del> # sub class being decorated. As such, changes to `time_zone_aware_attributes`, or
<del> # `skip_time_zone_conversion_for_attributes` would not be picked up.
<del> subclass.class_eval do
<del> matcher = ->(name, type) { create_time_zone_conversion_attribute?(name, type) }
<del> decorate_matching_attribute_types(matcher, "_time_zone_conversion") do |type|
<del> TimeZoneConverter.new(type)
<del> end
<del> end
<add> def define_attribute(name, cast_type, **)
<add> if create_time_zone_conversion_attribute?(name, cast_type)
<add> cast_type = TimeZoneConverter.new(cast_type)
<ide> end
<add> super
<add> end
<ide>
<add> private
<ide> def create_time_zone_conversion_attribute?(name, cast_type)
<ide> enabled_for_column = time_zone_aware_attributes &&
<ide> !skip_time_zone_conversion_for_attributes.include?(name.to_sym)
<ide><path>activerecord/lib/active_record/locking/optimistic.rb
<ide> def update_counters(id, counters)
<ide> super
<ide> end
<ide>
<del> private
<del> # We need to apply this decorator here, rather than on module inclusion. The closure
<del> # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
<del> # sub class being decorated. As such, changes to `lock_optimistically`, or
<del> # `locking_column` would not be picked up.
<del> def inherited(subclass)
<del> subclass.class_eval do
<del> is_lock_column = ->(name, _) { lock_optimistically && name == locking_column }
<del> decorate_matching_attribute_types(is_lock_column, "_optimistic_locking") do |type|
<del> LockingType.new(type)
<del> end
<del> end
<del> super
<add> def define_attribute(name, cast_type, **) # :nodoc:
<add> if lock_optimistically && name == locking_column
<add> cast_type = LockingType.new(cast_type)
<ide> end
<add> super
<add> end
<ide> end
<ide> end
<ide>
<ide><path>activerecord/test/cases/attributes_test.rb
<ide> class CustomPropertiesTest < ActiveRecord::TestCase
<ide>
<ide> test "extra options are forwarded to the type caster constructor" do
<ide> klass = Class.new(OverloadedType) do
<del> attribute :starts_at, :datetime, precision: 3, limit: 2, scale: 1
<add> attribute :starts_at, :datetime, precision: 3, limit: 2, scale: 1, default: -> { Time.now.utc }
<ide> end
<ide>
<ide> starts_at_type = klass.type_for_attribute(:starts_at)
<ide> assert_equal 3, starts_at_type.precision
<ide> assert_equal 2, starts_at_type.limit
<ide> assert_equal 1, starts_at_type.scale
<add>
<add> assert_instance_of Time, klass.new.starts_at
<add> end
<add>
<add> test "time zone aware attribute" do
<add> with_timezone_config aware_attributes: true, zone: "Pacific Time (US & Canada)" do
<add> klass = Class.new(OverloadedType) do
<add> attribute :starts_at, :datetime, precision: 3, default: -> { Time.now.utc }
<add> end
<add>
<add> assert_instance_of ActiveSupport::TimeWithZone, klass.new.starts_at
<add> end
<ide> end
<ide>
<ide> test "nonexistent attribute" do | 3 |
Text | Text | fix typo in tooltip conf doc | f71f525caf852ad46ae6fc875e879cc10804b051 | <ide><path>docs/01-Chart-Configuration.md
<ide> var chartInstance = new Chart(ctx, {
<ide>
<ide> ### Tooltip Configuration
<ide>
<del>The title configuration is passed into the `options.tooltips` namespace. The global options for the chart tooltips is defined in `Chart.defaults.global.title`.
<add>The title configuration is passed into the `options.tooltips` namespace. The global options for the chart tooltips is defined in `Chart.defaults.global.tooltips`.
<ide>
<ide> Name | Type | Default | Description
<ide> --- | --- | --- | --- | 1 |
Javascript | Javascript | fix master tests | c3e42a962b2ea3b245e43f542bda7ea33a9715be | <ide><path>packages/react-dom/src/events/plugins/__tests__/ModernChangeEventPlugin-test.js
<ide> describe('ChangeEventPlugin', () => {
<ide> describe('concurrent mode', () => {
<ide> // @gate experimental
<ide> it('text input', () => {
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> let input;
<ide>
<ide> class ControlledInput extends React.Component {
<ide> describe('ChangeEventPlugin', () => {
<ide>
<ide> // @gate experimental
<ide> it('checkbox input', () => {
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> let input;
<ide>
<ide> class ControlledInput extends React.Component {
<ide> describe('ChangeEventPlugin', () => {
<ide>
<ide> // @gate experimental
<ide> it('textarea', () => {
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> let textarea;
<ide>
<ide> class ControlledTextarea extends React.Component {
<ide> describe('ChangeEventPlugin', () => {
<ide>
<ide> // @gate experimental
<ide> it('parent of input', () => {
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> let input;
<ide>
<ide> class ControlledInput extends React.Component {
<ide> describe('ChangeEventPlugin', () => {
<ide>
<ide> // @gate experimental
<ide> it('is async for non-input events', () => {
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> let input;
<ide>
<ide> class ControlledInput extends React.Component {
<ide> describe('ChangeEventPlugin', () => {
<ide> const {act} = TestUtils;
<ide> const {useState} = React;
<ide>
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide>
<ide> const target = React.createRef(null);
<ide> function Foo() {
<ide><path>packages/react-dom/src/events/plugins/__tests__/ModernSimpleEventPlugin-test.js
<ide> describe('SimpleEventPlugin', function() {
<ide> // @gate experimental
<ide> it('flushes pending interactive work before extracting event handler', () => {
<ide> container = document.createElement('div');
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> document.body.appendChild(container);
<ide>
<ide> let button;
<ide> describe('SimpleEventPlugin', function() {
<ide> // @gate experimental
<ide> it('end result of many interactive updates is deterministic', () => {
<ide> container = document.createElement('div');
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> document.body.appendChild(container);
<ide>
<ide> let button;
<ide> describe('SimpleEventPlugin', function() {
<ide> }
<ide>
<ide> // Initial mount
<del> const root = ReactDOM.createRoot(container);
<add> const root = ReactDOM.unstable_createRoot(container);
<ide> root.render(<Wrapper />);
<ide> expect(Scheduler).toFlushAndYield([
<ide> 'High-pri count: 0, Low-pri count: 0', | 2 |
Javascript | Javascript | remove all but one instance of proptypes | ae1817fdb96780c92a0cb8ec9b753f897b5d5703 | <ide><path>RNTester/js/LinkingExample.js
<ide>
<ide> 'use strict';
<ide>
<del>var React = require('react');
<del>var PropTypes = require('prop-types');
<del>var ReactNative = require('react-native');
<del>var {Linking, StyleSheet, Text, TouchableOpacity, View} = ReactNative;
<del>var RNTesterBlock = require('./RNTesterBlock');
<add>const React = require('react');
<add>const {
<add> Linking,
<add> StyleSheet,
<add> Text,
<add> TouchableOpacity,
<add> View,
<add>} = require('react-native');
<ide>
<del>class OpenURLButton extends React.Component {
<del> static propTypes = {
<del> url: PropTypes.string,
<del> };
<add>const RNTesterBlock = require('./RNTesterBlock');
<add>
<add>type Props = $ReadOnly<{|
<add> url?: ?string,
<add>|}>;
<ide>
<add>class OpenURLButton extends React.Component<Props> {
<ide> handleClick = () => {
<ide> Linking.canOpenURL(this.props.url).then(supported => {
<ide> if (supported) {
<ide> class IntentAndroidExample extends React.Component {
<ide> }
<ide> }
<ide>
<del>var styles = StyleSheet.create({
<add>const styles = StyleSheet.create({
<ide> container: {
<ide> flex: 1,
<ide> backgroundColor: 'white',
<ide><path>RNTester/js/RNTesterBlock.js
<ide>
<ide> 'use strict';
<ide>
<del>var React = require('react');
<del>var PropTypes = require('prop-types');
<del>var ReactNative = require('react-native');
<del>var {StyleSheet, Text, View} = ReactNative;
<add>const React = require('react');
<add>const {StyleSheet, Text, View} = require('react-native');
<ide>
<del>class RNTesterBlock extends React.Component<
<del> {
<del> title?: string,
<del> description?: string,
<del> },
<del> $FlowFixMeState,
<del>> {
<del> static propTypes = {
<del> title: PropTypes.string,
<del> description: PropTypes.string,
<del> };
<add>type Props = $ReadOnly<{|
<add> children?: React.Node,
<add> title?: ?string,
<add> description?: ?string,
<add>|}>;
<add>
<add>type State = {|
<add> description: ?string,
<add>|};
<ide>
<del> state = {description: (null: ?string)};
<add>class RNTesterBlock extends React.Component<Props, State> {
<add> state = {description: null};
<ide>
<ide> render() {
<del> var description;
<del> if (this.props.description) {
<del> description = (
<del> <Text style={styles.descriptionText}>{this.props.description}</Text>
<del> );
<del> }
<add> const description = this.props.description ? (
<add> <Text style={styles.descriptionText}>{this.props.description}</Text>
<add> ) : null;
<ide>
<ide> return (
<ide> <View style={styles.container}>
<ide> <View style={styles.titleContainer}>
<ide> <Text style={styles.titleText}>{this.props.title}</Text>
<ide> {description}
<ide> </View>
<del> <View style={styles.children}>
<del> {
<del> // $FlowFixMe found when converting React.createClass to ES6
<del> this.props.children
<del> }
<del> </View>
<add> <View style={styles.children}>{this.props.children}</View>
<ide> </View>
<ide> );
<ide> }
<ide> }
<ide>
<del>var styles = StyleSheet.create({
<add>const styles = StyleSheet.create({
<ide> container: {
<ide> borderRadius: 3,
<ide> borderWidth: 0.5,
<ide><path>RNTester/js/RNTesterButton.js
<ide>
<ide> 'use strict';
<ide>
<del>var React = require('react');
<del>var PropTypes = require('prop-types');
<del>var ReactNative = require('react-native');
<del>var {StyleSheet, Text, TouchableHighlight} = ReactNative;
<add>const React = require('react');
<add>const {StyleSheet, Text, TouchableHighlight} = require('react-native');
<ide>
<del>class RNTesterButton extends React.Component<{onPress?: Function}> {
<del> static propTypes = {
<del> onPress: PropTypes.func,
<del> };
<add>import type {PressEvent} from 'CoreEventTypes';
<ide>
<add>type Props = $ReadOnly<{|
<add> children?: React.Node,
<add> onPress?: ?(event: PressEvent) => mixed,
<add>|}>;
<add>
<add>class RNTesterButton extends React.Component<Props> {
<ide> render() {
<ide> return (
<ide> <TouchableHighlight
<ide> onPress={this.props.onPress}
<ide> style={styles.button}
<ide> underlayColor="grey">
<del> <Text>
<del> {
<del> // $FlowFixMe found when converting React.createClass to ES6
<del> this.props.children
<del> }
<del> </Text>
<add> <Text>{this.props.children}</Text>
<ide> </TouchableHighlight>
<ide> );
<ide> }
<ide> }
<ide>
<del>var styles = StyleSheet.create({
<add>const styles = StyleSheet.create({
<ide> button: {
<ide> borderColor: '#696969',
<ide> borderRadius: 8,
<ide><path>RNTester/js/RNTesterPage.js
<ide>
<ide> 'use strict';
<ide>
<del>var PropTypes = require('prop-types');
<del>var React = require('react');
<del>var ReactNative = require('react-native');
<del>var {ScrollView, StyleSheet, View} = ReactNative;
<add>const React = require('react');
<add>const {ScrollView, StyleSheet, View} = require('react-native');
<ide>
<del>var RNTesterTitle = require('./RNTesterTitle');
<add>const RNTesterTitle = require('./RNTesterTitle');
<ide>
<del>class RNTesterPage extends React.Component<{
<del> noScroll?: boolean,
<del> noSpacer?: boolean,
<del>}> {
<del> static propTypes = {
<del> noScroll: PropTypes.bool,
<del> noSpacer: PropTypes.bool,
<del> };
<add>type Props = $ReadOnly<{|
<add> children?: React.Node,
<add> title?: ?string,
<add> noScroll?: ?boolean,
<add> noSpacer?: ?boolean,
<add>|}>;
<ide>
<add>class RNTesterPage extends React.Component<Props> {
<ide> render() {
<del> var ContentWrapper;
<del> var wrapperProps = {};
<add> let ContentWrapper;
<add> let wrapperProps = {};
<ide> if (this.props.noScroll) {
<ide> ContentWrapper = ((View: any): React.ComponentType<any>);
<ide> } else {
<ide> ContentWrapper = (ScrollView: React.ComponentType<any>);
<del> // $FlowFixMe found when converting React.createClass to ES6
<ide> wrapperProps.automaticallyAdjustContentInsets = !this.props.title;
<ide> wrapperProps.keyboardShouldPersistTaps = 'handled';
<ide> wrapperProps.keyboardDismissMode = 'interactive';
<ide> }
<del> /* $FlowFixMe(>=0.68.0 site=react_native_fb) This comment suppresses an
<del> * error found when Flow v0.68 was deployed. To see the error delete this
<del> * comment and run Flow. */
<del> var title = this.props.title ? (
<add> const title = this.props.title ? (
<ide> <RNTesterTitle title={this.props.title} />
<ide> ) : null;
<del> var spacer = this.props.noSpacer ? null : <View style={styles.spacer} />;
<add> const spacer = this.props.noSpacer ? null : <View style={styles.spacer} />;
<ide> return (
<ide> <View style={styles.container}>
<ide> {title}
<ide> <ContentWrapper style={styles.wrapper} {...wrapperProps}>
<del> {
<del> // $FlowFixMe found when converting React.createClass to ES6
<del> this.props.children
<del> }
<add> {this.props.children}
<ide> {spacer}
<ide> </ContentWrapper>
<ide> </View>
<ide> );
<ide> }
<ide> }
<ide>
<del>var styles = StyleSheet.create({
<add>const styles = StyleSheet.create({
<ide> container: {
<ide> backgroundColor: '#e9eaed',
<ide> flex: 1, | 4 |
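The patch above swaps runtime PropTypes for static Flow types. A minimal standalone sketch of that pattern (component and prop names here are illustrative, not taken from the commit): `$ReadOnly<{| ... |}>` makes the props object exact and read-only, so passing an unexpected prop or mutating props becomes a Flow error.

```js
// @flow
'use strict';

// Illustrative sketch only; names are not from the commit above.
const React = require('react');
const {Text} = require('react-native');

type Props = $ReadOnly<{|
  children?: React.Node,
  title?: ?string, // optional and nullable, as in the converted components
|}>;

class ExampleBlock extends React.Component<Props> {
  render() {
    // Flow narrows `title` to string inside the null check.
    const title =
      this.props.title != null ? <Text>{this.props.title}</Text> : null;
    return (
      <Text>
        {title}
        {this.props.children}
      </Text>
    );
  }
}

module.exports = ExampleBlock;
```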
Python | Python | validate the mypy exit-code | 0148bf8cdd7577730af8f1418c3a92dfad82595a | <ide><path>numpy/typing/tests/test_typing.py
<ide> def run_mypy() -> None:
<ide>
<ide> for directory in (PASS_DIR, REVEAL_DIR, FAIL_DIR, MISC_DIR):
<ide> # Run mypy
<del> stdout, stderr, _ = api.run([
<add> stdout, stderr, exit_code = api.run([
<ide> "--config-file",
<ide> MYPY_INI,
<ide> "--cache-dir",
<ide> CACHE_DIR,
<ide> directory,
<ide> ])
<ide> assert not stderr, directory
<add> assert exit_code in {0, 1}, stdout
<ide> stdout = stdout.replace('*', '')
<ide>
<ide> # Parse the output | 1 |
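For context on the assertion added above: mypy's programmatic API mirrors the CLI and returns a `(stdout, stderr, exit_status)` tuple, where exit status 0 means a clean run, 1 means type errors were reported, and 2 indicates mypy itself failed (bad arguments, missing files, internal error) — which is why only `{0, 1}` are accepted. A minimal sketch, with a placeholder target file:

```python
# Illustrative sketch only; not part of the numpy commit above.
from mypy import api

# api.run() mirrors the mypy CLI and returns (stdout, stderr, exit_status).
stdout, stderr, exit_status = api.run(["some_module.py"])  # placeholder path

if exit_status == 0:
    print("no type errors found")
elif exit_status == 1:
    print("type errors reported:\n" + stdout)
else:  # 2: mypy itself failed (bad flags, missing files, internal error)
    raise RuntimeError(stderr or stdout)
```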
Javascript | Javascript | use describe() blocks to set up preconditions | 304f6c16fb5784f14a3d8fd8a394c778af938088
<ide> const expect = chai.expect;
<ide> describe('Blueprint: acceptance-test', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('acceptance-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('acceptance-test foo', function() {
<add> return emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<ide> expect(_file('tests/acceptance/foo-test.js'))
<ide> .to.contain("import { test } from 'qunit';")
<ide> .to.contain("moduleForAcceptance('Acceptance | foo');")
<ide> .to.contain("test('visiting /foo', function(assert) {")
<ide> .to.contain("visit('/foo');")
<ide> .to.contain("andThen(function() {")
<ide> .to.contain("assert.equal(currentURL(), '/foo');");
<del> }));
<add> });
<add> });
<add>
<add> describe('with ember-cli-mocha', function() {
<add> beforeEach(function() {
<add> return modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> });
<add>
<add> it('acceptance-test foo', function() {
<add> return emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<add> expect(_file('tests/acceptance/foo-test.js'))
<add> .to.contain("import { describe, it, beforeEach, afterEach } from 'mocha';")
<add> .to.contain("import { expect } from 'chai';")
<add> .to.contain("describe('Acceptance | foo', function() {")
<add> .to.contain("it('can visit /foo', function() {")
<add> .to.contain("visit('/foo');")
<add> .to.contain("return andThen(() => {")
<add> .to.contain("expect(currentURL()).to.equal('/foo');");
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('in-addon acceptance-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<add>
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('acceptance-test foo', function() {
<add> return emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<ide> expect(_file('tests/acceptance/foo-test.js'))
<ide> .to.contain("import { test } from 'qunit';")
<ide> .to.contain("moduleForAcceptance('Acceptance | foo');")
<ide> describe('Blueprint: acceptance-test', function() {
<ide>
<ide> expect(_file('app/acceptance-tests/foo.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon acceptance-test foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['acceptance-test', 'foo/bar'], _file => {
<add> it('acceptance-test foo/bar', function() {
<add> return emberGenerateDestroy(['acceptance-test', 'foo/bar'], _file => {
<ide> expect(_file('tests/acceptance/foo/bar-test.js'))
<ide> .to.contain("import { test } from 'qunit';")
<ide> .to.contain("moduleForAcceptance('Acceptance | foo/bar');")
<ide> describe('Blueprint: acceptance-test', function() {
<ide>
<ide> expect(_file('app/acceptance-tests/foo/bar.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('acceptance-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => emberGenerateDestroy(['acceptance-test', 'foo'], _file => {
<del> expect(_file('tests/acceptance/foo-test.js'))
<del> .to.contain("import { describe, it, beforeEach, afterEach } from 'mocha';")
<del> .to.contain("import { expect } from 'chai';")
<del> .to.contain("describe('Acceptance | foo', function() {")
<del> .to.contain("it('can visit /foo', function() {")
<del> .to.contain("visit('/foo');")
<del> .to.contain("return andThen(() => {")
<del> .to.contain("expect(currentURL()).to.equal('/foo');");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/component-addon-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: component-addon', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('component-addon foo-bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component-addon', 'foo-bar'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('component-addon foo-bar', function() {
<add> return emberGenerateDestroy(['component-addon', 'foo-bar'], _file => {
<ide> expect(_file('app/components/foo-bar.js'))
<ide> .to.contain("export { default } from 'my-addon/components/foo-bar';");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/component-test.js
<ide> const fixture = require('../helpers/file');
<ide> describe('Blueprint: component', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('component x-foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('component x-foo', function() {
<add> return emberGenerateDestroy(['component', 'x-foo'], _file => {
<ide> expect(_file('app/components/x-foo.js')).to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide> describe('Blueprint: component', function() {
<ide> .to.contain("integration: true")
<ide> .to.contain("{{x-foo}}")
<ide> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('component foo/x-foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo'], _file => {
<add> it('component foo/x-foo', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo'], _file => {
<ide> expect(_file('app/components/foo/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> describe('Blueprint: component', function() {
<ide> .to.contain("integration: true")
<ide> .to.contain("{{foo/x-foo}}")
<ide> .to.contain("{{#foo/x-foo}}");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('component x-foo ignores --path option', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--path', 'foo'], _file => {
<add> it('component x-foo --path foo', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--path', 'foo'], _file => {
<ide> expect(_file('app/components/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> describe('Blueprint: component', function() {
<ide> .to.contain("integration: true")
<ide> .to.contain("{{x-foo}}")
<ide> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon component x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component', 'x-foo'], _file => {
<del> expect(_file('addon/components/x-foo.js'))
<add> it('component x-foo --pod', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<add> expect(_file('app/components/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<del> .to.contain("import layout from '../templates/components/x-foo';")
<ide> .to.contain("export default Component.extend({")
<del> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('addon/templates/components/x-foo.hbs'))
<add> expect(_file('app/components/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('app/components/x-foo.js'))
<del> .to.contain("export { default } from 'my-addon/components/x-foo';");
<del>
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<add> expect(_file('tests/integration/components/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<ide> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<add> .to.contain("integration: true");
<add> });
<add> });
<ide>
<del> it('in-addon component nested/x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component', 'nested/x-foo'], _file => {
<del> expect(_file('addon/components/nested/x-foo.js'))
<add> it('component foo/x-foo --pod', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod'], _file => {
<add> expect(_file('app/components/foo/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<del> .to.contain("import layout from '../../templates/components/nested/x-foo';")
<ide> .to.contain("export default Component.extend({")
<del> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('addon/templates/components/nested/x-foo.hbs'))
<add> expect(_file('app/components/foo/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('app/components/nested/x-foo.js'))
<del> .to.contain("export { default } from 'my-addon/components/nested/x-foo';");
<del>
<del> expect(_file('tests/integration/components/nested/x-foo-test.js'))
<add> expect(_file('tests/integration/components/foo/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('nested/x-foo'")
<add> .to.contain("moduleForComponent('foo/x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{nested/x-foo}}")
<del> .to.contain("{{#nested/x-foo}}");
<del> }));
<del> });
<del>
<del> it('dummy component x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--dummy'], _file => {
<del> expect(_file('tests/dummy/app/components/x-foo.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<del>
<del> expect(_file('tests/dummy/app/templates/components/x-foo.hbs'))
<del> .to.equal("{{yield}}");
<del>
<del> expect(_file('app/components/x-foo.js'))
<del> .to.not.exist;
<del>
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.not.exist;
<del> }));
<del> });
<add> .to.contain("{{foo/x-foo}}")
<add> .to.contain("{{#foo/x-foo}}");
<add> });
<add> });
<ide>
<del> it('dummy component nested/x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component', 'nested/x-foo', '--dummy'], _file => {
<del> expect(_file('tests/dummy/app/components/nested/x-foo.js'))
<add> it('component x-foo --pod --path bar', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar'], _file => {
<add> expect(_file('app/bar/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('tests/dummy/app/templates/components/nested/x-foo.hbs'))
<add> expect(_file('app/bar/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('app/components/nested/x-foo.js'))
<del> .to.not.exist;
<del>
<del> expect(_file('tests/unit/components/nested/x-foo-test.js'))
<del> .to.not.exist;
<del> }));
<del> });
<add> expect(_file('tests/integration/bar/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('bar/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/x-foo}}")
<add> .to.contain("{{#bar/x-foo}}");
<add> });
<add> });
<ide>
<del> it('in-repo-addon component x-foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--in-repo-addon=my-addon'], _file => {
<del> expect(_file('lib/my-addon/addon/components/x-foo.js'))
<add> it('component foo/x-foo --pod --path bar', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar'], _file => {
<add> expect(_file('app/bar/foo/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<del> .to.contain("import layout from '../templates/components/x-foo';")
<ide> .to.contain("export default Component.extend({")
<del> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('lib/my-addon/addon/templates/components/x-foo.hbs'))
<add> expect(_file('app/bar/foo/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('lib/my-addon/app/components/x-foo.js'))
<del> .to.contain("export { default } from 'my-addon/components/x-foo';");
<del>
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<del>
<del> it('in-repo-addon component-test x-foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--in-repo-addon=my-addon'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<add> expect(_file('tests/integration/bar/foo/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("moduleForComponent('bar/foo/x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<del>
<del> it('in-repo-addon component-test x-foo --unit', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--in-repo-addon=my-addon', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("unit: true");
<del> }));
<del> });
<add> .to.contain("{{bar/foo/x-foo}}")
<add> .to.contain("{{#bar/foo/x-foo}}");
<add> });
<add> });
<ide>
<del> it('in-repo-addon component nested/x-foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component', 'nested/x-foo', '--in-repo-addon=my-addon'], _file => {
<del> expect(_file('lib/my-addon/addon/components/nested/x-foo.js'))
<add> it('component x-foo --pod --path bar/baz', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar/baz'], _file => {
<add> expect(_file('app/bar/baz/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<del> .to.contain("import layout from '../../templates/components/nested/x-foo';")
<ide> .to.contain("export default Component.extend({")
<del> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('lib/my-addon/addon/templates/components/nested/x-foo.hbs'))
<add> expect(_file('app/bar/baz/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('lib/my-addon/app/components/nested/x-foo.js'))
<del> .to.contain("export { default } from 'my-addon/components/nested/x-foo';");
<del>
<del> expect(_file('tests/integration/components/nested/x-foo-test.js'))
<add> expect(_file('tests/integration/bar/baz/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('nested/x-foo'")
<del> .to.contain("integration: true");
<del> }));
<del> });
<add> .to.contain("moduleForComponent('bar/baz/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/baz/x-foo}}")
<add> .to.contain("{{#bar/baz/x-foo}}");
<add> });
<add> });
<ide>
<del> // Pod tests
<del> it('component x-foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<del> expect(_file('app/components/x-foo/component.js'))
<add> it('component foo/x-foo --pod --path bar/baz', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar/baz'], _file => {
<add> expect(_file('app/bar/baz/foo/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/components/x-foo/template.hbs'))
<add> expect(_file('app/bar/baz/foo/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/components/x-foo/component-test.js'))
<add> expect(_file('tests/integration/bar/baz/foo/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true");
<del> }));
<del> });
<add> .to.contain("moduleForComponent('bar/baz/foo/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/baz/foo/x-foo}}")
<add> .to.contain("{{#bar/baz/foo/x-foo}}");
<add> });
<add> });
<ide>
<del> it('component x-foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<del> expect(_file('app/pods/components/x-foo/component.js'))
<add> it('component x-foo --pod -no-path', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '-no-path'], _file => {
<add> expect(_file('app/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/pods/components/x-foo/template.hbs'))
<add> expect(_file('app/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/pods/components/x-foo/component-test.js'))
<add> expect(_file('tests/integration/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<ide> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true")
<ide> .to.contain("{{x-foo}}")
<ide> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('component foo/x-foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod'], _file => {
<del> expect(_file('app/components/foo/x-foo/component.js'))
<add> it('component foo/x-foo --pod -no-path', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '-no-path'], _file => {
<add> expect(_file('app/foo/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/components/foo/x-foo/template.hbs'))
<add> expect(_file('app/foo/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/components/foo/x-foo/component-test.js'))
<add> expect(_file('tests/integration/foo/x-foo/component-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<ide> .to.contain("moduleForComponent('foo/x-foo'")
<ide> .to.contain("integration: true")
<ide> .to.contain("{{foo/x-foo}}")
<ide> .to.contain("{{#foo/x-foo}}");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('component foo/x-foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod'], _file => {
<del> expect(_file('app/pods/components/foo/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/default.js'));
<add> });
<add> });
<ide>
<del> expect(_file('app/pods/components/foo/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> it('component-test x-foo --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/unit.js'));
<add> });
<add> });
<ide>
<del> expect(_file('tests/integration/pods/components/foo/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('foo/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{foo/x-foo}}")
<del> .to.contain("{{#foo/x-foo}}");
<del> }));
<del> });
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<ide>
<del> it('component x-foo --pod --path', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar'], _file => {
<del> expect(_file('app/bar/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> it('component x-foo --pod', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<add> expect(_file('app/pods/components/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<ide>
<del> expect(_file('app/bar/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> expect(_file('app/pods/components/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/bar/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/x-foo}}")
<del> .to.contain("{{#bar/x-foo}}");
<del> }));
<del> });
<add> expect(_file('tests/integration/pods/components/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<ide>
<del> it('component x-foo --pod --path podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar'], _file => {
<del> expect(_file('app/pods/bar/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> it('component foo/x-foo --pod', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod'], _file => {
<add> expect(_file('app/pods/components/foo/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<ide>
<del> expect(_file('app/pods/bar/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> expect(_file('app/pods/components/foo/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/pods/bar/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/x-foo}}")
<del> .to.contain("{{#bar/x-foo}}");
<del> }));
<del> });
<add> expect(_file('tests/integration/pods/components/foo/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('foo/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{foo/x-foo}}")
<add> .to.contain("{{#foo/x-foo}}");
<add> });
<add> });
<add>
<add> it('component x-foo --pod --path bar', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar'], _file => {
<add> expect(_file('app/pods/bar/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<add>
<add> expect(_file('app/pods/bar/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<add>
<add> expect(_file('tests/integration/pods/bar/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('bar/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/x-foo}}")
<add> .to.contain("{{#bar/x-foo}}");
<add> });
<add> });
<add>
<add> it('component foo/x-foo --pod --path bar', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar'], _file => {
<add> expect(_file('app/pods/bar/foo/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<add>
<add> expect(_file('app/pods/bar/foo/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<add>
<add> expect(_file('tests/integration/pods/bar/foo/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("moduleForComponent('bar/foo/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/foo/x-foo}}")
<add> .to.contain("{{#bar/foo/x-foo}}");
<add> });
<add> });
<add>
<add> it('component x-foo --pod --path bar/baz', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar/baz'], _file => {
<add> expect(_file('app/pods/bar/baz/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<add>
<add> expect(_file('app/pods/bar/baz/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<add>
<add> expect(_file('tests/integration/pods/bar/baz/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('bar/baz/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/baz/x-foo}}")
<add> .to.contain("{{#bar/baz/x-foo}}");
<add> });
<add> });
<add>
<add> it('component foo/x-foo --pod --path bar/baz', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar/baz'], _file => {
<add> expect(_file('app/pods/bar/baz/foo/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<add>
<add> expect(_file('app/pods/bar/baz/foo/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<add>
<add> expect(_file('tests/integration/pods/bar/baz/foo/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('bar/baz/foo/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{bar/baz/foo/x-foo}}")
<add> .to.contain("{{#bar/baz/foo/x-foo}}");
<add> });
<add> });
<add>
<add> it('component x-foo --pod -no-path', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod', '-no-path'], _file => {
<add> expect(_file('app/pods/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<add>
<add> expect(_file('app/pods/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<add>
<add> expect(_file('tests/integration/pods/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<ide>
<del> it('component foo/x-foo --pod --path', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar'], _file => {
<del> expect(_file('app/bar/foo/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> it('component foo/x-foo --pod -no-path', function() {
<add> return emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '-no-path'], _file => {
<add> expect(_file('app/pods/foo/x-foo/component.js'))
<add> .to.contain("import Component from '@ember/component';")
<add> .to.contain("export default Component.extend({")
<add> .to.contain("});");
<ide>
<del> expect(_file('app/bar/foo/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> expect(_file('app/pods/foo/x-foo/template.hbs'))
<add> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/bar/foo/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/foo/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/foo/x-foo}}")
<del> .to.contain("{{#bar/foo/x-foo}}");
<del> }));
<del> });
<add> expect(_file('tests/integration/pods/foo/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('foo/x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{foo/x-foo}}")
<add> .to.contain("{{#foo/x-foo}}");
<add> });
<add> });
<add> });
<ide>
<del> it('component foo/x-foo --pod --path podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar'], _file => {
<del> expect(_file('app/pods/bar/foo/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> describe('with usePods=true', function() {
<add> beforeEach(function() {
<add> fs.writeFileSync('.ember-cli', `{
<add> "disableAnalytics": false,
<add> "usePods": true
<add> }`);
<add> });
<ide>
<del> expect(_file('app/pods/bar/foo/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("integration: true")
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<add> });
<ide>
<del> expect(_file('tests/integration/pods/bar/foo/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("moduleForComponent('bar/foo/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/foo/x-foo}}")
<del> .to.contain("{{#bar/foo/x-foo}}");
<del> }));
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> generateFakePackageManifest('ember-cli-qunit', '4.1.1');
<add> });
<add>
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/rfc232.js'));
<add> });
<add> });
<add>
<add> it('component-test x-foo --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/rfc232-unit.js'));
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.11.0');
<add> });
<add>
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/mocha.js'));
<add> });
<add> });
<add>
<add> it('component-test x-foo --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/mocha-unit.js'));
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.12.0');
<add> });
<add>
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/mocha-0.12.js'));
<add> });
<add> });
<add>
<add> it('component-test x-foo --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.equal(fixture('component-test/mocha-0.12-unit.js'));
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('component x-foo --pod --path nested', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar/baz'], _file => {
<del> expect(_file('app/bar/baz/x-foo/component.js'))
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('component x-foo', function() {
<add> return emberGenerateDestroy(['component', 'x-foo'], _file => {
<add> expect(_file('addon/components/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<add> .to.contain("import layout from '../templates/components/x-foo';")
<ide> .to.contain("export default Component.extend({")
<add> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/bar/baz/x-foo/template.hbs'))
<add> expect(_file('addon/templates/components/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/bar/baz/x-foo/component-test.js'))
<add> expect(_file('app/components/x-foo.js'))
<add> .to.contain("export { default } from 'my-addon/components/x-foo';");
<add>
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/baz/x-foo'")
<add> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{bar/baz/x-foo}}")
<del> .to.contain("{{#bar/baz/x-foo}}");
<del> }));
<del> });
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<ide>
<del> it('component x-foo --pod --path nested podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '--path', 'bar/baz'], _file => {
<del> expect(_file('app/pods/bar/baz/x-foo/component.js'))
<add> it('component nested/x-foo', function() {
<add> return emberGenerateDestroy(['component', 'nested/x-foo'], _file => {
<add> expect(_file('addon/components/nested/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<add> .to.contain("import layout from '../../templates/components/nested/x-foo';")
<ide> .to.contain("export default Component.extend({")
<add> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/pods/bar/baz/x-foo/template.hbs'))
<add> expect(_file('addon/templates/components/nested/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/pods/bar/baz/x-foo/component-test.js'))
<add> expect(_file('app/components/nested/x-foo.js'))
<add> .to.contain("export { default } from 'my-addon/components/nested/x-foo';");
<add>
<add> expect(_file('tests/integration/components/nested/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/baz/x-foo'")
<add> .to.contain("moduleForComponent('nested/x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{bar/baz/x-foo}}")
<del> .to.contain("{{#bar/baz/x-foo}}");
<del> }));
<del> });
<add> .to.contain("{{nested/x-foo}}")
<add> .to.contain("{{#nested/x-foo}}");
<add> });
<add> });
<ide>
<del> it('component foo/x-foo --pod --path nested', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar/baz'], _file => {
<del> expect(_file('app/bar/baz/foo/x-foo/component.js'))
<add> it('component x-foo --dummy', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--dummy'], _file => {
<add> expect(_file('tests/dummy/app/components/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/bar/baz/foo/x-foo/template.hbs'))
<add> expect(_file('tests/dummy/app/templates/components/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/bar/baz/foo/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/baz/foo/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/baz/foo/x-foo}}")
<del> .to.contain("{{#bar/baz/foo/x-foo}}");
<del> }));
<del> });
<add> expect(_file('app/components/x-foo.js'))
<add> .to.not.exist;
<ide>
<del> it('component foo/x-foo --pod --path nested podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '--path', 'bar/baz'], _file => {
<del> expect(_file('app/pods/bar/baz/foo/x-foo/component.js'))
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.not.exist;
<add> });
<add> });
<add>
<add> it('component nested/x-foo --dummy', function() {
<add> return emberGenerateDestroy(['component', 'nested/x-foo', '--dummy'], _file => {
<add> expect(_file('tests/dummy/app/components/nested/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/pods/bar/baz/foo/x-foo/template.hbs'))
<add> expect(_file('tests/dummy/app/templates/components/nested/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/pods/bar/baz/foo/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('bar/baz/foo/x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{bar/baz/foo/x-foo}}")
<del> .to.contain("{{#bar/baz/foo/x-foo}}");
<del> }));
<del> });
<add> expect(_file('app/components/nested/x-foo.js'))
<add> .to.not.exist;
<ide>
<del> it('component x-foo --pod -no-path', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '-no-path'], _file => {
<del> expect(_file('app/x-foo/component.js'))
<add> expect(_file('tests/unit/components/nested/x-foo-test.js'))
<add> .to.not.exist;
<add> });
<add> });
<add>
<add> it('component x-foo --pod', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<add> expect(_file('addon/components/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<add> .to.contain("import layout from './template';")
<ide> .to.contain("export default Component.extend({")
<add> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/x-foo/template.hbs'))
<add> expect(_file('addon/components/x-foo/template.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/x-foo/component-test.js'))
<add> expect(_file('app/components/x-foo/component.js'))
<add> .to.contain("export { default } from 'my-addon/components/x-foo/component';");
<add>
<add> expect(_file('tests/integration/components/x-foo/component-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("integration: true");
<add> });
<add> });
<add>
<add> it('component-test x-foo', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<ide> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true")
<ide> .to.contain("{{x-foo}}")
<ide> .to.contain("{{#x-foo}}");
<del> }));
<del> });
<ide>
<del> it('component x-foo --pod -no-path podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod', '-no-path'], _file => {
<del> expect(_file('app/pods/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<add> expect(_file('app/component-test/x-foo.js'))
<add> .to.not.exist;
<add> });
<add> });
<ide>
<del> expect(_file('app/pods/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> it('component-test x-foo --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("unit: true");
<ide>
<del> expect(_file('tests/integration/pods/x-foo/component-test.js'))
<add> expect(_file('app/component-test/x-foo.js'))
<add> .to.not.exist;
<add> });
<add> });
<add>
<add> it('component-test x-foo --dummy', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--dummy'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del> }));
<add> .to.contain("moduleForComponent('x-foo'");
<add>
<add> expect(_file('app/component-test/x-foo.js'))
<add> .to.not.exist;
<add> });
<add> });
<ide> });
<ide>
<del> it('component foo/x-foo --pod -no-path', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '-no-path'], _file => {
<del> expect(_file('app/foo/x-foo/component.js'))
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('component x-foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--in-repo-addon=my-addon'], _file => {
<add> expect(_file('lib/my-addon/addon/components/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<add> .to.contain("import layout from '../templates/components/x-foo';")
<ide> .to.contain("export default Component.extend({")
<add> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('app/foo/x-foo/template.hbs'))
<add> expect(_file('lib/my-addon/addon/templates/components/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('tests/integration/foo/x-foo/component-test.js'))
<add> expect(_file('lib/my-addon/app/components/x-foo.js'))
<add> .to.contain("export { default } from 'my-addon/components/x-foo';");
<add>
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('foo/x-foo'")
<add> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{foo/x-foo}}")
<del> .to.contain("{{#foo/x-foo}}");
<del> }));
<del> });
<del>
<del> it('component foo/x-foo --pod -no-path podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['component', 'foo/x-foo', '--pod', '-no-path'], _file => {
<del> expect(_file('app/pods/foo/x-foo/component.js'))
<del> .to.contain("import Component from '@ember/component';")
<del> .to.contain("export default Component.extend({")
<del> .to.contain("});");
<del>
<del> expect(_file('app/pods/foo/x-foo/template.hbs'))
<del> .to.equal("{{yield}}");
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<ide>
<del> expect(_file('tests/integration/pods/foo/x-foo/component-test.js'))
<add> it('component-test x-foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--in-repo-addon=my-addon'], _file => {
<add> expect(_file('tests/integration/components/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('foo/x-foo'")
<add> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true")
<del> .to.contain("{{foo/x-foo}}")
<del> .to.contain("{{#foo/x-foo}}");
<del> }));
<del> });
<add> .to.contain("{{x-foo}}")
<add> .to.contain("{{#x-foo}}");
<add> });
<add> });
<ide>
<del> it('in-addon component x-foo --pod', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--pod'], _file => {
<del> expect(_file('addon/components/x-foo/component.js'))
<add> it('component-test x-foo --in-repo-addon=my-addon --unit', function() {
<add> return emberGenerateDestroy(['component-test', 'x-foo', '--in-repo-addon=my-addon', '--unit'], _file => {
<add> expect(_file('tests/unit/components/x-foo-test.js'))
<add> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<add> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("unit: true");
<add> });
<add> });
<add>
<add> it('component nested/x-foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['component', 'nested/x-foo', '--in-repo-addon=my-addon'], _file => {
<add> expect(_file('lib/my-addon/addon/components/nested/x-foo.js'))
<ide> .to.contain("import Component from '@ember/component';")
<del> .to.contain("import layout from './template';")
<add> .to.contain("import layout from '../../templates/components/nested/x-foo';")
<ide> .to.contain("export default Component.extend({")
<ide> .to.contain("layout")
<ide> .to.contain("});");
<ide>
<del> expect(_file('addon/components/x-foo/template.hbs'))
<add> expect(_file('lib/my-addon/addon/templates/components/nested/x-foo.hbs'))
<ide> .to.equal("{{yield}}");
<ide>
<del> expect(_file('app/components/x-foo/component.js'))
<del> .to.contain("export { default } from 'my-addon/components/x-foo/component';");
<add> expect(_file('lib/my-addon/app/components/nested/x-foo.js'))
<add> .to.contain("export { default } from 'my-addon/components/nested/x-foo';");
<ide>
<del> expect(_file('tests/integration/components/x-foo/component-test.js'))
<add> expect(_file('tests/integration/components/nested/x-foo-test.js'))
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("moduleForComponent('x-foo'")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("moduleForComponent('nested/x-foo'")
<ide> .to.contain("integration: true");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon component x-foo --pod', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component', 'x-foo', '--in-repo-addon=my-addon', '--pod'], _file => {
<add> it('component x-foo --in-repo-addon=my-addon --pod', function() {
<add> return emberGenerateDestroy(['component', 'x-foo', '--in-repo-addon=my-addon', '--pod'], _file => {
<ide> expect(_file('lib/my-addon/addon/components/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("import layout from './template';")
<ide> describe('Blueprint: component', function() {
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("moduleForComponent('x-foo'")
<ide> .to.contain("integration: true");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon component nested/x-foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['component', 'nested/x-foo', '--in-repo-addon=my-addon', '--pod'], _file => {
<add> it('component nested/x-foo --in-repo-addon=my-addon --pod', function() {
<add> return emberGenerateDestroy(['component', 'nested/x-foo', '--in-repo-addon=my-addon', '--pod'], _file => {
<ide> expect(_file('lib/my-addon/addon/components/nested/x-foo/component.js'))
<ide> .to.contain("import Component from '@ember/component';")
<ide> .to.contain("import layout from './template';")
<ide> describe('Blueprint: component', function() {
<ide> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<ide> .to.contain("moduleForComponent('nested/x-foo'")
<ide> .to.contain("integration: true");
<del> }));
<del> });
<del>
<del> it('component-test x-foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/default.js'));
<del> }));
<del> });
<del>
<del> describe('usePods: true', function() {
<del> it('component-test x-foo', function() {
<del> return emberNew()
<del> .then(() => {
<del> fs.writeFileSync('.ember-cli', `{
<del> "disableAnalytics": false,
<del> "usePods": true
<del> }`);
<del> })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo/component-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del> }));
<add> });
<ide> });
<ide> });
<del>
<del> it('component-test x-foo --unit', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/unit.js'));
<del> }));
<del> });
<del>
<del> it('in-addon component-test x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("integration: true")
<del> .to.contain("{{x-foo}}")
<del> .to.contain("{{#x-foo}}");
<del>
<del> expect(_file('app/component-test/x-foo.js'))
<del> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('in-addon component-test x-foo --unit', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("moduleForComponent('x-foo'")
<del> .to.contain("unit: true");
<del>
<del> expect(_file('app/component-test/x-foo.js'))
<del> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('dummy component-test x-foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--dummy'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.contain("import { moduleForComponent, test } from 'ember-qunit';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("moduleForComponent('x-foo'");
<del>
<del> expect(_file('app/component-test/x-foo.js'))
<del> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('component-test x-foo for RFC232', function() {
<del> return emberNew()
<del> .then(() => generateFakePackageManifest('ember-cli-qunit', '4.1.1'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/rfc232.js'));
<del> }));
<del> });
<del>
<del> it('component-test x-foo --unit for RFC232', function() {
<del> return emberNew()
<del> .then(() => generateFakePackageManifest('ember-cli-qunit', '4.1.1'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/rfc232-unit.js'));
<del> }));
<del> });
<del>
<del> it('component-test x-foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/mocha.js'));
<del> }));
<del> });
<del>
<del> it('component-test x-foo --unit for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/mocha-unit.js'));
<del> }));
<del> });
<del>
<del> it('component-test x-foo for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo'], _file => {
<del> expect(_file('tests/integration/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/mocha-0.12.js'));
<del> }));
<del> });
<del>
<del> it('component-test x-foo --unit for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['component-test', 'x-foo', '--unit'], _file => {
<del> expect(_file('tests/unit/components/x-foo-test.js'))
<del> .to.equal(fixture('component-test/mocha-0.12-unit.js'));
<del> }));
<del> });
<ide> });
<ide><path>node-tests/blueprints/controller-test.js
<ide> const generateFakePackageManifest = require('../helpers/generate-fake-package-ma
<ide> describe('Blueprint: controller', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('controller foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['controller', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('controller foo', function() {
<add> return emberGenerateDestroy(['controller', 'foo'], _file => {
<ide> expect(_file('app/controllers/foo.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide>
<ide> expect(_file('tests/unit/controllers/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('controller foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar'], _file => {
<add> it('controller foo/bar', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar'], _file => {
<ide> expect(_file('app/controllers/foo/bar.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide>
<ide> expect(_file('tests/unit/controllers/foo/bar-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo/bar'");
<del> }));
<del> });
<add> });
<add> });
<add>
<add> it('controller foo --pod', function() {
<add> return emberGenerateDestroy(['controller', 'foo', '--pod'], _file => {
<add> expect(_file('app/foo/controller.js'))
<add> .to.contain("import Controller from '@ember/controller';")
<add> .to.contain("export default Controller.extend({\n});");
<add>
<add> expect(_file('tests/unit/foo/controller-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo'");
<add> });
<add> });
<add>
<add> it('controller foo/bar --pod', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/foo/bar/controller.js'))
<add> .to.contain("import Controller from '@ember/controller';")
<add> .to.contain("export default Controller.extend({\n});");
<ide>
<del> it('in-addon controller foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo'], _file => {
<add> expect(_file('tests/unit/foo/bar/controller-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo/bar'");
<add> });
<add> });
<add>
<add> it('controller-test foo', function() {
<add> return emberGenerateDestroy(['controller-test', 'foo'], _file => {
<add> expect(_file('tests/unit/controllers/foo-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo'");
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('controller foo --pod podModulePrefix', function() {
<add> return emberGenerateDestroy(['controller', 'foo', '--pod'], _file => {
<add> expect(_file('app/pods/foo/controller.js'))
<add> .to.contain("import Controller from '@ember/controller';")
<add> .to.contain("export default Controller.extend({\n});");
<add>
<add> expect(_file('tests/unit/pods/foo/controller-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo'");
<add> });
<add> });
<add>
<add> it('controller foo/bar --pod podModulePrefix', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/pods/foo/bar/controller.js'))
<add> .to.contain("import Controller from '@ember/controller';")
<add> .to.contain("export default Controller.extend({\n});");
<add>
<add> expect(_file('tests/unit/pods/foo/bar/controller-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo/bar'");
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.11.0');
<add> });
<add>
<add>      it('controller-test foo', function() {
<add> return emberGenerateDestroy(['controller-test', 'foo'], _file => {
<add> expect(_file('tests/unit/controllers/foo-test.js'))
<add> .to.contain("import { describeModule, it } from 'ember-mocha';")
<add> .to.contain("describeModule('controller:foo', 'Unit | Controller | foo'");
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.12.0');
<add> });
<add>
<add> it('controller-test foo', function() {
<add> return emberGenerateDestroy(['controller-test', 'foo'], _file => {
<add> expect(_file('tests/unit/controllers/foo-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import { setupTest } from 'ember-mocha';")
<add> .to.contain("describe('Unit | Controller | foo'")
<add> .to.contain("setupTest('controller:foo',");
<add> });
<add> });
<add> });
<add> });
<add>
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('controller foo', function() {
<add> return emberGenerateDestroy(['controller', 'foo'], _file => {
<ide> expect(_file('addon/controllers/foo.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide> expect(_file('tests/unit/controllers/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon controller foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar'], _file => {
<add> it('controller foo/bar', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar'], _file => {
<ide> expect(_file('addon/controllers/foo/bar.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide> expect(_file('tests/unit/controllers/foo/bar-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo/bar'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy controller foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo', '--dummy'], _file => {
<add> it('controller foo --dummy', function() {
<add> return emberGenerateDestroy(['controller', 'foo', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/controllers/foo.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide>
<ide> expect(_file('tests/unit/controllers/foo-test.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy controller foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar', '--dummy'], _file => {
<add> it('controller foo/bar --dummy', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/controllers/foo/bar.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide>
<ide> expect(_file('tests/unit/controllers/foo/bar-test.js'))
<ide> .to.not.exist;
<del> }));
<add> });
<add> });
<add>
<add> it('controller-test foo', function() {
<add> return emberGenerateDestroy(['controller-test', 'foo'], _file => {
<add> expect(_file('tests/unit/controllers/foo-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('controller:foo'");
<add> });
<add> });
<ide> });
<ide>
<del> it('in-repo-addon controller foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo', '--in-repo-addon=my-addon'], _file => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('controller foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['controller', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/controllers/foo.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide> expect(_file('tests/unit/controllers/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon controller foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<add> it('controller foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['controller', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/controllers/foo/bar.js'))
<ide> .to.contain("import Controller from '@ember/controller';")
<ide> .to.contain("export default Controller.extend({\n});");
<ide> describe('Blueprint: controller', function() {
<ide> expect(_file('tests/unit/controllers/foo/bar-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('controller:foo/bar'");
<del> }));
<del> });
<del>
<del> it('controller foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['controller', 'foo', '--pod'], _file => {
<del> expect(_file('app/foo/controller.js'))
<del> .to.contain("import Controller from '@ember/controller';")
<del> .to.contain("export default Controller.extend({\n});");
<del>
<del> expect(_file('tests/unit/foo/controller-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<del>
<del> it('controller foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['controller', 'foo', '--pod'], _file => {
<del> expect(_file('app/pods/foo/controller.js'))
<del> .to.contain("import Controller from '@ember/controller';")
<del> .to.contain("export default Controller.extend({\n});");
<del>
<del> expect(_file('tests/unit/pods/foo/controller-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<del>
<del> it('controller foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/foo/bar/controller.js'))
<del> .to.contain("import Controller from '@ember/controller';")
<del> .to.contain("export default Controller.extend({\n});");
<del>
<del> expect(_file('tests/unit/foo/bar/controller-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo/bar'");
<del> }));
<del> });
<del>
<del> it('controller foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['controller', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/pods/foo/bar/controller.js'))
<del> .to.contain("import Controller from '@ember/controller';")
<del> .to.contain("export default Controller.extend({\n});");
<del>
<del> expect(_file('tests/unit/pods/foo/bar/controller-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo/bar'");
<del> }));
<del> });
<del>
<del> it('controller-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['controller-test', 'foo'], _file => {
<del> expect(_file('tests/unit/controllers/foo-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<del>
<del> it('in-addon controller-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['controller-test', 'foo'], _file => {
<del> expect(_file('tests/unit/controllers/foo-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('controller:foo'");
<del> }));
<del> });
<del>
<del> it('controller-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['controller-test', 'foo'], _file => {
<del> expect(_file('tests/unit/controllers/foo-test.js'))
<del> .to.contain("import { describeModule, it } from 'ember-mocha';")
<del> .to.contain("describeModule('controller:foo', 'Unit | Controller | foo'");
<del> }));
<del> });
<del>
<del> it('controller-test foo for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['controller-test', 'foo'], _file => {
<del> expect(_file('tests/unit/controllers/foo-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import { setupTest } from 'ember-mocha';")
<del> .to.contain("describe('Unit | Controller | foo'")
<del> .to.contain("setupTest('controller:foo',");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/helper-addon-test.js
<ide> const file = require('../helpers/file');
<ide> describe('Blueprint: helper-addon', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('in-addon helper-addon foo/bar-baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['helper-addon', 'foo/bar-baz'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('helper-addon foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper-addon', 'foo/bar-baz'], _file => {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper-addon.js'));
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/helper-test.js
<ide> const file = require('../helpers/file');
<ide> describe('Blueprint: helper', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('helper foo/bar-baz', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('helper foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('helper foo/bar-baz unit', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper', '--test-type=unit', 'foo/bar-baz'], _file => {
<add> it('helper foo/bar-baz unit', function() {
<add> return emberGenerateDestroy(['helper', '--test-type=unit', 'foo/bar-baz'], _file => {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<ide> expect(_file('tests/unit/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/unit.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon helper foo/bar-baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<del> expect(_file('addon/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper.js'));
<add> it('helper foo/bar-baz --pod', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper-addon.js'));
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<del>
<del> it('in-addon helper foo/bar-baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<del> expect(_file('addon/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<del> expect(_file('app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper-addon.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy helper foo/bar-baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--dummy'], _file => {
<del> expect(_file('tests/dummy/app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper.js'));
<add> it('helper foo/bar-baz --pod', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<ide> expect(_file('app/helpers/foo/bar-baz.js'))
<del> .to.not.exist;
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('in-repo-addon helper foo/bar-baz', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--in-repo-addon=my-addon'], _file => {
<del> expect(_file('lib/my-addon/addon/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<del> expect(_file('lib/my-addon/app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper-addon.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon helper foo/bar-baz', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--in-repo-addon=my-addon'], _file => {
<del> expect(_file('lib/my-addon/addon/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper.js'));
<del> expect(_file('lib/my-addon/app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper-addon.js'));
<add> it('helper-test foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('helper foo/bar-baz --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<del> expect(_file('app/helpers/foo/bar-baz.js'))
<del> .to.equal(file('helper.js'));
<add> it('helper-test foo/bar-baz --integration', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<del>
<del> it('helper foo/bar-baz --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<del> expect(_file('app/helpers/foo/bar-baz.js'))
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('helper foo/bar-baz --pod', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper.js'));
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<add> });
<add> });
<add>
<add> it('helper foo/bar-baz --pod', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper.js'));
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.equal(file('helper-test/integration.js'));
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.11.0');
<add> });
<add>
<add> it('helper-test foo/bar-baz --integration', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.contain("import { describeComponent, it } from 'ember-mocha';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';");
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.12.0');
<add> });
<add>
<add> it('helper-test foo/bar-baz --integration', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import { setupComponentTest } from 'ember-mocha';")
<add> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<add> .to.contain("describe('Integration | Helper | foo/bar baz', function() {");
<add> });
<add> });
<add>
<add>      it('helper-test foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<add> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("setupComponentTest('foo/bar-baz', {")
<add> .to.contain("describe('Integration | Helper | foo/bar baz', function() {");
<add> });
<add> });
<add> });
<add> });
<add>
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('helper foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<add> expect(_file('addon/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper-addon.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('helper foo/bar-baz --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<del> expect(_file('app/helpers/foo/bar-baz.js'))
<add> it('helper foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz'], _file => {
<add> expect(_file('addon/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper-addon.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('helper foo/bar-baz --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['helper', 'foo/bar-baz', '--pod'], _file => {
<del> expect(_file('app/helpers/foo/bar-baz.js'))
<add> it('helper foo/bar-baz --dummy', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--dummy'], _file => {
<add> expect(_file('tests/dummy/app/helpers/foo/bar-baz.js'))
<ide> .to.equal(file('helper.js'));
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.not.exist;
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> .to.not.exist;
<add> });
<add> });
<ide>
<del> it('helper-test foo/bar-baz --integration', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<add> it('helper-test foo/bar-baz', function() {
<add> return emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<add> });
<add> });
<ide> });
<ide>
<del> it('helper-test foo/bar-baz', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<ide>
<del> it('in-addon helper-test foo/bar-baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<add> it('helper foo/bar-baz --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['helper', 'foo/bar-baz', '--in-repo-addon=my-addon'], _file => {
<add> expect(_file('lib/my-addon/addon/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper.js'));
<add> expect(_file('lib/my-addon/app/helpers/foo/bar-baz.js'))
<add> .to.equal(file('helper-addon.js'));
<ide> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<ide> .to.equal(file('helper-test/integration.js'));
<del> }));
<del> });
<del>
<del> it('helper-test foo/bar-baz --integration for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("import { describeComponent, it } from 'ember-mocha';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';");
<del> }));
<del> });
<del>
<del> it('helper-test foo/bar-baz --integration for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz', '--integration'], _file => {
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import { setupComponentTest } from 'ember-mocha';")
<del> .to.contain("import hbs from 'htmlbars-inline-precompile';")
<del> .to.contain("describe('Integration | Helper | foo/bar baz', function() {");
<del> }));
<del> });
<del>
<del> it('helper-test foo/bar-baz for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['helper-test', 'foo/bar-baz'], _file => {
<del> expect(_file('tests/integration/helpers/foo/bar-baz-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("setupComponentTest('foo/bar-baz', {")
<del> .to.contain("describe('Integration | Helper | foo/bar baz', function() {");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/initializer-addon-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: initializer-addon', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('initializer-addon foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer-addon', 'foo'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('initializer-addon foo', function() {
<add> return emberGenerateDestroy(['initializer-addon', 'foo'], _file => {
<ide> expect(_file('app/initializers/foo.js'))
<ide> .to.contain("export { default, initialize } from 'my-addon/initializers/foo';");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/initializer-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: initializer', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('initializer foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['initializer', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('initializer foo', function() {
<add> return emberGenerateDestroy(['initializer', 'foo'], _file => {
<ide> expect(_file('app/initializers/foo.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo-test.js'))
<ide> .to.contain("import { initialize } from 'my-app/initializers/foo';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('initializer foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar'], _file => {
<add> it('initializer foo/bar', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar'], _file => {
<ide> expect(_file('app/initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo/bar-test.js'))
<ide> .to.contain("import { initialize } from 'my-app/initializers/foo/bar';");
<del> }));
<add> });
<add> });
<add>
<add> it('initializer foo --pod', function() {
<add> return emberGenerateDestroy(['initializer', 'foo', '--pod'], _file => {
<add> expect(_file('app/initializers/foo.js'))
<add> .to.contain("export function initialize(/* application */) {\n" +
<add> " // application.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('initializer foo/bar --pod', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/initializers/foo/bar.js'))
<add> .to.contain("export function initialize(/* application */) {\n" +
<add> " // application.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('initializer-test foo', function() {
<add> return emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'my-app/initializers/foo';")
<add> .to.contain("module('Unit | Initializer | foo'")
<add> .to.contain("application = Application.create();")
<add> .to.contain("initialize(this.application);");
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('initializer foo --pod', function() {
<add> return emberGenerateDestroy(['initializer', 'foo', '--pod'], _file => {
<add> expect(_file('app/initializers/foo.js'))
<add> .to.contain("export function initialize(/* application */) {\n" +
<add> " // application.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('initializer foo/bar --pod', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/initializers/foo/bar.js'))
<add> .to.contain("export function initialize(/* application */) {\n" +
<add> " // application.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add> });
<add>
<add> describe('with ember-cli-mocha', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> });
<add>
<add> it('initializer-test foo', function() {
<add> return emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'my-app/initializers/foo';")
<add> .to.contain("describe('Unit | Initializer | foo', function() {")
<add> .to.contain("application = Application.create();")
<add> .to.contain("initialize(application);");
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('in-addon initializer foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('initializer foo', function() {
<add> return emberGenerateDestroy(['initializer', 'foo'], _file => {
<ide> expect(_file('addon/initializers/foo.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon initializer foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar'], _file => {
<add> it('initializer foo/bar', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar'], _file => {
<ide> expect(_file('addon/initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo/bar-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy initializer foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo', '--dummy'], _file => {
<add>    it('initializer foo --dummy', function() {
<add> return emberGenerateDestroy(['initializer', 'foo', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/initializers/foo.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo-test.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy initializer foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar', '--dummy'], _file => {
<add> it('initializer foo/bar --dummy', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo/bar-test.js'))
<ide> .to.not.exist;
<del> }));
<add> });
<add> });
<add>
<add> it('initializer-test foo', function() {
<add> return emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'dummy/initializers/foo';")
<add> .to.contain("module('Unit | Initializer | foo'")
<add> .to.contain("application = Application.create();")
<add> .to.contain("initialize(this.application);");
<add> });
<add> });
<add>
<ide> });
<ide>
<del> it('in-repo-addon initializer foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo', '--in-repo-addon=my-addon'], _file => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('initializer foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['initializer', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/initializers/foo.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon initializer foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<add> it('initializer foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['initializer', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* application */) {\n" +
<ide> " // application.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: initializer', function() {
<ide>
<ide> expect(_file('tests/unit/initializers/foo/bar-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<del>
<del> /* Pod tests */
<del>
<del> it('initializer foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['initializer', 'foo', '--pod'], _file => {
<del> expect(_file('app/initializers/foo.js'))
<del> .to.contain("export function initialize(/* application */) {\n" +
<del> " // application.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del> it('initializer foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['initializer', 'foo', '--pod'], _file => {
<del> expect(_file('app/initializers/foo.js'))
<del> .to.contain("export function initialize(/* application */) {\n" +
<del> " // application.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del> it('initializer foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/initializers/foo/bar.js'))
<del> .to.contain("export function initialize(/* application */) {\n" +
<del> " // application.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del>
<del> it('initializer foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['initializer', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/initializers/foo/bar.js'))
<del> .to.contain("export function initialize(/* application */) {\n" +
<del> " // application.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del>
<del> it('initializer-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'my-app/initializers/foo';")
<del> .to.contain("module('Unit | Initializer | foo'")
<del> .to.contain("application = Application.create();")
<del> .to.contain("initialize(this.application);");
<del> }));
<del> });
<del>
<del> it('in-addon initializer-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'dummy/initializers/foo';")
<del> .to.contain("module('Unit | Initializer | foo'")
<del> .to.contain("application = Application.create();")
<del> .to.contain("initialize(this.application);");
<del> }));
<del> });
<del>
<del> it('initializer-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => emberGenerateDestroy(['initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'my-app/initializers/foo';")
<del> .to.contain("describe('Unit | Initializer | foo', function() {")
<del> .to.contain("application = Application.create();")
<del> .to.contain("initialize(application);");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/instance-initializer-addon-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: instance-initializer-addon', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('instance-initializer-addon foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer-addon', 'foo'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('instance-initializer-addon foo', function() {
<add> return emberGenerateDestroy(['instance-initializer-addon', 'foo'], _file => {
<ide> expect(_file('app/instance-initializers/foo.js'))
<ide> .to.contain("export { default, initialize } from 'my-addon/instance-initializers/foo';");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/instance-initializer-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: instance-initializer', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('instance-initializer foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo'], _file => {
<add>
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('instance-initializer foo', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo'], _file => {
<ide> expect(_file('app/instance-initializers/foo.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<ide> .to.contain("import { initialize } from 'my-app/instance-initializers/foo';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('instance-initializer foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar'], _file => {
<add> it('instance-initializer foo/bar', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar'], _file => {
<ide> expect(_file('app/instance-initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo/bar-test.js'))
<ide> .to.contain("import { initialize } from 'my-app/instance-initializers/foo/bar';");
<del> }));
<add> });
<add> });
<add>
<add> it('instance-initializer foo --pod', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo', '--pod'], _file => {
<add> expect(_file('app/instance-initializers/foo.js'))
<add> .to.contain("export function initialize(/* appInstance */) {\n" +
<add> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('instance-initializer foo/bar --pod', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/instance-initializers/foo/bar.js'))
<add> .to.contain("export function initialize(/* appInstance */) {\n" +
<add> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('instance-initializer-test foo', function() {
<add> return emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'my-app/instance-initializers/foo';")
<add> .to.contain("module('Unit | Instance Initializer | foo'")
<add> .to.contain("application = Application.create();")
<add> .to.contain("this.appInstance = this.application.buildInstance();")
<add> .to.contain("initialize(this.appInstance);");
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('instance-initializer foo --pod', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo', '--pod'], _file => {
<add> expect(_file('app/instance-initializers/foo.js'))
<add> .to.contain("export function initialize(/* appInstance */) {\n" +
<add> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add>
<add> it('instance-initializer foo/bar --pod', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/instance-initializers/foo/bar.js'))
<add> .to.contain("export function initialize(/* appInstance */) {\n" +
<add> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<add> "}\n" +
<add> "\n" +
<add> "export default {\n" +
<add> " initialize\n" +
<add> "};");
<add> });
<add> });
<add> });
<add>
<add> describe('with ember-cli-mocha', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> });
<add>
<add>      it('instance-initializer-test foo', function() {
<add> return emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'my-app/instance-initializers/foo';")
<add> .to.contain("describe('Unit | Instance Initializer | foo', function() {")
<add> .to.contain("application = Application.create();")
<add> .to.contain("appInstance = application.buildInstance();")
<add> .to.contain("initialize(appInstance);");
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('in-addon instance-initializer foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('instance-initializer foo', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo'], _file => {
<ide> expect(_file('addon/instance-initializers/foo.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide> .to.contain("export { default, initialize } from 'my-addon/instance-initializers/foo';");
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo-test.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon instance-initializer foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar'], _file => {
<add> it('instance-initializer foo/bar', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar'], _file => {
<ide> expect(_file('addon/instance-initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide> .to.contain("export { default, initialize } from 'my-addon/instance-initializers/foo/bar';");
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo/bar-test.js'));
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy instance-initializer foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo', '--dummy'], _file => {
<add> it('instance-initializer foo --dummy', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/instance-initializers/foo.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy instance-initializer foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar', '--dummy'], _file => {
<add> it('instance-initializer foo/bar --dummy', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/instance-initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo/bar-test.js'))
<ide> .to.not.exist;
<del> }));
<add> });
<add> });
<add>
<add> it('instance-initializer-test foo', function() {
<add> return emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<add> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<add> .to.contain("import { initialize } from 'dummy/instance-initializers/foo';")
<add> .to.contain("module('Unit | Instance Initializer | foo'")
<add> .to.contain("application = Application.create();")
<add> .to.contain("this.appInstance = this.application.buildInstance();")
<add> .to.contain("initialize(this.appInstance);");
<add> });
<add> });
<ide> });
<ide>
<del> it('in-repo-addon instance-initializer foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo', '--in-repo-addon=my-addon'], _file => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('instance-initializer foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/instance-initializers/foo.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon instance-initializer foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<add> it('instance-initializer foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['instance-initializer', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/instance-initializers/foo/bar.js'))
<ide> .to.contain("export function initialize(/* appInstance */) {\n" +
<ide> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<ide> describe('Blueprint: instance-initializer', function() {
<ide>
<ide> expect(_file('tests/unit/instance-initializers/foo/bar-test.js'))
<ide> .to.exist;
<del> }));
<del> });
<del>
<del> /* Pod tests */
<del>
<del> it('instance-initializer foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo', '--pod'], _file => {
<del> expect(_file('app/instance-initializers/foo.js'))
<del> .to.contain("export function initialize(/* appInstance */) {\n" +
<del> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del> it('instance-initializer foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo', '--pod'], _file => {
<del> expect(_file('app/instance-initializers/foo.js'))
<del> .to.contain("export function initialize(/* appInstance */) {\n" +
<del> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del> it('instance-initializer foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/instance-initializers/foo/bar.js'))
<del> .to.contain("export function initialize(/* appInstance */) {\n" +
<del> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del>
<del> it('instance-initializer foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['instance-initializer', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/instance-initializers/foo/bar.js'))
<del> .to.contain("export function initialize(/* appInstance */) {\n" +
<del> " // appInstance.inject('route', 'foo', 'service:foo');\n" +
<del> "}\n" +
<del> "\n" +
<del> "export default {\n" +
<del> " initialize\n" +
<del> "};");
<del> }));
<del> });
<del>
<del>
<del> it('instance-initializer-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'my-app/instance-initializers/foo';")
<del> .to.contain("module('Unit | Instance Initializer | foo'")
<del> .to.contain("application = Application.create();")
<del> .to.contain("this.appInstance = this.application.buildInstance();")
<del> .to.contain("initialize(this.appInstance);");
<del> }));
<del> });
<del>
<del> it('in-addon instance-initializer-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'dummy/instance-initializers/foo';")
<del> .to.contain("module('Unit | Instance Initializer | foo'")
<del> .to.contain("application = Application.create();")
<del> .to.contain("this.appInstance = this.application.buildInstance();")
<del> .to.contain("initialize(this.appInstance);");
<del> }));
<del> });
<del>
<del> it('instance-initializer-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => emberGenerateDestroy(['instance-initializer-test', 'foo'], _file => {
<del> expect(_file('tests/unit/instance-initializers/foo-test.js'))
<del> .to.contain("import { initialize } from 'my-app/instance-initializers/foo';")
<del> .to.contain("describe('Unit | Instance Initializer | foo', function() {")
<del> .to.contain("application = Application.create();")
<del> .to.contain("appInstance = application.buildInstance();")
<del> .to.contain("initialize(appInstance);");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/mixin-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: mixin', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('mixin foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo'], _file => {
<add>
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('mixin foo', function() {
<add> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('app/mixins/foo.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js'))
<ide> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('mixin foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<add> it('mixin foo/bar', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('app/mixins/foo/bar.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<ide> .to.contain("import FooBarMixin from 'my-app/mixins/foo/bar';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('mixin foo/bar/baz', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<add> it('mixin foo/bar/baz', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<ide> expect(_file('tests/unit/mixins/foo/bar/baz-test.js'))
<ide> .to.contain("import FooBarBazMixin from 'my-app/mixins/foo/bar/baz';");
<del> }));
<del> });
<add> });
<add> });
<add>
<add> it('mixin foo --pod', function() {
<add> return emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<add> expect(_file('app/mixins/foo.js'))
<add> .to.contain('import Mixin from \'@ember/object/mixin\';')
<add> .to.contain('export default Mixin.create({\n});');
<add>
<add> expect(_file('tests/unit/mixins/foo-test.js'))
<add> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<add> });
<add> });
<add>
<add> it('mixin foo/bar --pod', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/mixins/foo/bar.js'))
<add> .to.contain('import Mixin from \'@ember/object/mixin\';')
<add> .to.contain('export default Mixin.create({\n});');
<add>
<add> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<add> .to.contain("import FooBarMixin from 'my-app/mixins/foo/bar';");
<add> });
<add> });
<add>
<add> it('mixin foo/bar/baz --pod', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar/baz', '--pod'], _file => {
<add> expect(_file('tests/unit/mixins/foo/bar/baz-test.js'))
<add> .to.contain("import FooBarBazMixin from 'my-app/mixins/foo/bar/baz';");
<add> });
<add> });
<ide>
<del> it('in-addon mixin foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo'], _file => {
<add> it('mixin-test foo', function() {
<add> return emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<add> expect(_file('tests/unit/mixins/foo-test.js'))
<add> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('mixin foo --pod', function() {
<add> return emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<add> expect(_file('app/mixins/foo.js'))
<add> .to.contain('import Mixin from \'@ember/object/mixin\';')
<add> .to.contain('export default Mixin.create({\n});');
<add>
<add> expect(_file('tests/unit/mixins/foo-test.js'))
<add> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<add> });
<add> });
<add>
<add> it('mixin foo/bar --pod', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/mixins/foo/bar.js'))
<add> .to.contain('import Mixin from \'@ember/object/mixin\';')
<add> .to.contain('export default Mixin.create({\n});');
<add>
<add> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<add> .to.contain("import FooBarMixin from 'my-app/mixins/foo/bar';");
<add> });
<add> });
<add> });
<add>
<add> describe('with ember-cli-mocha', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> });
<add>
<add> it('mixin-test foo', function() {
<add> return emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<add> expect(_file('tests/unit/mixins/foo-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import FooMixin from 'my-app/mixins/foo';")
<add> .to.contain("describe('Unit | Mixin | foo', function() {");
<add> });
<add> });
<add> });
<add> });
<add>
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('mixin foo', function() {
<add> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('addon/mixins/foo.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide> describe('Blueprint: mixin', function() {
<ide>
<ide> expect(_file('app/mixins/foo.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon mixin foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<add> it('mixin foo/bar', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('addon/mixins/foo/bar.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide> describe('Blueprint: mixin', function() {
<ide>
<ide> expect(_file('app/mixins/foo/bar.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon mixin foo/bar/baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<add> it('mixin foo/bar/baz', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<ide> expect(_file('addon/mixins/foo/bar/baz.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide> describe('Blueprint: mixin', function() {
<ide>
<ide> expect(_file('app/mixins/foo/bar/baz.js'))
<ide> .to.not.exist;
<del> }));
<add> });
<add> });
<add>
<add> it('mixin-test foo', function() {
<add> return emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<add> expect(_file('tests/unit/mixins/foo-test.js'))
<add> .to.contain("import FooMixin from 'my-addon/mixins/foo';");
<add> });
<add> });
<ide> });
<ide>
<del> it('in-repo-addon mixin foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo', '--in-repo-addon=my-addon'], _file => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('mixin foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['mixin', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/mixins/foo.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js'))
<ide> .to.contain("import FooMixin from 'my-addon/mixins/foo';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon mixin foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<add> it('mixin foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/mixins/foo/bar.js'))
<ide> .to.contain('import Mixin from \'@ember/object/mixin\';')
<ide> .to.contain('export default Mixin.create({\n});');
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<ide> .to.contain("import FooBarMixin from 'my-addon/mixins/foo/bar';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon mixin foo/bar/baz', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar/baz', '--in-repo-addon=my-addon'], _file => {
<add> it('mixin foo/bar/baz --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['mixin', 'foo/bar/baz', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('tests/unit/mixins/foo/bar/baz-test.js'))
<ide> .to.contain("import FooBarBazMixin from 'my-addon/mixins/foo/bar/baz';");
<del> }));
<del> });
<del>
<del> /* Pod tests */
<del>
<del> it('mixin foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<del> expect(_file('app/mixins/foo.js'))
<del> .to.contain('import Mixin from \'@ember/object/mixin\';')
<del> .to.contain('export default Mixin.create({\n});');
<del>
<del> expect(_file('tests/unit/mixins/foo-test.js'))
<del> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<del> }));
<del> });
<del>
<del> it('mixin foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<del> expect(_file('app/mixins/foo.js'))
<del> .to.contain('import Mixin from \'@ember/object/mixin\';')
<del> .to.contain('export default Mixin.create({\n});');
<del>
<del> expect(_file('tests/unit/mixins/foo-test.js'))
<del> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<del> }));
<del> });
<del>
<del> it('mixin foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/mixins/foo/bar.js'))
<del> .to.contain('import Mixin from \'@ember/object/mixin\';')
<del> .to.contain('export default Mixin.create({\n});');
<del>
<del> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<del> .to.contain("import FooBarMixin from 'my-app/mixins/foo/bar';");
<del> }));
<del> });
<del>
<del> it('mixin foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/mixins/foo/bar.js'))
<del> .to.contain('import Mixin from \'@ember/object/mixin\';')
<del> .to.contain('export default Mixin.create({\n});');
<del>
<del> expect(_file('tests/unit/mixins/foo/bar-test.js'))
<del> .to.contain("import FooBarMixin from 'my-app/mixins/foo/bar';");
<del> }));
<del> });
<del>
<del> it('mixin foo/bar/baz --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin', 'foo/bar/baz', '--pod'], _file => {
<del> expect(_file('tests/unit/mixins/foo/bar/baz-test.js'))
<del> .to.contain("import FooBarBazMixin from 'my-app/mixins/foo/bar/baz';");
<del> }));
<del> });
<del>
<del> it('mixin-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<del> expect(_file('tests/unit/mixins/foo-test.js'))
<del> .to.contain("import FooMixin from 'my-app/mixins/foo';");
<del> }));
<del> });
<del>
<del> it('in-addon mixin-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<del> expect(_file('tests/unit/mixins/foo-test.js'))
<del> .to.contain("import FooMixin from 'my-addon/mixins/foo';");
<del> }));
<del> });
<del>
<del> it('mixin-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => emberGenerateDestroy(['mixin-test', 'foo'], _file => {
<del> expect(_file('tests/unit/mixins/foo-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import FooMixin from 'my-app/mixins/foo';")
<del> .to.contain("describe('Unit | Mixin | foo', function() {");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/route-addon-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: route-addon', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('route-addon foo', function() {
<del> return emberNew({ target: 'addon' }).then(() => emberGenerateDestroy(['route-addon', 'foo'], _file => {
<del> expect(_file('app/routes/foo.js'))
<del> .to.contain("export { default } from 'my-addon/routes/foo';");
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<ide>
<del> expect(_file('app/templates/foo.js'))
<del> .to.contain("export { default } from 'my-addon/templates/foo';");
<del> }));
<add> it('route-addon foo', function() {
<add> return emberGenerateDestroy(['route-addon', 'foo'], _file => {
<add> expect(_file('app/routes/foo.js'))
<add> .to.contain("export { default } from 'my-addon/routes/foo';");
<add>
<add> expect(_file('app/templates/foo.js'))
<add> .to.contain("export { default } from 'my-addon/templates/foo';");
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/route-test.js
<ide> const generateFakePackageManifest = require('../helpers/generate-fake-package-ma
<ide> describe('Blueprint: route', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('route foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'foo'], (_file) => {
<del> expect(_file('app/routes/foo.js'))
<del> .to.contain('import Route from \'@ember/routing/route\';')
<del> .to.contain('export default Route.extend({\n});');
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<ide>
<del> expect(_file('app/templates/foo.hbs'))
<del> .to.equal('{{outlet}}');
<add> it('route foo', function() {
<add> return emberGenerateDestroy(['route', 'foo'], (_file) => {
<add> expect(_file('app/routes/foo.js'))
<add> .to.contain('import Route from \'@ember/routing/route\';')
<add> .to.contain('export default Route.extend({\n});');
<ide>
<del> expect(_file('tests/unit/routes/foo-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<add> expect(_file('app/templates/foo.hbs'))
<add> .to.equal('{{outlet}}');
<ide>
<del> expect(file('app/router.js'))
<del> .to.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'foo\')'));
<del> });
<add> expect(_file('tests/unit/routes/foo-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<ide>
<del> it('route foo with --skip-router', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--skip-router'], (_file) => {
<add> expect(file('app/router.js'))
<add> .to.contain('this.route(\'foo\')');
<add>
<add> }).then(() => {
<add> expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<add>
<add> it('route foo --skip-router', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--skip-router'], (_file) => {
<ide> expect(_file('app/routes/foo.js')).to.exist;
<ide> expect(_file('app/templates/foo.hbs')).to.exist;
<ide> expect(_file('tests/unit/routes/foo-test.js')).to.exist;
<ide> expect(file('app/router.js')).to.not.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'foo\')'));
<del> });
<add> }).then(() => {
<add> expect(file('app/router.js')).to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<ide>
<del> it('route foo with --path', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--path=:foo_id/show'], (_file) => {
<add> it('route foo --path=:foo_id/show', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--path=:foo_id/show'], (_file) => {
<ide> expect(_file('app/routes/foo.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> .to.contain('this.route(\'foo\', {')
<ide> .to.contain('path: \':foo_id/show\'')
<ide> .to.contain('});');
<del> }))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'foo\'')
<del> .to.not.contain('path: \':foo_id/show\''));
<del> });
<ide>
<del> it('route --reset-namespace', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'parent/child', '--reset-namespace'], (_file) => {
<add> }).then(() => {
<add> expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'foo\'')
<add> .to.not.contain('path: \':foo_id/show\'');
<add> });
<add> });
<add>
<add> it('route parent/child --reset-namespace', function() {
<add> return emberGenerateDestroy(['route', 'parent/child', '--reset-namespace'], (_file) => {
<ide> expect(_file('app/routes/child.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> .to.contain('this.route(\'child\', {')
<ide> .to.contain('resetNamespace: true')
<ide> .to.contain('});');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('route --reset-namespace --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'parent/child', '--reset-namespace', '--pod'], (_file) => {
<add> it('route parent/child --reset-namespace --pod', function() {
<add> return emberGenerateDestroy(['route', 'parent/child', '--reset-namespace', '--pod'], (_file) => {
<ide> expect(_file('app/child/route.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> .to.contain('this.route(\'child\', {')
<ide> .to.contain('resetNamespace: true')
<ide> .to.contain('});');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('route index', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'index'], (_file) => {
<add> it('route index', function() {
<add> return emberGenerateDestroy(['route', 'index'], (_file) => {
<ide> expect(_file('app/routes/index.js')).to.exist;
<ide> expect(_file('app/templates/index.hbs')).to.exist;
<ide> expect(_file('tests/unit/routes/index-test.js')).to.exist;
<ide> expect(file('app/router.js')).to.not.contain('this.route(\'index\')');
<del> }))
<del> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'index\')'));
<del> });
<add> }).then(() => {
<add> expect(file('app/router.js')).to.not.contain('this.route(\'index\')');
<add> });
<add> });
<ide>
<del> it('route application', function() {
<del> return emberNew()
<del> .then(() => emberGenerate(['route', 'application']))
<del> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'application\')'));
<del> });
<add> it('route application', function() {
<add> return emberGenerate(['route', 'application']).then(() => {
<add> expect(file('app/router.js')).to.not.contain('this.route(\'application\')');
<add> });
<add> });
<ide>
<del> it('route basic isn\'t added to router', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'basic'], (_file) => {
<add> it('route basic', function() {
<add> return emberGenerateDestroy(['route', 'basic'], (_file) => {
<ide> expect(_file('app/routes/basic.js')).to.exist;
<ide> expect(file('app/router.js')).to.not.contain('this.route(\'basic\')');
<del> }))
<del> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'basic\')'));
<add> }).then(() => {
<add> expect(file('app/router.js')).to.not.contain('this.route(\'basic\')');
<add> });
<add> });
<add>
<add>
<add> it('route foo --pod', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<add> expect(_file('app/foo/route.js'))
<add> .to.contain('import Route from \'@ember/routing/route\';')
<add> .to.contain('export default Route.extend({\n});');
<add>
<add> expect(_file('app/foo/template.hbs'))
<add> .to.equal('{{outlet}}');
<add>
<add> expect(_file('tests/unit/foo/route-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<add>
<add> expect(file('app/router.js'))
<add> .to.contain('this.route(\'foo\')');
<add>
<add> }).then(() => {
<add> expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<add>
<add>     it('route foo --pod --path=:foo_id/show', function() {
<add> return emberGenerate(['route', 'foo', '--pod', '--path=:foo_id/show'])
<add> .then(() => expect(file('app/router.js'))
<add> .to.contain('this.route(\'foo\', {')
<add> .to.contain('path: \':foo_id/show\'')
<add> .to.contain('});'))
<add>
<add> .then(() => emberDestroy(['route', 'foo', '--pod', '--path=:foo_id/show']))
<add> .then(() => expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'foo\', {')
<add> .to.not.contain('path: \':foo_id/show\''));
<add> });
<add>
<add> it('route index --pod', function() {
<add> return emberGenerate(['route', 'index', '--pod'])
<add> .then(() => expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'index\')'));
<add> });
<add>
<add> it('route application --pod', function() {
<add> return emberGenerate(['route', 'application', '--pod'])
<add> .then(() => expect(file('app/application/route.js')).to.exist)
<add> .then(() => expect(file('app/application/template.hbs')).to.exist)
<add> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'application\')'));
<add> });
<add>
<add> it('route basic --pod', function() {
<add> return emberGenerateDestroy(['route', 'basic', '--pod'], (_file) => {
<add> expect(_file('app/basic/route.js')).to.exist;
<add> expect(file('app/router.js'))
<add>           .to.not.contain('this.route(\'basic\')');
<add> });
<add> });
<add>
<add> it('route-test foo', function() {
<add> return emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<add> expect(_file('tests/unit/routes/foo-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('route foo --pod', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<add> expect(_file('app/pods/foo/route.js'))
<add> .to.contain('import Route from \'@ember/routing/route\';')
<add> .to.contain('export default Route.extend({\n});');
<add>
<add> expect(_file('app/pods/foo/template.hbs'))
<add> .to.equal('{{outlet}}');
<add>
<add> expect(_file('tests/unit/pods/foo/route-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<add>
<add> expect(file('app/router.js'))
<add> .to.contain('this.route(\'foo\')');
<add>
<add> }).then(() => {
<add> expect(file('app/router.js'))
<add> .to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.11.0');
<add> });
<add>
<add> it('route-test foo', function() {
<add> return emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<add> expect(_file('tests/unit/routes/foo-test.js'))
<add> .to.contain('import { describeModule, it } from \'ember-mocha\';')
<add> .to.contain('describeModule(\'route:foo\', \'Unit | Route | foo\'');
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.12.0');
<add> });
<add>
<add> it('route-test foo', function() {
<add> return emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<add> expect(_file('tests/unit/routes/foo-test.js'))
<add> .to.contain('import { describe, it } from \'mocha\';')
<add> .to.contain('import { setupTest } from \'ember-mocha\';')
<add> .to.contain('describe(\'Unit | Route | foo\', function() {')
<add> .to.contain('setupTest(\'route:foo\',');
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('in-addon route foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo'], (_file) => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('route foo', function() {
<add> return emberGenerateDestroy(['route', 'foo'], (_file) => {
<ide> expect(_file('addon/routes/foo.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide>
<ide> expect(file('tests/dummy/app/router.js'))
<ide> .to.not.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('tests/dummy/app/router.js'))
<del> .to.not.contain('this.route(\'foo\')'));
<del> });
<ide>
<del> it('in-addon route foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo/bar'], (_file) => {
<add> }).then(() => {
<add> expect(file('tests/dummy/app/router.js'))
<add> .to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<add>
<add> it('route foo/bar', function() {
<add> return emberGenerateDestroy(['route', 'foo/bar'], (_file) => {
<ide> expect(_file('addon/routes/foo/bar.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide>
<ide> expect(file('tests/dummy/app/router.js'))
<ide> .to.not.contain('this.route(\'bar\')');
<del> }))
<del> .then(() => expect(file('tests/dummy/app/router.js'))
<del> .to.not.contain('this.route(\'bar\')'));
<del> });
<ide>
<del> it('dummy route foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--dummy'], (_file) => {
<add> }).then(() => {
<add> expect(file('tests/dummy/app/router.js'))
<add> .to.not.contain('this.route(\'bar\')');
<add> });
<add> });
<add>
<add> it('route foo --dummy', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--dummy'], (_file) => {
<ide> expect(_file('tests/dummy/app/routes/foo.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide>
<ide> expect(file('tests/dummy/app/router.js'))
<ide> .to.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('tests/dummy/app/router.js'))
<del> .to.not.contain('this.route(\'foo\')'));
<del> });
<ide>
<del> it('dummy route foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo/bar', '--dummy'], (_file) => {
<add> }).then(() => {
<add> expect(file('tests/dummy/app/router.js'))
<add> .to.not.contain('this.route(\'foo\')');
<add> });
<add> });
<add>
<add> it('route foo/bar --dummy', function() {
<add> return emberGenerateDestroy(['route', 'foo/bar', '--dummy'], (_file) => {
<ide> expect(_file('tests/dummy/app/routes/foo/bar.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> expect(file('tests/dummy/app/router.js'))
<ide> .to.contain('this.route(\'foo\', function() {')
<ide> .to.contain('this.route(\'bar\')');
<del> }))
<del> .then(() => expect(file('tests/dummy/app/router.js'))
<del> .to.not.contain('this.route(\'bar\')'));
<add>
<add> }).then(() => {
<add> expect(file('tests/dummy/app/router.js'))
<add> .to.not.contain('this.route(\'bar\')');
<add> });
<add> });
<add>
<add> it('route foo --pod', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<add> expect(_file('addon/foo/route.js'))
<add> .to.contain('import Route from \'@ember/routing/route\';')
<add> .to.contain('export default Route.extend({\n});');
<add>
<add> expect(_file('addon/foo/template.hbs'))
<add> .to.equal('{{outlet}}');
<add>
<add> expect(_file('app/foo/route.js'))
<add> .to.contain('export { default } from \'my-addon/foo/route\';');
<add>
<add> expect(_file('app/foo/template.js'))
<add> .to.contain('export { default } from \'my-addon/foo/template\';');
<add>
<add> expect(_file('tests/unit/foo/route-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<add> });
<add> });
<add>
<add> it('route-test foo', function() {
<add> return emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<add> expect(_file('tests/unit/routes/foo-test.js'))
<add> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<add> .to.contain('moduleFor(\'route:foo\'');
<add> });
<add> });
<ide> });
<ide>
<del> it('in-repo-addon route foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--in-repo-addon=my-addon'], (_file) => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('route foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['route', 'foo', '--in-repo-addon=my-addon'], (_file) => {
<ide> expect(_file('lib/my-addon/addon/routes/foo.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> expect(_file('tests/unit/routes/foo-test.js'))
<ide> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<ide> .to.contain('moduleFor(\'route:foo\'');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon route foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo/bar', '--in-repo-addon=my-addon'], (_file) => {
<add> it('route foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['route', 'foo/bar', '--in-repo-addon=my-addon'], (_file) => {
<ide> expect(_file('lib/my-addon/addon/routes/foo/bar.js'))
<ide> .to.contain('import Route from \'@ember/routing/route\';')
<ide> .to.contain('export default Route.extend({\n});');
<ide> describe('Blueprint: route', function() {
<ide> expect(_file('tests/unit/routes/foo/bar-test.js'))
<ide> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<ide> .to.contain('moduleFor(\'route:foo/bar\'');
<del> }));
<del> });
<del>
<del> it('route foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<del> expect(_file('app/foo/route.js'))
<del> .to.contain('import Route from \'@ember/routing/route\';')
<del> .to.contain('export default Route.extend({\n});');
<del>
<del> expect(_file('app/foo/template.hbs'))
<del> .to.equal('{{outlet}}');
<del>
<del> expect(_file('tests/unit/foo/route-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<del>
<del> expect(file('app/router.js'))
<del> .to.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'foo\')'));
<del> });
<del>
<del> it('route foo --pod with --path', function() {
<del> return emberNew()
<del> .then(() => emberGenerate(['route', 'foo', '--pod', '--path=:foo_id/show']))
<del> .then(() => expect(file('app/router.js'))
<del> .to.contain('this.route(\'foo\', {')
<del> .to.contain('path: \':foo_id/show\'')
<del> .to.contain('});'))
<del>
<del> .then(() => emberDestroy(['route', 'foo', '--pod', '--path=:foo_id/show']))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'foo\', {')
<del> .to.not.contain('path: \':foo_id/show\''));
<del> });
<del>
<del> it('route foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<del> expect(_file('app/pods/foo/route.js'))
<del> .to.contain('import Route from \'@ember/routing/route\';')
<del> .to.contain('export default Route.extend({\n});');
<del>
<del> expect(_file('app/pods/foo/template.hbs'))
<del> .to.equal('{{outlet}}');
<del>
<del> expect(_file('tests/unit/pods/foo/route-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<del>
<del> expect(file('app/router.js'))
<del> .to.contain('this.route(\'foo\')');
<del> }))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'foo\')'));
<del> });
<del>
<del> it('route index --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerate(['route', 'index', '--pod']))
<del> .then(() => expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'index\')'));
<del> });
<del>
<del> it('route application --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerate(['route', 'application', '--pod']))
<del> .then(() => expect(file('app/application/route.js')).to.exist)
<del> .then(() => expect(file('app/application/template.hbs')).to.exist)
<del> .then(() => expect(file('app/router.js')).to.not.contain('this.route(\'application\')'));
<del> });
<del>
<del> it('route basic --pod isn\'t added to router', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route', 'basic', '--pod'], (_file) => {
<del> expect(_file('app/basic/route.js')).to.exist;
<del> expect(file('app/router.js'))
<del> .to.not.contain('this.route(\'index\')');
<del> }));
<del> });
<del>
<del> it('in-addon route foo --pod', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route', 'foo', '--pod'], (_file) => {
<del> expect(_file('addon/foo/route.js'))
<del> .to.contain('import Route from \'@ember/routing/route\';')
<del> .to.contain('export default Route.extend({\n});');
<del>
<del> expect(_file('addon/foo/template.hbs'))
<del> .to.equal('{{outlet}}');
<del>
<del> expect(_file('app/foo/route.js'))
<del> .to.contain('export { default } from \'my-addon/foo/route\';');
<del>
<del> expect(_file('app/foo/template.js'))
<del> .to.contain('export { default } from \'my-addon/foo/template\';');
<del>
<del> expect(_file('tests/unit/foo/route-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<del> }));
<del> });
<del>
<del> it('route-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<del> expect(_file('tests/unit/routes/foo-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<del> }));
<del> });
<del>
<del> it('in-addon route-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<del> expect(_file('tests/unit/routes/foo-test.js'))
<del> .to.contain('import { moduleFor, test } from \'ember-qunit\';')
<del> .to.contain('moduleFor(\'route:foo\'');
<del> }));
<del> });
<del>
<del> it('route-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<del> expect(_file('tests/unit/routes/foo-test.js'))
<del> .to.contain('import { describeModule, it } from \'ember-mocha\';')
<del> .to.contain('describeModule(\'route:foo\', \'Unit | Route | foo\'');
<del> }));
<del> });
<del>
<del> it('route-test foo for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['route-test', 'foo'], (_file) => {
<del> expect(_file('tests/unit/routes/foo-test.js'))
<del> .to.contain('import { describe, it } from \'mocha\';')
<del> .to.contain('import { setupTest } from \'ember-mocha\';')
<del> .to.contain('describe(\'Unit | Route | foo\', function() {')
<del> .to.contain('setupTest(\'route:foo\',');
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/service-test.js
<ide> const generateFakePackageManifest = require('../helpers/generate-fake-package-ma
<ide> describe('Blueprint: service', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('service foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['service', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('service foo', function() {
<add> return emberGenerateDestroy(['service', 'foo'], _file => {
<ide> expect(_file('app/services/foo.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<ide> expect(_file('tests/unit/services/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('service foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['service', 'foo/bar'], _file => {
<add> it('service foo/bar', function() {
<add> return emberGenerateDestroy(['service', 'foo/bar'], _file => {
<ide> expect(_file('app/services/foo/bar.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<ide> expect(_file('tests/unit/services/foo/bar-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo/bar'");
<del> }));
<del> });
<del> it('in-addon service foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['service', 'foo'], _file => {
<del> expect(_file('addon/services/foo.js'))
<del> .to.contain("import Service from '@ember/service';")
<del> .to.contain('export default Service.extend({\n});');
<del>
<del> expect(_file('app/services/foo.js'))
<del> .to.contain("export { default } from 'my-addon/services/foo';");
<del>
<del> expect(_file('tests/unit/services/foo-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('service:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon service foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['service', 'foo/bar'], _file => {
<del> expect(_file('addon/services/foo/bar.js'))
<del> .to.contain("import Service from '@ember/service';")
<del> .to.contain('export default Service.extend({\n});');
<del>
<del> expect(_file('app/services/foo/bar.js'))
<del> .to.contain("export { default } from 'my-addon/services/foo/bar';");
<del>
<del> expect(_file('tests/unit/services/foo/bar-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('service:foo/bar'");
<del> }));
<del> });
<del>
<del> it('service foo --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['service', 'foo', '--pod'], _file => {
<add> it('service foo --pod', function() {
<add> return emberGenerateDestroy(['service', 'foo', '--pod'], _file => {
<ide> expect(_file('app/foo/service.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<ide> expect(_file('tests/unit/foo/service-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('service foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['service', 'foo/bar', '--pod'], _file => {
<add> it('service foo/bar --pod', function() {
<add> return emberGenerateDestroy(['service', 'foo/bar', '--pod'], _file => {
<ide> expect(_file('app/foo/bar/service.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<ide> expect(_file('tests/unit/foo/bar/service-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo/bar'");
<del> }));
<add> });
<add> });
<add>
<add> it('service-test foo', function() {
<add> return emberGenerateDestroy(['service-test', 'foo'], _file => {
<add> expect(_file('tests/unit/services/foo-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('service:foo'");
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('service foo --pod', function() {
<add> return emberGenerateDestroy(['service', 'foo', '--pod'], _file => {
<add> expect(_file('app/pods/foo/service.js'))
<add> .to.contain("import Service from '@ember/service';")
<add> .to.contain('export default Service.extend({\n});');
<add>
<add> expect(_file('tests/unit/pods/foo/service-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('service:foo'");
<add> });
<add> });
<add>
<add> it('service foo/bar --pod', function() {
<add> return emberGenerateDestroy(['service', 'foo/bar', '--pod'], _file => {
<add> expect(_file('app/pods/foo/bar/service.js'))
<add> .to.contain("import Service from '@ember/service';")
<add> .to.contain('export default Service.extend({\n});');
<add>
<add> expect(_file('tests/unit/pods/foo/bar/service-test.js'))
<add> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<add> .to.contain("moduleFor('service:foo/bar'");
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.11.0');
<add> });
<add>
<add> it('service-test foo', function() {
<add> return emberGenerateDestroy(['service-test', 'foo'], _file => {
<add> expect(_file('tests/unit/services/foo-test.js'))
<add> .to.contain("import { describeModule, it } from 'ember-mocha';")
<add> .to.contain("describeModule('service:foo', 'Unit | Service | foo'");
<add> });
<add> });
<add>
<add> it('service-test foo --pod', function() {
<add> return emberGenerateDestroy(['service-test', 'foo', '--pod'], _file => {
<add> expect(_file('tests/unit/foo/service-test.js'))
<add> .to.contain("import { describeModule, it } from 'ember-mocha';")
<add> .to.contain("describeModule('service:foo', 'Unit | Service | foo'");
<add> });
<add> });
<add> });
<add>
<add> describe('with [email protected]', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> generateFakePackageManifest('ember-cli-mocha', '0.12.0');
<add> });
<add>
<add> it('service-test foo', function() {
<add> return emberGenerateDestroy(['service-test', 'foo'], _file => {
<add> expect(_file('tests/unit/services/foo-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import { setupTest } from 'ember-mocha';")
<add> .to.contain("describe('Unit | Service | foo', function() {")
<add> .to.contain("setupTest('service:foo',");
<add> });
<add> });
<add>
<add> it('service-test foo --pod', function() {
<add> return emberGenerateDestroy(['service-test', 'foo', '--pod'], _file => {
<add> expect(_file('tests/unit/foo/service-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import { setupTest } from 'ember-mocha';")
<add> .to.contain("describe('Unit | Service | foo', function() {")
<add> .to.contain("setupTest('service:foo',");
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('service foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['service', 'foo', '--pod'], _file => {
<del> expect(_file('app/pods/foo/service.js'))
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('service foo', function() {
<add> return emberGenerateDestroy(['service', 'foo'], _file => {
<add> expect(_file('addon/services/foo.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<del> expect(_file('tests/unit/pods/foo/service-test.js'))
<add> expect(_file('app/services/foo.js'))
<add> .to.contain("export { default } from 'my-addon/services/foo';");
<add>
<add> expect(_file('tests/unit/services/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo'");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('service foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['service', 'foo/bar', '--pod'], _file => {
<del> expect(_file('app/pods/foo/bar/service.js'))
<add> it('service foo/bar', function() {
<add> return emberGenerateDestroy(['service', 'foo/bar'], _file => {
<add> expect(_file('addon/services/foo/bar.js'))
<ide> .to.contain("import Service from '@ember/service';")
<ide> .to.contain('export default Service.extend({\n});');
<ide>
<del> expect(_file('tests/unit/pods/foo/bar/service-test.js'))
<del> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('service:foo/bar'");
<del> }));
<del> });
<add> expect(_file('app/services/foo/bar.js'))
<add> .to.contain("export { default } from 'my-addon/services/foo/bar';");
<ide>
<del> it('service-test foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['service-test', 'foo'], _file => {
<del> expect(_file('tests/unit/services/foo-test.js'))
<add> expect(_file('tests/unit/services/foo/bar-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<del> .to.contain("moduleFor('service:foo'");
<del> }));
<del> });
<add> .to.contain("moduleFor('service:foo/bar'");
<add> });
<add> });
<ide>
<del> it('in-addon service-test foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['service-test', 'foo'], _file => {
<add> it('service-test foo', function() {
<add> return emberGenerateDestroy(['service-test', 'foo'], _file => {
<ide> expect(_file('tests/unit/services/foo-test.js'))
<ide> .to.contain("import { moduleFor, test } from 'ember-qunit';")
<ide> .to.contain("moduleFor('service:foo'");
<ide>
<ide> expect(_file('app/service-test/foo.js'))
<ide> .to.not.exist;
<del> }));
<del> });
<del>
<del> it('service-test foo for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['service-test', 'foo'], _file => {
<del> expect(_file('tests/unit/services/foo-test.js'))
<del> .to.contain("import { describeModule, it } from 'ember-mocha';")
<del> .to.contain("describeModule('service:foo', 'Unit | Service | foo'");
<del> }));
<del> });
<del>
<del> it('service-test foo for mocha --pod', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.11.0'))
<del> .then(() => emberGenerateDestroy(['service-test', 'foo', '--pod'], _file => {
<del> expect(_file('tests/unit/foo/service-test.js'))
<del> .to.contain("import { describeModule, it } from 'ember-mocha';")
<del> .to.contain("describeModule('service:foo', 'Unit | Service | foo'");
<del> }));
<del> });
<del>
<del> it('service-test foo for mocha v0.12+', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['service-test', 'foo'], _file => {
<del> expect(_file('tests/unit/services/foo-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import { setupTest } from 'ember-mocha';")
<del> .to.contain("describe('Unit | Service | foo', function() {")
<del> .to.contain("setupTest('service:foo',");
<del> }));
<del> });
<del>
<del> it('service-test foo for mocha v0.12+ --pod', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => generateFakePackageManifest('ember-cli-mocha', '0.12.0'))
<del> .then(() => emberGenerateDestroy(['service-test', 'foo', '--pod'], _file => {
<del> expect(_file('tests/unit/foo/service-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import { setupTest } from 'ember-mocha';")
<del> .to.contain("describe('Unit | Service | foo', function() {")
<del> .to.contain("setupTest('service:foo',");
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/template-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: template', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('template foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['template', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('template foo', function() {
<add> return emberGenerateDestroy(['template', 'foo'], _file => {
<ide> expect(_file('app/templates/foo.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('template foo/bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar'], _file => {
<add> it('template foo/bar', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar'], _file => {
<ide> expect(_file('app/templates/foo/bar.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('template foo --pod', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({
<del> usePods: true
<del> }))
<del> .then(() => emberGenerateDestroy(['template', 'foo'], _file => {
<del> expect(_file('app/foo/template.hbs')).to.equal('');
<del> }));
<del> });
<add> describe('with usePods', function() {
<add> beforeEach(function() {
<add> setupPodConfig({ usePods: true });
<add> });
<ide>
<del> it('template foo/bar --pod', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({
<del> usePods: true
<del> }))
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar'], _file => {
<del> expect(_file('app/foo/bar/template.hbs')).to.equal('');
<del> }));
<del> });
<add> it('template foo', function() {
<add> return emberGenerateDestroy(['template', 'foo'], _file => {
<add> expect(_file('app/foo/template.hbs')).to.equal('');
<add> });
<add> });
<ide>
<del> it('template foo --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({
<del> usePods: true,
<del> podModulePrefix: true
<del> }))
<del> .then(() => emberGenerateDestroy(['template', 'foo'], _file => {
<del> expect(_file('app/pods/foo/template.hbs')).to.equal('');
<del> }));
<del> });
<add> it('template foo/bar', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar'], _file => {
<add> expect(_file('app/foo/bar/template.hbs')).to.equal('');
<add> });
<add> });
<add> });
<add>
<add> describe('with usePods + podModulePrefix', function() {
<add> beforeEach(function() {
<add> setupPodConfig({
<add> usePods: true,
<add> podModulePrefix: true
<add> });
<add> });
<ide>
<del> it('template foo/bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({
<del> usePods: true,
<del> podModulePrefix: true
<del> }))
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar'], _file => {
<del> expect(_file('app/pods/foo/bar/template.hbs')).to.equal('');
<del> }));
<add> it('template foo', function() {
<add> return emberGenerateDestroy(['template', 'foo'], _file => {
<add> expect(_file('app/pods/foo/template.hbs')).to.equal('');
<add> });
<add> });
<add>
<add> it('template foo/bar', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar'], _file => {
<add> expect(_file('app/pods/foo/bar/template.hbs')).to.equal('');
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('in-addon template foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo'], _file => {
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('template foo', function() {
<add> return emberGenerateDestroy(['template', 'foo'], _file => {
<ide> expect(_file('addon/templates/foo.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon template foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar'], _file => {
<add> it('template foo/bar', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar'], _file => {
<ide> expect(_file('addon/templates/foo/bar.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy template foo', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo', '--dummy'], _file => {
<add> it('template foo --dummy', function() {
<add> return emberGenerateDestroy(['template', 'foo', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/templates/foo.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('dummy template foo/bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar', '--dummy'], _file => {
<add> it('template foo/bar --dummy', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/templates/foo/bar.hbs')).to.equal('');
<del> }));
<add> });
<add> });
<ide> });
<ide>
<del> it('in-repo-addon template foo', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo', '--in-repo-addon=my-addon'], _file => {
<add> describe('in in-repo-addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'in-repo-addon' });
<add> });
<add>
<add> it('template foo --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['template', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/templates/foo.hbs')).to.equal('');
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-repo-addon template foo/bar', function() {
<del> return emberNew({ target: 'in-repo-addon' })
<del> .then(() => emberGenerateDestroy(['template', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<add> it('template foo/bar --in-repo-addon=my-addon', function() {
<add> return emberGenerateDestroy(['template', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/templates/foo/bar.hbs')).to.equal('');
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/test-helper-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: test-helper', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('test-helper foo', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['test-helper', 'foo'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('test-helper foo', function() {
<add> return emberGenerateDestroy(['test-helper', 'foo'], _file => {
<ide> expect(_file('tests/helpers/foo.js'))
<ide> .to.contain("import { registerAsyncHelper } from '@ember/test';")
<ide> .to.contain('export default registerAsyncHelper(\'foo\', function(app) {\n\n}');
<del> }));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/util-test.js
<ide> const expect = chai.expect;
<ide> describe('Blueprint: util', function() {
<ide> setupTestHooks(this);
<ide>
<del> it('util foo-bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['util', 'foo-bar'], _file => {
<add> describe('in app', function() {
<add> beforeEach(function() {
<add> return emberNew();
<add> });
<add>
<add> it('util foo-bar', function() {
<add> return emberGenerateDestroy(['util', 'foo-bar'], _file => {
<ide> expect(_file('app/utils/foo-bar.js'))
<ide> .to.contain('export default function fooBar() {\n' +
<ide> ' return true;\n' +
<ide> '}');
<ide>
<ide> expect(_file('tests/unit/utils/foo-bar-test.js'))
<ide> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('util foo-bar/baz', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['util', 'foo/bar-baz'], _file => {
<add> it('util foo-bar/baz', function() {
<add> return emberGenerateDestroy(['util', 'foo/bar-baz'], _file => {
<ide> expect(_file('app/utils/foo/bar-baz.js'))
<ide> .to.contain('export default function fooBarBaz() {\n' +
<ide> ' return true;\n' +
<ide> '}');
<ide>
<ide> expect(_file('tests/unit/utils/foo/bar-baz-test.js'))
<ide> .to.contain("import fooBarBaz from 'my-app/utils/foo/bar-baz';");
<del> }));
<del> });
<add> });
<add> });
<ide>
<del> it('in-addon util foo-bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['util', 'foo-bar'], _file => {
<del> expect(_file('addon/utils/foo-bar.js'))
<add> it('util foo-bar --pod', function() {
<add> return emberGenerateDestroy(['util', 'foo-bar', '--pod'], _file => {
<add> expect(_file('app/utils/foo-bar.js'))
<ide> .to.contain('export default function fooBar() {\n' +
<ide> ' return true;\n' +
<ide> '}');
<ide>
<del> expect(_file('app/utils/foo-bar.js'))
<del> .to.contain("export { default } from 'my-addon/utils/foo-bar';");
<del>
<ide> expect(_file('tests/unit/utils/foo-bar-test.js'))
<del> .to.contain("import fooBar from 'dummy/utils/foo-bar';");
<del> }));
<del> });
<add> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<add> });
<add> });
<ide>
<del> it('in-addon util foo-bar/baz', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['util', 'foo/bar-baz'], _file => {
<del> expect(_file('addon/utils/foo/bar-baz.js'))
<add> it('util foo-bar/baz --pod', function() {
<add> return emberGenerateDestroy(['util', 'foo/bar-baz', '--pod'], _file => {
<add> expect(_file('app/utils/foo/bar-baz.js'))
<ide> .to.contain('export default function fooBarBaz() {\n' +
<ide> ' return true;\n' +
<ide> '}');
<ide>
<del> expect(_file('app/utils/foo/bar-baz.js'))
<del> .to.contain("export { default } from 'my-addon/utils/foo/bar-baz';");
<del>
<ide> expect(_file('tests/unit/utils/foo/bar-baz-test.js'))
<del> .to.contain("import fooBarBaz from 'dummy/utils/foo/bar-baz';");
<del> }));
<del> });
<del>
<del> it('util foo-bar --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['util', 'foo-bar', '--pod'], _file => {
<del> expect(_file('app/utils/foo-bar.js'))
<del> .to.contain('export default function fooBar() {\n' +
<del> ' return true;\n' +
<del> '}');
<add> .to.contain("import fooBarBaz from 'my-app/utils/foo/bar-baz';");
<add> });
<add> });
<ide>
<add> it('util-test foo-bar', function() {
<add> return emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<ide> expect(_file('tests/unit/utils/foo-bar-test.js'))
<ide> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<del> }));
<add> });
<add> });
<add>
<add> describe('with podModulePrefix', function() {
<add> beforeEach(function() {
<add> return setupPodConfig({ podModulePrefix: true });
<add> });
<add>
<add> it('util foo-bar --pod', function() {
<add> return emberGenerateDestroy(['util', 'foo-bar', '--pod'], _file => {
<add> expect(_file('app/utils/foo-bar.js'))
<add> .to.contain('export default function fooBar() {\n' +
<add> ' return true;\n' +
<add> '}');
<add>
<add> expect(_file('tests/unit/utils/foo-bar-test.js'))
<add> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<add> });
<add> });
<add> });
<add>
<add> describe('with ember-cli-mocha', function() {
<add> beforeEach(function() {
<add> modifyPackages([
<add> { name: 'ember-cli-qunit', delete: true },
<add> { name: 'ember-cli-mocha', dev: true }
<add> ]);
<add> });
<add>
<add> it('util-test foo-bar', function() {
<add> return emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<add> expect(_file('tests/unit/utils/foo-bar-test.js'))
<add> .to.contain("import { describe, it } from 'mocha';")
<add> .to.contain("import fooBar from 'my-app/utils/foo-bar';")
<add> .to.contain("describe('Unit | Utility | foo bar', function() {");
<add> });
<add> });
<add> });
<ide> });
<ide>
<del> it('util foo-bar --pod podModulePrefix', function() {
<del> return emberNew()
<del> .then(() => setupPodConfig({ podModulePrefix: true }))
<del> .then(() => emberGenerateDestroy(['util', 'foo-bar', '--pod'], _file => {
<del> expect(_file('app/utils/foo-bar.js'))
<add> describe('in addon', function() {
<add> beforeEach(function() {
<add> return emberNew({ target: 'addon' });
<add> });
<add>
<add> it('util foo-bar', function() {
<add> return emberGenerateDestroy(['util', 'foo-bar'], _file => {
<add> expect(_file('addon/utils/foo-bar.js'))
<ide> .to.contain('export default function fooBar() {\n' +
<del> ' return true;\n' +
<del> '}');
<add> ' return true;\n' +
<add> '}');
<add>
<add> expect(_file('app/utils/foo-bar.js'))
<add> .to.contain("export { default } from 'my-addon/utils/foo-bar';");
<ide>
<ide> expect(_file('tests/unit/utils/foo-bar-test.js'))
<del> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<del> }));
<del> });
<add> .to.contain("import fooBar from 'dummy/utils/foo-bar';");
<add> });
<add> });
<ide>
<del> it('util foo-bar/baz --pod', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['util', 'foo/bar-baz', '--pod'], _file => {
<del> expect(_file('app/utils/foo/bar-baz.js'))
<add> it('util foo-bar/baz', function() {
<add> return emberGenerateDestroy(['util', 'foo/bar-baz'], _file => {
<add> expect(_file('addon/utils/foo/bar-baz.js'))
<ide> .to.contain('export default function fooBarBaz() {\n' +
<del> ' return true;\n' +
<del> '}');
<add> ' return true;\n' +
<add> '}');
<ide>
<del> expect(_file('tests/unit/utils/foo/bar-baz-test.js'))
<del> .to.contain("import fooBarBaz from 'my-app/utils/foo/bar-baz';");
<del> }));
<del> });
<add> expect(_file('app/utils/foo/bar-baz.js'))
<add> .to.contain("export { default } from 'my-addon/utils/foo/bar-baz';");
<ide>
<del> it('util-test foo-bar', function() {
<del> return emberNew()
<del> .then(() => emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<del> expect(_file('tests/unit/utils/foo-bar-test.js'))
<del> .to.contain("import fooBar from 'my-app/utils/foo-bar';");
<del> }));
<del> });
<add> expect(_file('tests/unit/utils/foo/bar-baz-test.js'))
<add> .to.contain("import fooBarBaz from 'dummy/utils/foo/bar-baz';");
<add> });
<add> });
<ide>
<del> it('in-addon util-test foo-bar', function() {
<del> return emberNew({ target: 'addon' })
<del> .then(() => emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<add> it('util-test foo-bar', function() {
<add> return emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<ide> expect(_file('tests/unit/utils/foo-bar-test.js'))
<ide> .to.contain("import fooBar from 'dummy/utils/foo-bar';");
<del> }));
<del> });
<del>
<del> it('util-test foo-bar for mocha', function() {
<del> return emberNew()
<del> .then(() => modifyPackages([
<del> { name: 'ember-cli-qunit', delete: true },
<del> { name: 'ember-cli-mocha', dev: true }
<del> ]))
<del> .then(() => emberGenerateDestroy(['util-test', 'foo-bar'], _file => {
<del> expect(_file('tests/unit/utils/foo-bar-test.js'))
<del> .to.contain("import { describe, it } from 'mocha';")
<del> .to.contain("import fooBar from 'my-app/utils/foo-bar';")
<del> .to.contain("describe('Unit | Utility | foo bar', function() {");
<del> }));
<add> });
<add> });
<ide> });
<ide> }); | 17 |
Javascript | Javascript | fix typo in pre_execution.js | 97815bd6e9dee6a893d94bc9085958f9af23ac85 | <ide><path>lib/internal/bootstrap/pre_execution.js
<ide> function patchProcessObject(expandArgv1) {
<ide> }
<ide>
<ide> // TODO(joyeecheung): most of these should be deprecated and removed,
<del> // execpt some that we need to be able to mutate during run time.
<add> // except some that we need to be able to mutate during run time.
<ide> addReadOnlyProcessAlias('_eval', '--eval');
<ide> addReadOnlyProcessAlias('_print_eval', '--print');
<ide> addReadOnlyProcessAlias('_syntax_check_only', '--check'); | 1 |
Javascript | Javascript | add single process tls connection benchmark | 23dc0992995fb1307d516e6c6f33439da8bc112b | <ide><path>benchmark/tls-connect.js
<add>
<add>var assert = require('assert'),
<add> fs = require('fs'),
<add> path = require('path'),
<add> tls = require('tls');
<add>
<add>
<add>var target_connections = 10000,
<add> concurrency = 10;
<add>
<add>for (var i = 2; i < process.argv.length; i++) {
<add> switch (process.argv[i]) {
<add> case '-c':
<add> concurrency = ~~process.argv[++i];
<add> break;
<add>
<add> case '-n':
<add> target_connections = ~~process.argv[++i];
<add> break;
<add>
<add> default:
<add> throw new Error('Invalid flag: ' + process.argv[i]);
<add> }
<add>}
<add>
<add>
<add>var cert_dir = path.resolve(__dirname, '../test/fixtures'),
<add> options = { key: fs.readFileSync(cert_dir + '/test_key.pem'),
<add> cert: fs.readFileSync(cert_dir + '/test_cert.pem'),
<add> ca: [ fs.readFileSync(cert_dir + '/test_ca.pem') ] };
<add>
<add>var server = tls.createServer(options, onConnection);
<add>server.listen(8000);
<add>
<add>
<add>var initiated_connections = 0,
<add> server_connections = 0,
<add> client_connections = 0,
<add> start = Date.now();
<add>
<add>for (var i = 0; i < concurrency; i++)
<add> makeConnection();
<add>
<add>
<add>process.on('exit', onExit);
<add>
<add>
<add>function makeConnection() {
<add> if (initiated_connections >= target_connections)
<add> return;
<add>
<add> initiated_connections++;
<add>
<add> var conn = tls.connect(8000, function() {
<add> client_connections++;
<add>
<add> if (client_connections % 100 === 0)
<add> console.log(client_connections + ' of ' + target_connections +
<add> ' connections made');
<add>
<add> conn.end();
<add> makeConnection();
<add> });
<add>}
<add>
<add>
<add>function onConnection(conn) {
<add> server_connections++;
<add>
<add> if (server_connections === target_connections)
<add> server.close();
<add>}
<add>
<add>
<add>function onExit() {
<add> var end = Date.now(),
<add> s = (end - start) / 1000,
<add> persec = Math.round(target_connections / s);
<add>
<add> assert.equal(initiated_connections, target_connections);
<add> assert.equal(client_connections, target_connections);
<add> assert.equal(server_connections, target_connections);
<add>
<add> console.log('%d connections in %d s', target_connections, s);
<add> console.log('%d connections per second', persec);
<add>} | 1 |
Python | Python | use assert_equal instead of assertequals | 8e9f0bdedca3edd5ba2e8902f05406acff8b8b44 | <ide><path>flask/testsuite/testing.py
<ide> def view(company_id):
<ide> url = flask.url_for('view', company_id='xxx')
<ide> response = self.client.get(url)
<ide>
<del> self.assertEquals(200, response.status_code)
<del> self.assertEquals(b'xxx', response.data)
<add> self.assert_equal(200, response.status_code)
<add> self.assert_equal(b'xxx', response.data)
<ide>
<ide>
<ide> def test_nosubdomain(self):
<ide> def view(company_id):
<ide> url = flask.url_for('view', company_id='xxx')
<ide> response = self.client.get(url)
<ide>
<del> self.assertEquals(200, response.status_code)
<del> self.assertEquals(b'xxx', response.data)
<add> self.assert_equal(200, response.status_code)
<add> self.assert_equal(b'xxx', response.data)
<ide>
<ide>
<ide> def suite(): | 1 |
PHP | PHP | add new line per styleci | 98936eecd8c79ff6f31afe3b4596b69a878e6f92 | <ide><path>src/Illuminate/Routing/RoutingServiceProvider.php
<ide> protected function registerPsrRequest()
<ide> {
<ide> $this->app->bind(ServerRequestInterface::class, function ($app) {
<ide> $psr17Factory = new Psr17Factory;
<add>
<ide> return (new PsrHttpFactory($psr17Factory, $psr17Factory, $psr17Factory, $psr17Factory))
<ide> ->createRequest($app->make('request'));
<ide> }); | 1 |
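With the `ServerRequestInterface` binding above registered, any container-resolved callable can type-hint the PSR-7 request directly. A minimal sketch — the `/ping` route is hypothetical and assumes the `nyholm/psr7` and `symfony/psr-http-message-bridge` packages that the provider relies on are installed:

```php
<?php

use Illuminate\Support\Facades\Route;
use Psr\Http\Message\ServerRequestInterface;

// Laravel resolves route-closure dependencies from the container, so the
// binding registered in RoutingServiceProvider yields a PSR-7 request here.
Route::get('/ping', function (ServerRequestInterface $request) {
    return $request->getUri()->getPath(); // "/ping"
});
```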
Text | Text | streamline copy and remove gitter references | e21e6c1ce6ba3e8fc3c7a637216cfef9789f4e60 | <ide><path>docs/FAQ.md
<del>### I am new to GitHub and Open Source, where should I start?
<add>### I am new to GitHub and Open Source. Where should I start?
<ide>
<del>Read our ["How to Contribute to Open Source Guide"](https://github.com/freeCodeCamp/how-to-contribute-to-open-source). It's a compresensive resource of first timer friendly projects and guidelines.
<add>Read our ["How to Contribute to Open Source Guide"](https://github.com/freeCodeCamp/how-to-contribute-to-open-source). It's a comprehensive reference for first-timer-friendly projects. And it includes a lot of open source contribution tips.
<ide>
<ide> ### Can I translate freeCodeCamp's curriculum?
<ide>
<del>We will be kicking-off the localization efforts on freeCodeCamp starting with the **Chinese** first.
<add>Yes. We will roll out the tools for translating freeCodeCamp's curriculum soon.
<ide>
<del>We intend to make the curriculum available in more languages, but can't commit to any deadlines yet. Several operational limits like staff, active contributors and server costs play a role in driving the internationalisation (i18n) efforts.
<del>
<del>We intend to start i18n efforts in these languages: Arabic, Portuguese, Russian and Spanish (in no particular order) after we have released the curriculum in Chinese.
<del>
<del>**Why are you focusing on Chinese curriculum first?**
<del>
<del>China is the largest demographic regions, in our non-native English speaking audience. Currently, we have hundreds of thousands of users using an old version of the platform. Focusing on the Chinese curriculum will give us an estimate of logistics involved in the i18n.
<del>
<del>We will focus on Latin America as the demographic region for Portuguese and Spanish.
<del>
<del>We do not have any plans for i18n to any other languages other than the above for the foreseeable future.
<del>
<del>We do not encourage you to work on i18n on the above languages. We may accept pull requests, without any expectation of any timelines on deployments.
<add>Eventually, we intend to localize freeCodeCamp into several major world languages, starting with: Arabic, Chinese, Hindi, Portuguese, Russian, and Spanish.
<ide>
<ide> ### How can I report a new bug?
<ide>
<ide> If you think you've found a bug, first read the ["Help I've Found a Bug"](https://forum.freecodecamp.org/t/how-to-report-a-bug/19543) article and follow its instructions.
<ide>
<ide> If you're confident it's a new bug, go ahead and create a new GitHub issue. Be sure to include as much information as possible so that we can reproduce the bug. We have a pre-defined issue template to help you through this.
<ide>
<del>Please note that any issues that seek coding help on a challenge will be closed. The issue tracker is strictly for codebase related issues and discussions. Whenever in doubt, you should [seek assistance on the forum](https://forum.freecodecamp.org) before making a report.
<add>Please note that these GitHub issues are for codebase-related issues and discussions – not for getting help with learning to code. Whenever in doubt, you should [seek assistance on the forum](https://forum.freecodecamp.org) before creating a GitHub issue.
<ide>
<ide> ### How can I report a security issue?
<ide>
<ide> Please don't create GitHub issues for security issues. Instead, please send an email to `[email protected]` and we'll look into it immediately.
<ide>
<del>### I am a student, can I or our team work on a feature for academic credits?
<del>
<del>Yes, sure.
<add>### I am a student. Can I work on a feature for academic credits?
<ide>
<del>While, we are open to all contributions, please note we are unable to commit to any timelines that may be a requirement at your college or university. We receive many pull-requests and code contributions by volunteer developers, and we respect their time and efforts. We will not be able to give any PR any special attention to be fair to all.
<add>Yes. Please note we are unable to commit to any timelines or paperwork that may be a requirement by your college or university. We receive many pull-requests and code contributions by volunteer developers, and we respect their time and efforts. Out of respect for all of our other contributors, we will not give any PR special priority just because it happens to be school-related.
<ide>
<del>We request you to plan ahead and work on a feature with this in mind.
<add>We request you to plan ahead and work on code contributions with this in mind.
<ide>
<ide> ### What do these different labels that are tagged on issues mean?
<ide>
<ide> The code maintainers [triage](https://en.wikipedia.org/wiki/Software_bug#Bug_man
<ide>
<ide> You should go through [**`help wanted`**](https://github.com/freeCodeCamp/freeCodeCamp/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) or [**`first timers only`**](https://github.com/freeCodeCamp/freeCodeCamp/issues?q=is%3Aopen+is%3Aissue+label%3A%22first+timers+only%22) issues for a quick overview of what is available for you to work on.
<ide>
<del>> [!TIP] **`help wanted`** issues are up for grabs, and you do not need to seek permission before working on them. However, issues with the **`first timers only`** label are reserved for people that have not contributed to the freeCodeCamp codebase before. If these issues lack clarity on what needs to be done, feel free to ask questions in the comments.
<add>> [!TIP] **`help wanted`** issues are up for grabs, and you do not need to seek permission before working on them. However, issues with the **`first timers only`** label are special issues that are designed for people who have not contributed to the freeCodeCamp codebase before.
<ide>
<del>### I found a typo, should I report an issue before I can make a pull request?
<add>### I found a typo. Should I report an issue before I can make a pull request?
<ide>
<del>For typos and other wording changes, you can directly open pull requests without creating an issue first. Issues are more for discussing larger problems associated with code or structural aspects of the curriculum.
<add>For typos and other wording changes, you can directly open pull requests without creating an issue first. Please be sure to mention details in the pull request description to help us understand and review your contribution – even if it's just a minor change.
<ide>
<del>However, please mention details, context etc. in the pull request's description area to help us understand and review your contribution even for minor changes.
<add>Please do create an issue if you want to discuss bigger aspects of the codebase or curriculum.
<ide>
<del>### How do I get an issue assigned to me?
<add>### How can I get an issue assigned to me?
<ide>
<del>We typically do not assign issues to anyone other than long-time contributors to avoid ambiguous no-shows. Instead, we follow the below policy to be fair to everyone:
<add>We typically do not assign issues to anyone other than long-time contributors. Instead, we follow the below policy to be fair to everyone:
<ide>
<del>1. The first pull request for any issue is preferred to be merged.
<del>2. In the case of multiple pull requests for the same issue, we give priority to the quality of the code in the pull requests.
<add>1. We are most likely to merge the first pull request that addresses the issue.
<add>2. In the case of multiple contributors opening a pull request for the same issue at around the same time, we will give priority to the pull request that best addresses the issue. Some of the things we consider:
<ide> - Did you include tests?
<ide> - Did you catch all usecases?
<del> - Did you ensure all tests pass, and you confirmed everything works locally?
<del>3. Finally, we favor pull requests which follow our recommended guidelines.
<add> - Did you ensure all tests pass, and confirm everything works locally?
<add>3. Finally, we give priority to pull requests which follow our recommended guidelines.
<ide> - Did you follow the pull request checklist?
<del> - Did you name your pull request title meaningfully?
<del>
<del>You do not need any permission for issues that are marked `help wanted` or `first timers only` as explained earlier.
<del>
<del>Follow the guidelines carefully and open a pull request.
<add> - Did you give your pull request a meaningful title?
<ide>
<ide> ### I am stuck on something that is not included in this documentation.
<ide>
<ide> **Feel free to ask for help in:**
<ide>
<ide> - The `Contributors` category of [our community forum](https://forum.freecodecamp.org/c/contributors).
<del>- The public `Contributors` [chat room on Gitter](https://gitter.im/FreeCodeCamp/Contributors).
<ide> - The `#Contributors` channel on [our Discord community server](https://discord.gg/pFspAhS).
<ide>
<del>We are excited to help you contribute to any of the topics that you would like to work on. Feel free to ask us questions on the related issue threads, and we will be glad to clarify. Make sure you search for your query before posting a new one.
<add>We are excited to help you contribute to any of the topics that you would like to work on. If you ask us questions on the related issue threads, we will be glad to clarify. Be sure to search for your question before posting a new one.
<ide>
<del>Be polite and patient. Our community of volunteers and moderators are always around to guide you through your queries.
<add>Thanks in advance for being polite and patient. Remember – this community is run mainly by volunteers.
<ide>
<ide> **Additional Assistance:**
<ide>
<ide><path>docs/how-to-open-a-pull-request.md
<ide> # How to open a Pull Request (PR)
<ide>
<del>A pull request enables you to send changes from your fork on GitHub to freeCodeCamp.org's main repository. Once you are done making changes to the code, or coding challenges you should follow these guidelines to send a PR.
<add>A pull request (PR) enables you to send changes from your fork on GitHub to freeCodeCamp.org's main repository. Once you are done making changes to the code, you can follow these guidelines to open a PR.
<ide>
<ide> ## Prepare a good PR title
<ide>
<ide> Some examples of good PRs titles would be:
<ide>
<ide> Our moderators will now take a look and leave you feedback. Please be patient with the fellow moderators and respect their time. All pull requests are reviewed in due course.
<ide>
<del>If you need any assistance please discuss in the [contributors chat room](https://gitter.im/FreeCodeCamp/Contributors), we are more than happy to help you.
<add>And as always, feel free to ask questions on the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or [our Discord server](https://discord.gg/6vJYm9V).
<ide>
<ide> > [!TIP]
<ide> > If you are to be contributing more pull requests, we recommend you read the [making changes and syncing](https://contribute.freecodecamp.org/#/how-to-setup-freecodecamp-locally?id=making-changes-locally) guidelines to avoid having to delete your fork.
<ide><path>docs/how-to-setup-freecodecamp-locally.md
<ide> Follow these guidelines for setting up freeCodeCamp locally on your system. This is highly recommended if you want to contribute regularly.
<ide>
<del>For some of the contribution workflows, you need to have freeCodeCamp running locally. For example, previewing coding challenges or debugging and fixing bugs in the codebase.
<add>Some of these contribution workflows – like fixing bugs in the codebase or curriculum – need you to run freeCodeCamp locally on your computer.
<ide>
<ide> > [!TIP]
<del>> If you are not interested in setting up freeCodeCamp locally consider using Gitpod, a free online dev environment.
<add>> If you are not interested in setting up freeCodeCamp locally, consider using Gitpod, a free online dev environment.
<ide> >
<ide> > [](https://gitpod.io/#https://github.com/freeCodeCamp/freeCodeCamp)
<ide> >
<ide> > (Starts a ready-to-code dev environment for freeCodeCamp in your browser.)
<ide>
<del>## Prepare your local machine
<add>### How to prepare your local machine
<ide>
<ide> Start by installing the prerequisite software for your operating system.
<ide>
<del>We primarily support development on **\*nix** systems. Our staff and community contributors regularly work with the codebase using tools installed on Ubuntu and macOS.
<add>We primarily support development on Linux and Unix-based systems. Our staff and community contributors regularly work with the codebase using tools installed on Ubuntu and macOS.
<ide>
<ide> We also support Windows 10 via WSL2, which you can prepare by [reading this guide](/how-to-setup-wsl).
<ide>
<ide> Now that you have a local copy of freeCodeCamp, you can follow these instruction
<ide>
<ide> If you do run into issues, first perform a web search for your issue and see if it has already been answered. If you cannot find a solution, please search our [GitHub issues](https://github.com/freeCodeCamp/freeCodeCamp/issues) page for a solution and report the issue if it has not yet been reported.
<ide>
<del>And as always, feel free to hop on to our [Contributors Chat room on Gitter](https://gitter.im/FreeCodeCamp/Contributors) or [our Discord server](https://discord.gg/6vJYm9V), for quick queries.
<add>And as always, feel free to ask questions on the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or [our Discord server](https://discord.gg/6vJYm9V).
<ide>
<ide> > [!TIP]
<ide> > You may skip running freeCodeCamp locally if you are simply editing files. For instance, performing a `rebase`, or resolving `merge` conflicts.
<ide> The first time setup can take a while depending on your network bandwidth. Be pa
<ide>
<ide> ## Getting Help
<ide>
<del>If you are stuck and need help, let us know by asking in the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or the [Contributors chat room](https://gitter.im/FreeCodeCamp/Contributors) on Gitter.
<add>If you are stuck and need help, feel free to ask questions on the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or [our Discord server](https://discord.gg/6vJYm9V).
<ide>
<ide> There might be an error in the console of your browser or in Bash / Terminal / Command Line that will help identify the problem. Provide this error message in your problem description so others can more easily identify the issue and help you find a resolution.
<ide><path>docs/how-to-work-on-coding-challenges.md
<ide>
<ide> Our goal is to develop a fun and clear interactive learning experience.
<ide>
<del>Designing interactive coding challenges is difficult. It would be much easier to write a lengthy explanation or to create a video tutorial, and there's a place for those on Medium and YouTube. However, for our core curriculum, we're sticking with what works best for most people - a fully interactive, video game-like experience.
<add>Designing interactive coding challenges is difficult. It would be much easier to write a lengthy explanation or to create a video tutorial. But for our core curriculum, we're sticking with what works best for most people - a fully interactive, video game-like experience.
<ide>
<ide> We want campers to achieve a flow state. We want them to build momentum and blast through our curriculum with as few snags as possible. We want them to go into the projects with confidence and gain a wide exposure to programming concepts.
<ide>
<del>Creating these challenges requires immense creativity and attention to detail. There's plenty of help available. You'll have support from a whole team of contributors to whom you can bounce ideas off and demo your challenges. Stay active in the [contributors room](https://gitter.im/freecodecamp/contributors) and ask lots of questions.
<add>Note that for Version 7.0 of the freeCodeCamp curriculum, we are moving toward [an entirely project-focused model with a lot more repetition](https://www.freecodecamp.org/news/python-curriculum-is-live/).
<add>
<add>Creating these challenges requires immense creativity and attention to detail. There's plenty of help available. You'll have support from a whole team of contributors you can bounce ideas off of and demo your challenges to.
<add>
<add>And as always, feel free to ask questions on the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or [our Discord server](https://discord.gg/6vJYm9V).
<ide>
<ide> With your help we can design an interactive coding curriculum that will help millions of people learn to code for years to come.
<ide>
<ide><path>docs/index.md
<del>The [freeCodeCamp.org](https://freecodecamp.org) community is possible thanks to thousands of kind volunteers like you. We welcome any and all contributions to the community and are excited to welcome you aboard.
<add>The [freeCodeCamp.org](https://freecodecamp.org) community is possible thanks to thousands of kind volunteers like you. If you want to contribute your time and expertise, we would be excited to welcome you aboard.
<ide>
<ide> > [!NOTE]
<del>> Before you proceed, please take a quick 2 minutes to read our [Code of Conduct](https://www.freecodecamp.org/code-of-conduct). We enforce it strictly across our community. We want contributing to freeCodeCamp.org to be a safe and inclusive experience for everyone.
<add>> Before you proceed, please take a quick 2 minutes to read our [Code of Conduct](https://www.freecodecamp.org/code-of-conduct). We strictly enforce it across our community so that contributing to freeCodeCamp.org is a safe, inclusive experience for everyone.
<ide>
<ide> Happy contributing.
<ide>
<ide> You are welcome to:
<ide>
<del>- Create, update and fix bugs in our [coding challenges](#coding-challenges).
<add>- Create, update and fix bugs in our [curriculum](#curriculum).
<ide> - Help us fix bugs in freeCodeCamp.org's [learning platform](#learning-platform).
<ide> - _(Coming Soon)_ Help us translate freeCodeCamp.org to world languages.
<ide>
<del>Have questions? Head over to [these FAQs](/FAQ.md) where we answer some common queries about contributing.
<add>We answer the most common questions about contributing [in our contributor FAQ](/FAQ.md).
<ide>
<del>## Coding Challenges
<add>## Curriculum
<ide>
<del>All our coding challenges are curated by the community, bringing in expert knowledge from volunteers like you.
<add>Our curriculum is curated by the global freeCodeCamp community. This way, we are able to incorporate expert knowledge from volunteers like you.
<ide>
<del>You can help expand them and make their wording better. You can also update the user stories to explain the concept better or remove redundant ones and improve the challenge tests to make them more accurately test people's code.
<add>You can help expand and improve the curriculum. You can also update project user stories to better-explain concepts. And you can improve our automated tests so that we can more accurately test people's code.
<ide>
<del>**If you're interested in improving these coding challenges, here's [how to work on coding challenges](how-to-work-on-coding-challenges.md).**
<add>**If you're interested in improving our curriculum, here's [how to contribute to the curriculum](how-to-work-on-coding-challenges.md).**
<ide>
<ide> ## Learning Platform
<ide>
<del>Our learning platform runs on a modern JavaScript stack. It has various components, tools, and libraries, including but not limited to, Node.js, MongoDB, LoopBack, OAuth 2.0, React, Gatsby, Webpack, and more.
<add>Our learning platform runs on a modern JavaScript stack. It has various components, tools, and libraries. These include Node.js, MongoDB, OAuth 2.0, React, Gatsby, Webpack, and more.
<ide>
<del>Broadly,
<add>Broadly, we use
<ide>
<del>- We have a Node.js based API server.
<del>- A set of React-based client applications.
<del>- A script that we use to evaluate our front-end projects.
<add>- a Node.js based API server
<add>- a set of React-based client applications
<add>- and testing scripts to evaluate camper-submitted curriculum projects.
<ide>
<del>Contributing to this requires some understanding of APIs, ES6 Syntax, and a lot of curiosity.
<add>If you want to productively contribute to the curriculum, we recommend some familiarity with these tools.
<ide>
<del>Essentially, we expect basic familiarity with some of the aforementioned technologies, tools, and libraries. With that being said, you are not required to be an expert on them to contribute.
<add>If you want to help us improve our codebase...
<ide>
<del>**If you want to help us improve our codebase, you can either use Gitpod, a free online dev environment**
<add>**you can either use Gitpod, a free online dev environment that starts a ready-to-code dev environment for freeCodeCamp in your browser.**
<ide>
<ide> [](https://gitpod.io/#https://github.com/freeCodeCamp/freeCodeCamp)
<ide>
<del>(Starts a ready-to-code dev environment for freeCodeCamp in your browser.)
<add>Or you can...
<ide>
<del>OR
<del>
<del>**you can [set up freeCodeCamp locally](how-to-setup-freecodecamp-locally.md) on your machine.**
<add>**[set up freeCodeCamp locally](how-to-setup-freecodecamp-locally.md) on your machine.**
<ide>\ No newline at end of file
<ide><path>docs/moderator-handbook.md
<ide> Thanks again! 😊
<ide>
<ide> ---
<ide>
<del>> If you have any questions, feel free to reach out through [Gitter](https://gitter.im/FreeCodeCamp/Contributors) or by commenting below. 💬
<add>> If you have any questions, feel free to ask questions on the ['Contributors' category on our forum](https://forum.freecodecamp.org/c/contributors) or [our Discord server](https://discord.gg/6vJYm9V).
<ide> ```
<ide>
<ide> ### Closing invalid pull requests | 6 |
Text | Text | update multi-db docs | dfb519ac71df8be08641cc1da02a07972c3d947f | <ide><path>guides/source/active_record_multiple_databases.md
<ide> The following features are not (yet) supported:
<ide> * Sharding
<ide> * Joining across clusters
<ide> * Load balancing replicas
<add>* Dumping schema caches for multiple databases
<ide>
<ide> ## Setting up your application
<ide>
<ide> config.active_record.writing_role = :default
<ide> config.active_record.reading_role = :readonly
<ide> ```
<ide>
<add>It's important to connect to your database in a single model and then inherit from that model
<add>for the tables rather than connect multiple individual models to the same database. Database
<add>clients have a limit to the number of open connections there can be and if you do this it will
<add>multiply the number of connections you have since Rails uses the model class name for the
<add>connection specification name.
<add>
<ide> Now that we have the database.yml and the new model set up it's time to create the databases.
<ide> Rails 6.0 ships with all the rails tasks you need to use multiple databases in Rails.
<ide>
<ide> for the 'nonexistent' role.)`
<ide>
<ide> ## Caveats
<ide>
<add>### Sharding
<add>
<ide> As noted at the top, Rails doesn't (yet) support sharding. We had to do a lot of work
<ide> to support multiple databases for Rails 6.0. The lack of support for sharding isn't
<ide> an oversight, but does require additional work that didn't make it in for 6.0. For now
<ide> if you need sharding it may be advisable to continue using one of the many gems
<ide> that supports this.
<ide>
<add>### Load Balancing Replicas
<add>
<ide> Rails also doesn't support automatic load balancing of replicas. This is very
<ide> dependent on your infrastructure. We may implement basic, primitive load balancing
<ide> in the future, but for an application at scale this should be something your application
<ide> handles outside of Rails.
<ide>
<del>Lastly, you cannot join across databases. Rails 6.1 will support using `has_many`
<add>### Joining Across Databases
<add>
<add>Applications cannot join across databases. Rails 6.1 will support using `has_many`
<ide> relationships and creating 2 queries instead of joining, but Rails 6.0 will require
<ide> you to split the joins into 2 selects manually.
<add>
<add>### Schema Cache
<add>
<add>If you use a schema cache and multiple databases you'll need to write an initializer
<add>that loads the schema cache from your app. This wasn't an issue we could resolve in
<add>time for Rails 6.0 but hope to have it in a future version soon. | 1 |
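To make the single-connection pattern described in the patch above concrete, here is a minimal, hypothetical sketch — the `animals`/`animals_replica` database entries and the `AnimalsBase`/`Dog` classes are illustrative, not part of the commit:

```ruby
# app/models/animals_base.rb -- one abstract class owns the connection...
class AnimalsBase < ApplicationRecord
  self.abstract_class = true
  connects_to database: { writing: :animals, reading: :animals_replica }
end

# app/models/dog.rb -- ...and every table in that database inherits from it,
# so these models share a single connection pool instead of opening a new
# connection per model class.
class Dog < AnimalsBase
end

# Role names come from config.active_record.writing_role / reading_role
# (defaults :writing and :reading).
ActiveRecord::Base.connected_to(role: :reading) do
  Dog.first # runs against the animals_replica connection
end
```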
Go | Go | rename the default seccomp profile to "builtin" | ac449d6b5ad29a5086824729ce54eec6b0cc8545 | <ide><path>daemon/config/config.go
<ide> const (
<ide> LinuxV2RuntimeName = "io.containerd.runc.v2"
<ide>
<ide> // SeccompProfileDefault is the built-in default seccomp profile.
<del> SeccompProfileDefault = "default"
<add> SeccompProfileDefault = "builtin"
<ide> // SeccompProfileUnconfined is a special profile name for seccomp to use an
<ide> // "unconfined" seccomp profile.
<ide> SeccompProfileUnconfined = "unconfined" | 1 |
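A hedged illustration of how the renamed constants might be consumed — this is not actual daemon code, only a sketch showing the two reserved profile names versus a custom path (treating an empty string as "use the default" is an assumption of the example):

```go
package main

import (
	"fmt"

	"github.com/docker/docker/daemon/config"
)

// describeProfile is illustrative only; the real daemon's seccomp handling
// lives elsewhere. It just distinguishes the reserved names from a file path.
func describeProfile(name string) string {
	switch name {
	case "", config.SeccompProfileDefault: // "" assumed to mean the built-in profile
		return "apply the built-in default seccomp profile"
	case config.SeccompProfileUnconfined:
		return "run with seccomp filtering disabled"
	default:
		return "load a custom profile from " + name
	}
}

func main() {
	fmt.Println(describeProfile("builtin"))
	fmt.Println(describeProfile("unconfined"))
}
```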
Javascript | Javascript | add type annotations for chunk | f1618aed044f4ede5e7bca2db16e793079a9ab8b | <ide><path>lib/Chunk.js
<ide> /*
<del> MIT License http://www.opensource.org/licenses/mit-license.php
<del> Author Tobias Koppers @sokra
<add>MIT License http://www.opensource.org/licenses/mit-license.php
<add>Author Tobias Koppers @sokra
<ide> */
<ide> "use strict";
<ide>
<ide> const ERR_CHUNK_ENTRY = "Chunk.entry was removed. Use hasRuntime()";
<ide> const ERR_CHUNK_INITIAL =
<ide> "Chunk.initial was removed. Use canBeInitial/isOnlyInitial()";
<ide>
<add>/** @typedef {import("./Module.js")} Module */
<add>/** @typedef {import("./ChunkGroup")} ChunkGroup */
<add>/** @typedef {import("./ModuleReason.js")} ModuleReason */
<add>/** @typedef {import("webpack-sources").Source} Source */
<add>
<add>/**
<add> * @typedef {Object} Identifiable an object who contains an identifier function property
<add> * @property {() => string} identifier the resource or unique identifier of something
<add> */
<add>
<add>/**
<add> * @typedef {Object} WithId an object who has an id property
<add> * @property {string} id the id of the object
<add> */
<add>
<add>/** @typedef {(a: Module, b: Module) => -1|0|1} ModuleSortPredicate */
<add>/** @typedef {(m: Module) => boolean} ModuleFilterPredicate */
<add>/** @typedef {(c: Chunk) => boolean} ChunkFilterPredicate */
<add>
<add>/**
<add> * @param {WithId} a object that contains an ID property
<add> * @param {WithId} b object that contains an ID property
<add> * @returns {-1|0|1} sort value
<add> */
<ide> const sortById = (a, b) => {
<ide> if (a.id < b.id) return -1;
<ide> if (b.id < a.id) return 1;
<ide> return 0;
<ide> };
<ide>
<add>/**
<add> *
<add> * @param {Identifiable} a first object with ident fn
<add> * @param {Identifiable} b second object with ident fn
<add> * @returns {-1|0|1} The order number of the sort
<add> */
<ide> const sortByIdentifier = (a, b) => {
<ide> if (a.identifier() > b.identifier()) return 1;
<ide> if (a.identifier() < b.identifier()) return -1;
<ide> return 0;
<ide> };
<ide>
<add>/**
<add> * @returns {string} a concatenation of module identifiers sorted
<add> * @param {SortableSet} set to pull module identifiers from
<add> */
<ide> const getModulesIdent = set => {
<ide> set.sort();
<ide> let str = "";
<ide> const getModulesIdent = set => {
<ide> return str;
<ide> };
<ide>
<add>/**
<add> * @template {T}
<add> * @param {Set<T>} set the set to convert to array
<add> * @returns {Array<T>} the array returned from Array.from(set)
<add> */
<ide> const getArray = set => Array.from(set);
<ide>
<add>/**
<add> * @param {Set<Module>} set the Set to get the count/size of
<add> * @returns {number} the size of the modules
<add> */
<ide> const getModulesSize = set => {
<del> let count = 0;
<add> let size = 0;
<ide> for (const module of set) {
<del> count += module.size();
<add> size += module.size();
<ide> }
<del> return count;
<add> return size;
<ide> };
<ide>
<add>/**
<add> * A Chunk is a unit of encapsulation for Modules.
<add> * Chunks are "rendered" into bundles that get emitted when the build completes.
<add> */
<ide> class Chunk {
<add> /**
<add> * @param {string=} name of chunk being created, is optional (for subclasses)
<add> */
<ide> constructor(name) {
<add> /** @type {number | null} */
<ide> this.id = null;
<add> /** @type {number[] | null} */
<ide> this.ids = null;
<add> /** @type {number} */
<ide> this.debugId = debugId++;
<add> /** @type {string} */
<ide> this.name = name;
<add> /** @type {boolean} */
<ide> this.preventIntegration = false;
<add> /** @type {Module=} */
<ide> this.entryModule = undefined;
<add> //TODO make these typed generics for Module[] and ChunkGroup[] and their sort being (T, T): => 1,-1,0
<add> //See https://github.com/webpack/webpack/pull/7046
<add> /** @private */
<ide> this._modules = new SortableSet(undefined, sortByIdentifier);
<add> /** @private */
<ide> this._groups = new SortableSet(undefined, sortById);
<add> /** @type {Source[]} */
<ide> this.files = [];
<add> /** @type {boolean} */
<ide> this.rendered = false;
<add> /** @type {string=} */
<ide> this.hash = undefined;
<add> /** @type {Object} */
<ide> this.contentHash = Object.create(null);
<add> /** @type {string=} */
<ide> this.renderedHash = undefined;
<add> /** @type {string=} */
<ide> this.chunkReason = undefined;
<add> /** @type {boolean} */
<ide> this.extraAsync = false;
<ide> }
<ide>
<add> /**
<add> * @deprecated Chunk.entry has been deprecated. Please use .hasRuntime() instead
<add> * @returns {never} Throws an error trying to access this property
<add> */
<ide> get entry() {
<ide> throw new Error(ERR_CHUNK_ENTRY);
<ide> }
<ide>
<add> /**
<add> * @deprecated .entry has been deprecated. Please use .hasRuntime() instead
<add> * @param {never} data The data that was attempting to be set
<add> * @returns {never} Throws an error trying to access this property
<add> */
<ide> set entry(data) {
<ide> throw new Error(ERR_CHUNK_ENTRY);
<ide> }
<ide>
<add> /**
<add> * @deprecated Chunk.initial was removed. Use canBeInitial/isOnlyInitial()
<add> * @returns {never} Throws an error trying to access this property
<add> */
<ide> get initial() {
<ide> throw new Error(ERR_CHUNK_INITIAL);
<ide> }
<ide>
<add> /**
<add> * @deprecated Chunk.initial was removed. Use canBeInitial/isOnlyInitial()
<add> * @param {never} data The data attempting to be set
<add> * @returns {never} Throws an error trying to access this property
<add> */
<ide> set initial(data) {
<ide> throw new Error(ERR_CHUNK_INITIAL);
<ide> }
<ide>
<add> /**
<add> * @returns {boolean} whether or not the Chunk will have a runtime
<add> */
<ide> hasRuntime() {
<ide> for (const chunkGroup of this._groups) {
<ide> // We only need to check the first one
<ide> class Chunk {
<ide> return false;
<ide> }
<ide>
<add> /**
<add> * @returns {boolean} whether or not this chunk can be an initial chunk
<add> */
<ide> canBeInitial() {
<ide> for (const chunkGroup of this._groups) {
<ide> if (chunkGroup.isInitial()) return true;
<ide> }
<ide> return false;
<ide> }
<ide>
<add> /**
<add> * @returns {boolean} whether this chunk can only be an initial chunk
<add> */
<ide> isOnlyInitial() {
<ide> if (this._groups.size <= 0) return false;
<ide> for (const chunkGroup of this._groups) {
<ide> class Chunk {
<ide> return true;
<ide> }
<ide>
<add> /**
<add> * @returns {boolean} if this chunk contains the entry module
<add> */
<ide> hasEntryModule() {
<ide> return !!this.entryModule;
<ide> }
<ide>
<add> /**
<add> * @param {Module} module the module that will be added to this chunk.
<add> * @returns {boolean} returns true if the chunk doesn't have the module and it was added
<add> */
<ide> addModule(module) {
<ide> if (!this._modules.has(module)) {
<ide> this._modules.add(module);
<ide> class Chunk {
<ide> return false;
<ide> }
<ide>
<add> /**
<add> * @param {Module} module the module that will be removed from this chunk
<add> * @returns {boolean} returns true if chunk exists and is successfully deleted
<add> */
<ide> removeModule(module) {
<ide> if (this._modules.delete(module)) {
<ide> module.removeChunk(this);
<ide> class Chunk {
<ide> return false;
<ide> }
<ide>
<add> /**
<add> * @param {Module[]} modules the new modules to be set
<add> * @returns {void} set new modules to this chunk and return nothing
<add> */
<ide> setModules(modules) {
<ide> this._modules = new SortableSet(modules, sortByIdentifier);
<ide> }
<ide>
<add> /**
<add> * @returns {number} the amount of modules in chunk
<add> */
<ide> getNumberOfModules() {
<ide> return this._modules.size;
<ide> }
<ide>
<add> /**
<add> * @returns {SortableSet} return the modules SortableSet for this chunk
<add> */
<ide> get modulesIterable() {
<ide> return this._modules;
<ide> }
<ide>
<add> /**
<add> * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being added
<add> * @returns {boolean} returns true if chunk is not apart of chunkGroup and is added successfully
<add> */
<ide> addGroup(chunkGroup) {
<ide> if (this._groups.has(chunkGroup)) return false;
<ide> this._groups.add(chunkGroup);
<ide> return true;
<ide> }
<ide>
<add> /**
<add> * @param {ChunkGroup} chunkGroup the chunkGroup the chunk is being removed from
<add> * @returns {boolean} returns true if chunk does exist in chunkGroup and is removed
<add> */
<ide> removeGroup(chunkGroup) {
<ide> if (!this._groups.has(chunkGroup)) return false;
<ide> this._groups.delete(chunkGroup);
<ide> return true;
<ide> }
<ide>
<add> /**
<add> * @param {ChunkGroup} chunkGroup the chunkGroup to check
<add> * @returns {boolean} returns true if chunk has chunkGroup reference and exists in chunkGroup
<add> */
<ide> isInGroup(chunkGroup) {
<ide> return this._groups.has(chunkGroup);
<ide> }
<ide>
<add> /**
<add> * @returns {number} the amount of groups said chunk is in
<add> */
<ide> getNumberOfGroups() {
<ide> return this._groups.size;
<ide> }
<ide>
<add> /**
<add> * @returns {SortableSet} the chunkGroups that said chunk is referenced in
<add> */
<ide> get groupsIterable() {
<ide> return this._groups;
<ide> }
<ide>
<add> /**
<add> * @param {Chunk} otherChunk the chunk to compare itself with
<add>	 * @returns {-1|0|1} this is a comparator function like sort and returns -1, 0, or 1 based on sort order
<add> */
<ide> compareTo(otherChunk) {
<ide> this._modules.sort();
<ide> otherChunk._modules.sort();
<ide> class Chunk {
<ide> }
<ide> }
<ide>
<add> /**
<add> * @param {Module} module Module to check
<add> * @returns {boolean} returns true if module does exist in this chunk
<add> */
<ide> containsModule(module) {
<ide> return this._modules.has(module);
<ide> }
<ide> class Chunk {
<ide> return this._modules.getFromUnorderedCache(getModulesIdent);
<ide> }
<ide>
<del> remove(reason) {
<add> remove() {
<ide> // cleanup modules
<ide> // Array.from is used here to create a clone, because removeChunk modifies this._modules
<ide> for (const module of Array.from(this._modules)) {
<ide> class Chunk {
<ide> }
<ide> }
<ide>
<add> /**
<add> *
<add> * @param {Module} module module to move
<add> * @param {Chunk} otherChunk other chunk to move it to
<add> * @returns {void}
<add> */
<ide> moveModule(module, otherChunk) {
<ide> GraphHelpers.disconnectChunkAndModule(this, module);
<ide> GraphHelpers.connectChunkAndModule(otherChunk, module);
<ide> module.rewriteChunkInReasons(this, [otherChunk]);
<ide> }
<ide>
<add> /**
<add> *
<add> * @param {Chunk} otherChunk the chunk to integrate with
<add> * @param {ModuleReason} reason reason why the module is being integrated
<add> * @returns {boolean} returns true or false if integration succeeds or fails
<add> */
<ide> integrate(otherChunk, reason) {
<ide> if (!this.canBeIntegrated(otherChunk)) {
<ide> return false;
<ide> class Chunk {
<ide> return true;
<ide> }
<ide>
<add> /**
<add>	 * @param {Chunk} newChunk the new chunk that will be split out of this chunk; the chunk graph is updated accordingly
<add> * @returns {void}
<add> */
<ide> split(newChunk) {
<ide> for (const chunkGroup of this._groups) {
<ide> chunkGroup.insertChunk(newChunk, this);
<ide> class Chunk {
<ide> return true;
<ide> }
<ide>
<add> /**
<add> *
<add> * @param {number} size the size
<add> * @param {Object} options the options passed in
<add>	 * @returns {number} the size with the multiplier and overhead applied
<add> */
<ide> addMultiplierAndOverhead(size, options) {
<ide> const overhead =
<ide> typeof options.chunkOverhead === "number" ? options.chunkOverhead : 10000;
<ide> class Chunk {
<ide> return size * multiplicator + overhead;
<ide> }
<ide>
<add> /**
<add> * @returns {number} the size of all modules
<add> */
<ide> modulesSize() {
<ide> return this._modules.getFromUnorderedCache(getModulesSize);
<ide> }
<ide>
<add> /**
<add> * @param {Object} options the size display options
<add> * @returns {number} the chunk size
<add> */
<ide> size(options) {
<ide> return this.addMultiplierAndOverhead(this.modulesSize(), options);
<ide> }
<ide> class Chunk {
<ide> return this.addMultiplierAndOverhead(integratedModulesSize, options);
<ide> }
<ide>
<add> /**
<add> * @param {ModuleSortPredicate=} sortByFn a predicate function used to sort modules
<add> * @returns {void}
<add> */
<ide> sortModules(sortByFn) {
<ide> this._modules.sortWith(sortByFn || sortById);
<ide> }
<ide> class Chunk {
<ide> };
<ide> }
<ide>
<add> /**
<add> *
<add> * @param {ModuleFilterPredicate} filterFn predicate function used to filter modules
<add> * @param {ChunkFilterPredicate} filterChunkFn predicate function used to filter chunks
<add>	 * @returns {boolean} returns true if a module matching the filter exists in the chunk graph
<add> */
<ide> hasModuleInGraph(filterFn, filterChunkFn) {
<ide> const queue = new Set(this.groupsIterable);
<ide> const chunksProcessed = new Set();
<ide><path>lib/util/SortableSet.js
<ide> "use strict";
<del>
<add>//TODO: Make this a generic type
<add>//https://github.com/Microsoft/TypeScript/issues/23385
<add>//https://github.com/Microsoft/TypeScript/issues/23384
<ide> class SortableSet extends Set {
<ide> constructor(initialIterable, defaultSort) {
<ide> super(initialIterable); | 2 |
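For context on the `@returns {-1|0|1}` annotations above, here is a minimal, self-contained sketch of the sort-comparator contract that `compareTo` and `sortWith(sortById)` rely on. It is not taken from webpack; the plain module objects and the `sortById` helper shown here are illustrative assumptions.

```js
// A comparator returns -1, 0, or 1 so that Array#sort (or a sorted-set
// helper such as SortableSet#sortWith) can order its elements.
const sortById = (a, b) => {
	if (a.id < b.id) return -1;
	if (a.id > b.id) return 1;
	return 0;
};

const modules = [{ id: "c" }, { id: "a" }, { id: "b" }];
console.log(modules.sort(sortById).map(m => m.id)); // [ 'a', 'b', 'c' ]
```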
Text | Text | describe gotcha for 'status' option [ci skip] | 2ad466b867844d405110d1862cf020848f7a1814 | <ide><path>guides/source/layouts_and_rendering.md
<ide> Rails understands both numeric status codes and the corresponding symbols shown
<ide> | | 510 | :not_extended |
<ide> | | 511 | :network_authentication_required |
<ide>
<add>NOTE: If you try to render content along with a non-content status code
<add>(100-199, 204, 205 or 304), it will be dropped from the response.
<add>
<ide> #### Finding Layouts
<ide>
<ide> To find the current layout, Rails first looks for a file in `app/views/layouts` with the same base name as the controller. For example, rendering actions from the `PhotosController` class will use `app/views/layouts/photos.html.erb` (or `app/views/layouts/photos.builder`). If there is no such controller-specific layout, Rails will use `app/views/layouts/application.html.erb` or `app/views/layouts/application.builder`. If there is no `.erb` layout, Rails will use a `.builder` layout if one exists. Rails also provides several ways to more precisely assign specific layouts to individual controllers and actions. | 1 |
Ruby | Ruby | expand check to dylibs and bundles | 440adcbec0f8a725590a830c3efa7dfa8a745626 | <ide><path>Library/Homebrew/linkage_checker.rb
<ide> def initialize(keg, formula = nil, cache_db:, rebuild_cache: false)
<ide> @unnecessary_deps = []
<ide> @unwanted_system_dylibs = []
<ide> @version_conflict_deps = []
<del> @executables_missing_rpaths = []
<add> @files_missing_rpaths = []
<ide>
<ide> check_dylibs(rebuild_cache: rebuild_cache)
<ide> end
<ide> def display_normal_output
<ide> display_items "Undeclared dependencies with linkage", @undeclared_deps
<ide> display_items "Dependencies with no linkage", @unnecessary_deps
<ide> display_items "Unwanted system libraries", @unwanted_system_dylibs
<del> display_items "Executables with missing rpath", @executables_missing_rpaths
<add> display_items "Files with missing rpath", @files_missing_rpaths
<ide> end
<ide>
<ide> def display_reverse_output
<ide> def display_test_output(puts_output: true, strict: false)
<ide> display_items "Unwanted system libraries", @unwanted_system_dylibs, puts_output: puts_output
<ide> display_items "Conflicting libraries", @version_conflict_deps, puts_output: puts_output
<ide> display_items "Undeclared dependencies with linkage", @undeclared_deps, puts_output: puts_output if strict
<del> display_items "Executables with missing rpath", @executables_missing_rpaths, puts_output: puts_output
<add> display_items "Files with missing rpath", @files_missing_rpaths, puts_output: puts_output
<ide> end
<ide>
<ide> sig { params(strict: T::Boolean).returns(T::Boolean) }
<ide> def broken_library_linkage?(strict: false)
<del> issues = [@broken_deps, @unwanted_system_dylibs, @version_conflict_deps, @executables_missing_rpaths]
<add> issues = [@broken_deps, @unwanted_system_dylibs, @version_conflict_deps, @files_missing_rpaths]
<ide> issues << @undeclared_deps if strict
<ide> [issues, unexpected_broken_dylibs, unexpected_present_dylibs].flatten.any?(&:present?)
<ide> end
<ide> def check_dylibs(rebuild_cache:)
<ide> dylibs.each do |dylib|
<ide> @reverse_links[dylib] << file
<ide>
<del> # Binary executables that link @rpath-prefixed dylibs must include at
<add> # Files that link @rpath-prefixed dylibs must include at
<ide> # least one rpath in order to resolve it.
<ide> if !file_has_any_rpath_dylibs && (dylib.start_with? "@rpath/")
<ide> file_has_any_rpath_dylibs = true
<ide> pathname = Pathname(file)
<del> @executables_missing_rpaths << file if pathname.binary_executable? && pathname.rpaths.empty?
<add> @files_missing_rpaths << file if pathname.rpaths.empty?
<ide> end
<ide>
<ide> next if checked_dylibs.include? dylib | 1 |
Javascript | Javascript | remove irrelevant comment | bb51d3b211215ed432714387a89f20c2e08407c6 | <ide><path>lib/Chunk.js
<ide> class Chunk {
<ide> const b = otherChunk._modules[Symbol.iterator]();
<ide> // eslint-disable-next-line
<ide> while (true) {
<del> // eslint-disable-line
<ide> const aItem = a.next();
<ide> const bItem = b.next();
<ide> if (aItem.done) return 0; | 1 |
Mixed | Ruby | update example formula documentation | 8e12390fc8d9b198dd4b68ca20c0108e5d60e1a5 | <ide><path>Library/Contributions/example-formula.rb
<ide> class ExampleFormula < Formula
<ide> version "1.2-final"
<ide>
<ide> # For integrity and security, we verify the hash (`openssl dgst -sha1 <FILE>`)
<del> # You may also use sha256 if the software uses sha256 on their homepage.
<del> # Leave it empty at first and `brew install` will tell you the expected.
<add> # You may also use sha256 if the software uses sha256 on their homepage. Do not use md5.
<add>  # Either generate the sha locally or leave it empty and `brew install` will tell you the expected value.
<ide> sha1 "cafebabe78901234567890123456789012345678"
<ide>
<ide> # Stable-only dependencies should be nested inside a `stable` block rather than
<ide> class ExampleFormula < Formula
<ide> # Optionally, specify a repository to be used. Brew then generates a
<ide> # `--HEAD` option. Remember to also test it.
<ide> # The download strategies (:using =>) are the same as for `url`.
<add> # "master" is the default branch and doesn't need stating with a :branch conditional
<ide> head "https://we.prefer.https.over.git.example.com/.git"
<ide> head "https://example.com/.git", :branch => "name_of_branch", :revision => "abc123"
<ide> head "https://hg.is.awesome.but.git.has.won.example.com/", :using => :hg # If autodetect fails.
<ide> class ExampleFormula < Formula
<ide> # Bottles are pre-built and added by the Homebrew maintainers for you.
<ide> # If you maintain your own repository, you can add your own bottle links.
<ide> # https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/Bottles.md
<add>  # You can ignore this block entirely if submitting to Homebrew/Homebrew; it'll be
<add> # handled for you by the Brew Test Bot.
<ide> bottle do
<ide> root_url "http://mikemcquaid.com" # Optional root to calculate bottle URLs
<ide> prefix "/opt/homebrew" # Optional HOMEBREW_PREFIX in which the bottles were built.
<ide> def pour_bottle?
<ide> depends_on :arch => :x86_64 # If this formula only build on intel x86 64bit.
<ide> depends_on :arch => :ppc # Only builds on PowerPC?
<ide> depends_on :ld64 # Sometimes ld fails on `MacOS.version < :leopard`. Then use this.
<del> depends_on :x11 # X11/XQuartz components.
<add> depends_on :x11 # X11/XQuartz components. Non-optional X11 deps should go in Homebrew/Homebrew-x11
<ide> depends_on :osxfuse # Permits the use of the upstream signed binary or our source package.
<ide> depends_on :tuntap # Does the same thing as above. This is vital for Yosemite and above.
<ide> depends_on :mysql => :recommended
<ide> def pour_bottle?
<ide>
<ide> # If any Python >= 2.7 < 3.x is okay (either from OS X or brewed):
<ide> depends_on :python
<add> # to depend on Python >= 2.7 but use system Python where possible
<add> depends_on :python if MacOS.version <= :snow_leopard
<ide> # Python 3.x if the `--with-python3` is given to `brew install example`
<ide> depends_on :python3 => :optional
<ide>
<ide> def pour_bottle?
<ide> end
<ide>
<ide> fails_with :clang do
<del> build 425
<add> build 600
<ide> cause "multiple configure and compile errors"
<ide> end
<ide>
<ide> def install
<ide> args << "--some-new-stuff" if build.head? # if head is used instead of url.
<ide> args << "--universal-binary" if build.universal?
<ide>
<add> # If there are multiple conditional arguments use a block instead of lines.
<add> if build.head?
<add> args << "--i-want-pizza"
<add> args << "--and-a-cold-beer" if build.with? "cold-beer"
<add> end
<add>
<add> # If a formula presents a user with a choice, but the choice must be fulfilled:
<add> if build.with? "example2"
<add> args << "--with-example2"
<add> else
<add> args << "--with-example1"
<add> end
<add>
<ide> # The `build.with?` and `build.without?` are smart enough to do the
<ide> # right thing with respect to defaults defined via `:optional` and
<ide> # `:recommended` dependencies.
<ide> def install
<ide> # break if they remember that exact path. In contrast to that, the
<ide> # `$(brew --prefix)/opt/formula` is the same path for all future
<ide> # versions of the formula!
<del> args << "--with-readline=#{Formula["readline"].opt_prefix}/lib" if build.with? "readline"
<add> args << "--with-readline=#{Formula["readline"].opt_prefix}" if build.with? "readline"
<ide>
<ide> # Most software still uses `configure` and `make`.
<ide> # Check with `./configure --help` what our options are.
<ide> system "./configure", "--disable-debug", "--disable-dependency-tracking",
<del> "--prefix=#{prefix}",
<add> "--disable-silent-rules", "--prefix=#{prefix}",
<ide> *args # our custom arg list (needs `*` to unpack)
<ide>
<ide> # If your formula's build system is not thread safe:
<ide> def install
<ide>
<ide> # Overwriting any env var:
<ide> ENV["LDFLAGS"] = "--tag CC"
<del>
<add> # Is the formula struggling to find the pkgconfig file? Point it to it.
<add> # This is done automatically for `keg_only` formulae.
<add> ENV.prepend_path "PKG_CONFIG_PATH", "#{Formula["glib"].opt_lib}/pkgconfig"
<add>
<add> # Need to install into the bin but the makefile doesn't mkdir -p prefix/bin?
<add> bin.mkpath
<add> # A custom directory?
<add> mkdir_p share/"example"
<add> # And then move something from the buildpath to that directory?
<add> mv "ducks.txt", share/"example/ducks.txt"
<add> # No "make", "install" available?
<add> bin.install "binary1"
<add> include.install "example.h"
<add> lib.install "example.dylib"
<add> man1.install "example.1"
<add> man3.install "example.3"
<add> # All that README/LICENSE/NOTES/CHANGELOG stuff? Use "metafiles"
<add> prefix.install_metafiles
<add> # Maybe you'd like to remove a broken or unnecessary element?
<add> # Empty directories will be removed by Homebrew automatically post-install!
<add> rm "bin/example"
<add> rm_rf "share/pointless"
<add>
<add> # If there is a "make", "install" available, please use it!
<ide> system "make", "install"
<ide>
<ide> # We are in a temporary directory and don't have to care about cleanup.
<ide> def plist; <<-EOS.undent
<ide> <plist version="1.0">
<ide> <dict>
<ide> <key>Label</key>
<del> <string>#{plist_name}</string>
<add> <string>#{plist_name}</string>
<add> <key>ProgramArguments</key>
<add> <array>
<add> <string>#{bin}/example</string>
<add> <string>--do-this</string>
<add> </array>
<ide> <key>RunAtLoad</key>
<ide> <true/>
<ide> <key>KeepAlive</key>
<ide> <true/>
<add> <key>StandardErrorPath</key>
<add> <string>/dev/null</string>
<add> <key>StandardOutPath</key>
<add> <string>/dev/null</string>
<ide> </plist>
<ide> EOS
<ide> end
<ide><path>share/doc/homebrew/Formula-Cookbook.md
<ide> Formulae aren’t that complicated. [etl](https://github.com/Homebrew/homebrew/b
<ide>
<ide> And then [Git](https://github.com/Homebrew/homebrew/tree/master/Library/Formula/git.rb) and [flac](https://github.com/Homebrew/homebrew/tree/master/Library/Formula/flac.rb) show more advanced functionality.
<ide>
<del>A more complete [cheat-sheet](https://github.com/Homebrew/homebrew/blob/master/Library/Contributions/example-formula.rb) shows almost all the stuff you can use in a Formula.
<add>A more complete example-formula [cheat-sheet](https://github.com/Homebrew/homebrew/blob/master/Library/Contributions/example-formula.rb) shows almost all the stuff you can use in a Formula.
<ide>
<ide> ## Grab the URL
<ide> | 2 |
Go | Go | add a whitespace in plugin's logging | 97c77b7e0dd2eef23beca1180be93733a3a95e82 | <ide><path>plugin/manager.go
<ide> func LookupWithCapability(name, capability string) (Plugin, error) {
<ide>
<ide> // StateChanged updates daemon inter...
<ide> func (pm *Manager) StateChanged(id string, e libcontainerd.StateInfo) error {
<del> logrus.Debugf("plugin statechanged %s %#v", id, e)
<add> logrus.Debugf("plugin state changed %s %#v", id, e)
<ide>
<ide> return nil
<ide> } | 1 |
PHP | PHP | turn tests inline to make more clear; | 4824f4b2fd402b597dfa9e93d27075497e411afd | <ide><path>tests/Http/HttpRequestTest.php
<ide> public function testCreateFromBase()
<ide> * Tests for Http\Request magic methods `__get()` and `__isset()`.
<ide> *
<ide> * @link https://github.com/laravel/framework/issues/10403 Form request object attribute returns empty when have some string.
<del> * @dataProvider magicMethodsProvider
<ide> */
<del> public function testMagicMethods($uri, $route, $parameters, $property, $propertyValue, $propertyIsset, $propertyEmpty)
<del> {
<del> $request = Request::create($uri, 'GET', $parameters);
<add> public function testMagicMethods()
<add> {
<add> // Simulates QueryStrings.
<add> $request = Request::create('/', 'GET', ['foo' => 'bar', 'empty' => '']);
<add>
<add> // Parameter 'foo' is 'bar', then it ISSET and is NOT EMPTY.
<add> $this->assertEquals($request->foo, 'bar');
<add> $this->assertEquals(isset($request->foo), true);
<add> $this->assertEquals(empty($request->foo), false);
<add>
<add> // Parameter 'empty' is '', then it ISSET and is EMPTY.
<add> $this->assertEquals($request->empty, '');
<add> $this->assertEquals(isset($request->empty), true);
<add> $this->assertEquals(empty($request->empty), true);
<add>
<add> // Parameter 'undefined' is undefined/null, then it NOT ISSET and is EMPTY.
<add> $this->assertEquals($request->undefined, null);
<add> $this->assertEquals(isset($request->undefined), false);
<add> $this->assertEquals(empty($request->undefined), true);
<add>
<add> // Simulates Route parameters.
<add> $request = Request::create('/example/bar', 'GET', [ 'xyz' => 'overwrited' ]);
<add> $request->setRouteResolver(function () use ($request) {
<add> $route = new Route('GET', '/example/{foo}/{xyz?}/{undefined?}', []);
<add> $route->bind($request);
<add>
<add> return $route;
<add> });
<add>
<add> // Router parameter 'foo' is 'bar', then it ISSET and is NOT EMPTY.
<add> $this->assertEquals($request->foo, 'bar');
<add> $this->assertEquals(isset($request->foo), true);
<add> $this->assertEquals(empty($request->foo), false);
<add>
<add> // Router parameter 'undefined' is undefined/null, then it NOT ISSET and is EMPTY.
<add> $this->assertEquals($request->undefined, null);
<add> $this->assertEquals(isset($request->undefined), false);
<add> $this->assertEquals(empty($request->undefined), true);
<add>
<add> // Special case: router parameter 'xyz' is 'overwrited' by QueryString, then it ISSET and is NOT EMPTY.
<add> // Basically, QueryStrings have priority over router parameters.
<add> $this->assertEquals($request->xyz, 'overwrited');
<add>        $this->assertEquals(isset($request->xyz), true);
<add>        $this->assertEquals(empty($request->xyz), false);
<add>
<add> // Simulates empty QueryString and Routes.
<add> $request = Request::create('/', 'GET');
<add> $request->setRouteResolver(function () use ($request) {
<add> $route = new Route('GET', '/', []);
<add> $route->bind($request);
<ide>
<del> // Allow to simulates when a route is inaccessible or undefined.
<del> if (! is_null($route)) {
<del> $request->setRouteResolver(function () use ($request, $route) {
<del> $route = new Route('GET', $route, []);
<del> $route->bind($request);
<add> return $route;
<add> });
<ide>
<del> return $route;
<del> });
<del> }
<add> // Parameter 'undefined' is undefined/null, then it NOT ISSET and is EMPTY.
<add> $this->assertEquals($request->undefined, null);
<add> $this->assertEquals(isset($request->undefined), false);
<add> $this->assertEquals(empty($request->undefined), true);
<ide>
<del> $this->assertEquals($request->{$property}, $propertyValue);
<del> $this->assertEquals(isset($request->{$property}), $propertyIsset);
<del> $this->assertEquals(empty($request->{$property}), $propertyEmpty);
<del> }
<add> // Special case: simulates empty QueryString and Routes, without the Route Resolver.
<add> // It'll happen when you try to get a parameter outside a route.
<add> $request = Request::create('/', 'GET');
<ide>
<del> public function magicMethodsProvider()
<del> {
<del> return [
<del> // Simulates QueryStrings.
<del> ['/', null, ['foo' => 'bar', 'empty' => ''], 'foo', 'bar', true, false],
<del> ['/', null, ['foo' => 'bar', 'empty' => ''], 'empty', '', true, true],
<del> ['/', null, ['foo' => 'bar', 'empty' => ''], 'undefined', null, false, true],
<del>
<del> // Simulates Routes.
<del> ['/example/bar', '/example/{foo}/{undefined?}', [], 'foo', 'bar', true, false],
<del> ['/example/bar', '/example/{foo}/{undefined?}', [], 'undefined', null, false, true],
<del>
<del> // Simulates no QueryStrings or Routes.
<del> ['/', '/', [], 'undefined', null, false, true],
<del> ['/', null, [], 'undefined', null, false, true],
<del> ];
<add> // Parameter 'undefined' is undefined/null, then it NOT ISSET and is EMPTY.
<add> $this->assertEquals($request->undefined, null);
<add> $this->assertEquals(isset($request->undefined), false);
<add> $this->assertEquals(empty($request->undefined), true);
<ide> }
<ide>
<ide> public function testHttpRequestFlashCallsSessionFlashInputWithInputData() | 1 |
Javascript | Javascript | update variable declarations according to es6 | a94149988913c20101aa97687e063567126dcdad | <ide><path>lib/BasicEvaluatedExpression.js
<ide> class BasicEvaluatedExpression {
<ide> else if(this.isWrapped()) return this.prefix && this.prefix.asBool() || this.postfix && this.postfix.asBool() ? true : undefined;
<ide> else if(this.isTemplateString()) {
<ide> if(this.quasis.length === 1) return this.quasis[0].asBool();
<del> for(var i = 0; i < this.quasis.length; i++) {
<add> for(let i = 0; i < this.quasis.length; i++) {
<ide> if(this.quasis[i].asBool()) return true;
<ide> }
<ide> // can't tell if string will be empty without executing
<ide><path>lib/Chunk.js
<ide> class Chunk {
<ide> }
<ide>
<ide> removeModule(module) {
<del> var idx = this.modules.indexOf(module);
<add> const idx = this.modules.indexOf(module);
<ide> if(idx >= 0) {
<ide> this.modules.splice(idx, 1);
<ide> module.removeChunk(this);
<ide> class Chunk {
<ide> }
<ide>
<ide> removeChunk(chunk) {
<del> var idx = this.chunks.indexOf(chunk);
<add> const idx = this.chunks.indexOf(chunk);
<ide> if(idx >= 0) {
<ide> this.chunks.splice(idx, 1);
<ide> chunk.removeParent(this);
<ide> class Chunk {
<ide> }
<ide>
<ide> removeParent(chunk) {
<del> var idx = this.parents.indexOf(chunk);
<add> const idx = this.parents.indexOf(chunk);
<ide> if(idx >= 0) {
<ide> this.parents.splice(idx, 1);
<ide> chunk.removeChunk(this);
<ide><path>lib/Compilation.js
<ide> function byId(a, b) {
<ide> }
<ide>
<ide> function iterationBlockVariable(variables, fn) {
<del> for(var indexVariable = 0; indexVariable < variables.length; indexVariable++) {
<del> var varDep = variables[indexVariable].dependencies;
<del> for(var indexVDep = 0; indexVDep < varDep.length; indexVDep++) {
<add> for(let indexVariable = 0; indexVariable < variables.length; indexVariable++) {
<add> let varDep = variables[indexVariable].dependencies;
<add> for(let indexVDep = 0; indexVDep < varDep.length; indexVDep++) {
<ide> fn(varDep[indexVDep]);
<ide> }
<ide> }
<ide> }
<ide>
<ide> function iterationOfArrayCallback(arr, fn) {
<del> for(var index = 0; index < arr.length; index++) {
<add> for(let index = 0; index < arr.length; index++) {
<ide> fn(arr[index]);
<ide> }
<ide> }
<ide> class Compilation extends Tapable {
<ide> building.forEach(cb => cb(err));
<ide> }
<ide> module.build(this.options, this, this.resolvers.normal, this.inputFileSystem, (error) => {
<del> var errors = module.errors;
<del> for(var indexError = 0; indexError < errors.length; indexError++) {
<del> var err = errors[indexError];
<add> const errors = module.errors;
<add> for(let indexError = 0; indexError < errors.length; indexError++) {
<add> const err = errors[indexError];
<ide> err.origin = origin;
<ide> err.dependencies = dependencies;
<ide> if(optional)
<ide> class Compilation extends Tapable {
<ide> this.errors.push(err);
<ide> }
<ide>
<del> var warnings = module.warnings;
<del> for(var indexWarning = 0; indexWarning < warnings.length; indexWarning++) {
<del> var war = warnings[indexWarning];
<add> const warnings = module.warnings;
<add> for(let indexWarning = 0; indexWarning < warnings.length; indexWarning++) {
<add> const war = warnings[indexWarning];
<ide> war.origin = origin;
<ide> war.dependencies = dependencies;
<ide> this.warnings.push(war);
<ide> class Compilation extends Tapable {
<ide> }
<ide>
<ide> function iterationDependencies(depend) {
<del> for(var index = 0; index < depend.length; index++) {
<del> var dep = depend[index];
<add> for(let index = 0; index < depend.length; index++) {
<add> const dep = depend[index];
<ide> dep.module = dependentModule;
<ide> dependentModule.addReason(module, dep);
<ide> }
<ide> class Compilation extends Tapable {
<ide> }
<ide>
<ide> finish() {
<del> var modules = this.modules;
<add> const modules = this.modules;
<ide> this.applyPlugins1("finish-modules", modules);
<ide>
<del> for(var index = 0; index < modules.length; index++) {
<del> var module = modules[index];
<add> for(let index = 0; index < modules.length; index++) {
<add> const module = modules[index];
<ide> this.reportDependencyErrorsAndWarnings(module, [module]);
<ide> }
<ide> }
<ide> class Compilation extends Tapable {
<ide> }
<ide>
<ide> reportDependencyErrorsAndWarnings(module, blocks) {
<del> for(var indexBlock = 0; indexBlock < blocks.length; indexBlock++) {
<del> var block = blocks[indexBlock];
<del> var dependencies = block.dependencies;
<add> for(let indexBlock = 0; indexBlock < blocks.length; indexBlock++) {
<add> const block = blocks[indexBlock];
<add> const dependencies = block.dependencies;
<ide>
<del> for(var indexDep = 0; indexDep < dependencies.length; indexDep++) {
<del> var d = dependencies[indexDep];
<add> for(let indexDep = 0; indexDep < dependencies.length; indexDep++) {
<add> const d = dependencies[indexDep];
<ide>
<ide> const warnings = d.getWarnings();
<ide> if(warnings) {
<del> for(var indexWar = 0; indexWar < warnings.length; indexWar++) {
<del> var w = warnings[indexWar];
<add> for(let indexWar = 0; indexWar < warnings.length; indexWar++) {
<add> const w = warnings[indexWar];
<ide>
<del> var warning = new ModuleDependencyWarning(module, w, d.loc);
<add> const warning = new ModuleDependencyWarning(module, w, d.loc);
<ide> this.warnings.push(warning);
<ide> }
<ide> }
<ide> const errors = d.getErrors();
<ide> if(errors) {
<del> for(var indexErr = 0; indexErr < errors.length; indexErr++) {
<del> var e = errors[indexErr];
<add> for(let indexErr = 0; indexErr < errors.length; indexErr++) {
<add> const e = errors[indexErr];
<ide>
<del> var error = new ModuleDependencyError(module, e, d.loc);
<add> const error = new ModuleDependencyError(module, e, d.loc);
<ide> this.errors.push(error);
<ide> }
<ide> }
<ide> class Compilation extends Tapable {
<ide> }
<ide>
<ide> function assignIndexToDependencyBlock(block) {
<del> var allDependencies = [];
<add> let allDependencies = [];
<ide>
<ide> function iteratorDependency(d) {
<ide> allDependencies.push(d);
<ide> class Compilation extends Tapable {
<ide> iterationOfArrayCallback(block.dependencies, iteratorDependency);
<ide> }
<ide> if(block.blocks) {
<del> var blocks = block.blocks;
<del> var indexBlock = blocks.length;
<add> const blocks = block.blocks;
<add> let indexBlock = blocks.length;
<ide> while(indexBlock--) {
<ide> iteratorBlock(blocks[indexBlock]);
<ide> }
<ide> }
<ide>
<del> var indexAll = allDependencies.length;
<add> let indexAll = allDependencies.length;
<ide> while(indexAll--) {
<ide> iteratorAllDependencies(allDependencies[indexAll]);
<ide> }
<ide> class Compilation extends Tapable {
<ide> ];
<ide>
<ide> while(queue.length) {
<del> var queueItem = queue.pop();
<add> const queueItem = queue.pop();
<ide> block = queueItem[0];
<ide> chunk = queueItem[1];
<ide>
<ide> class Compilation extends Tapable {
<ide> }
<ide> };
<ide>
<del> var blocks = block.blocks;
<del> for(var indexBlock = 0; indexBlock < blocks.length; indexBlock++) {
<del> var chunks = blocks[indexBlock].chunks;
<del> for(var indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<del> var blockChunk = chunks[indexChunk];
<add> const blocks = block.blocks;
<add> for(let indexBlock = 0; indexBlock < blocks.length; indexBlock++) {
<add> const chunks = blocks[indexBlock].chunks;
<add> for(let indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<add> const blockChunk = chunks[indexChunk];
<ide> chunk.removeChunk(blockChunk);
<ide> blockChunk.removeParent(chunk);
<ide> this.removeChunkFromDependencies(chunks, blockChunk);
<ide> class Compilation extends Tapable {
<ide> }
<ide>
<ide> applyModuleIds() {
<del> var unusedIds = [];
<del> var nextFreeModuleId = 0;
<del> var usedIds = [];
<add> let unusedIds = [];
<add> let nextFreeModuleId = 0;
<add> let usedIds = [];
<ide> // TODO consider Map when performance has improved https://gist.github.com/sokra/234c077e1299b7369461f1708519c392
<del> var usedIdMap = Object.create(null);
<add> const usedIdMap = Object.create(null);
<ide> if(this.usedModuleIds) {
<ide> Object.keys(this.usedModuleIds).forEach(key => {
<ide> const id = this.usedModuleIds[key];
<ide> class Compilation extends Tapable {
<ide> });
<ide> }
<ide>
<del> var modules1 = this.modules;
<del> for(var indexModule1 = 0; indexModule1 < modules1.length; indexModule1++) {
<del> var module1 = modules1[indexModule1];
<add> const modules1 = this.modules;
<add> for(let indexModule1 = 0; indexModule1 < modules1.length; indexModule1++) {
<add> const module1 = modules1[indexModule1];
<ide> if(module1.id && !usedIdMap[module1.id]) {
<ide> usedIds.push(module1.id);
<ide> usedIdMap[module1.id] = true;
<ide> }
<ide> }
<ide>
<ide> if(usedIds.length > 0) {
<del> var usedIdMax = -1;
<del> for(var index = 0; index < usedIds.length; index++) {
<del> var usedIdKey = usedIds[index];
<add> let usedIdMax = -1;
<add> for(let index = 0; index < usedIds.length; index++) {
<add> const usedIdKey = usedIds[index];
<ide>
<ide> if(typeof usedIdKey !== "number") {
<ide> continue;
<ide> class Compilation extends Tapable {
<ide> usedIdMax = Math.max(usedIdMax, usedIdKey);
<ide> }
<ide>
<del> var lengthFreeModules = nextFreeModuleId = usedIdMax + 1;
<add> let lengthFreeModules = nextFreeModuleId = usedIdMax + 1;
<ide>
<ide> while(lengthFreeModules--) {
<ide> if(!usedIdMap[lengthFreeModules]) {
<ide> class Compilation extends Tapable {
<ide> }
<ide> }
<ide>
<del> var modules2 = this.modules;
<del> for(var indexModule2 = 0; indexModule2 < modules2.length; indexModule2++) {
<del> var module2 = modules2[indexModule2];
<add> const modules2 = this.modules;
<add> for(let indexModule2 = 0; indexModule2 < modules2.length; indexModule2++) {
<add> const module2 = modules2[indexModule2];
<ide> if(module2.id === null) {
<ide> if(unusedIds.length > 0)
<ide> module2.id = unusedIds.pop();
<ide> class Compilation extends Tapable {
<ide> let nextFreeChunkId = 0;
<ide>
<ide> function getNextFreeChunkId(usedChunkIds) {
<del> var keyChunks = Object.keys(usedChunkIds);
<del> var result = -1;
<add> const keyChunks = Object.keys(usedChunkIds);
<add> let result = -1;
<ide>
<del> for(var index = 0; index < keyChunks.length; index++) {
<del> var usedIdKey = keyChunks[index];
<del> var usedIdValue = usedChunkIds[usedIdKey];
<add> for(let index = 0; index < keyChunks.length; index++) {
<add> const usedIdKey = keyChunks[index];
<add> const usedIdValue = usedChunkIds[usedIdKey];
<ide>
<ide> if(typeof usedIdValue !== "number") {
<ide> continue;
<ide> class Compilation extends Tapable {
<ide>
<ide> if(this.usedChunkIds) {
<ide> nextFreeChunkId = getNextFreeChunkId(this.usedChunkIds) + 1;
<del> var index = nextFreeChunkId;
<add> let index = nextFreeChunkId;
<ide> while(index--) {
<ide> if(this.usedChunkIds[index] !== index) {
<ide> unusedIds.push(index);
<ide> }
<ide> }
<ide> }
<ide>
<del> var chunks = this.chunks;
<del> for(var indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<del> var chunk = chunks[indexChunk];
<add> const chunks = this.chunks;
<add> for(let indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<add> const chunk = chunks[indexChunk];
<ide> if(chunk.id === null) {
<ide> if(unusedIds.length > 0)
<ide> chunk.id = unusedIds.pop();
<ide> class Compilation extends Tapable {
<ide> sortItemsWithModuleIds() {
<ide> this.modules.sort(byId);
<ide>
<del> var modules = this.modules;
<del> for(var indexModule = 0; indexModule < modules.length; indexModule++) {
<add> const modules = this.modules;
<add> for(let indexModule = 0; indexModule < modules.length; indexModule++) {
<ide> modules[indexModule].sortItems();
<ide> }
<ide>
<del> var chunks = this.chunks;
<del> for(var indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<add> const chunks = this.chunks;
<add> for(let indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<ide> chunks[indexChunk].sortItems();
<ide> }
<ide> }
<ide>
<ide> sortItemsWithChunkIds() {
<ide> this.chunks.sort(byId);
<ide>
<del> var modules = this.modules;
<del> for(var indexModule = 0; indexModule < modules.length; indexModule++) {
<add> const modules = this.modules;
<add> for(let indexModule = 0; indexModule < modules.length; indexModule++) {
<ide> modules[indexModule].sortItems();
<ide> }
<ide>
<del> var chunks = this.chunks;
<del> for(var indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<add> const chunks = this.chunks;
<add> for(let indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<ide> chunks[indexChunk].sortItems();
<ide> }
<ide> }
<ide> class Compilation extends Tapable {
<ide> this.contextDependencies = [];
<ide> this.missingDependencies = [];
<ide>
<del> var children = this.children;
<del> for(var indexChildren = 0; indexChildren < children.length; indexChildren++) {
<del> var child = children[indexChildren];
<add> const children = this.children;
<add> for(let indexChildren = 0; indexChildren < children.length; indexChildren++) {
<add> const child = children[indexChildren];
<ide>
<ide> this.fileDependencies = this.fileDependencies.concat(child.fileDependencies);
<ide> this.contextDependencies = this.contextDependencies.concat(child.contextDependencies);
<ide> this.missingDependencies = this.missingDependencies.concat(child.missingDependencies);
<ide> }
<ide>
<del> var modules = this.modules;
<del> for(var indexModule = 0; indexModule < modules.length; indexModule++) {
<del> var module = modules[indexModule];
<add> const modules = this.modules;
<add> for(let indexModule = 0; indexModule < modules.length; indexModule++) {
<add> const module = modules[indexModule];
<ide>
<ide> if(module.fileDependencies) {
<del> var fileDependencies = module.fileDependencies;
<del> for(var indexFileDep = 0; indexFileDep < fileDependencies.length; indexFileDep++) {
<add> const fileDependencies = module.fileDependencies;
<add> for(let indexFileDep = 0; indexFileDep < fileDependencies.length; indexFileDep++) {
<ide> this.fileDependencies.push(fileDependencies[indexFileDep]);
<ide> }
<ide> }
<ide> if(module.contextDependencies) {
<del> var contextDependencies = module.contextDependencies;
<del> for(var indexContextDep = 0; indexContextDep < contextDependencies.length; indexContextDep++) {
<add> const contextDependencies = module.contextDependencies;
<add> for(let indexContextDep = 0; indexContextDep < contextDependencies.length; indexContextDep++) {
<ide> this.contextDependencies.push(contextDependencies[indexContextDep]);
<ide> }
<ide> }
<ide> class Compilation extends Tapable {
<ide> checkConstraints() {
<ide> const usedIds = {};
<ide>
<del> var modules = this.modules;
<del> for(var indexModule = 0; indexModule < modules.length; indexModule++) {
<del> var moduleId = modules[indexModule].id;
<add> const modules = this.modules;
<add> for(let indexModule = 0; indexModule < modules.length; indexModule++) {
<add> const moduleId = modules[indexModule].id;
<ide>
<ide> if(usedIds[moduleId])
<ide> throw new Error(`checkConstraints: duplicate module id ${moduleId}`);
<ide> }
<ide>
<del> var chunks = this.chunks;
<del> for(var indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<del> var chunk = chunks[indexChunk];
<add> const chunks = this.chunks;
<add> for(let indexChunk = 0; indexChunk < chunks.length; indexChunk++) {
<add> const chunk = chunks[indexChunk];
<ide>
<ide> if(chunks.indexOf(chunk) !== indexChunk)
<ide> throw new Error(`checkConstraints: duplicate chunk in compilation ${chunk.debugId}`); | 3 |
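As background for the `var` to `let`/`const` conversions above, here is a small standalone example, not part of the patch, showing the block-scoping difference that motivates preferring `let` inside loops:

```js
// With var there is a single function-scoped binding, so every closure
// created in the loop ends up seeing the final value of i.
const withVar = [];
for (var i = 0; i < 3; i++) withVar.push(() => i);
console.log(withVar.map(fn => fn())); // [ 3, 3, 3 ]

// With let each iteration gets its own binding, so each closure captures
// the value of j from the iteration that created it.
const withLet = [];
for (let j = 0; j < 3; j++) withLet.push(() => j);
console.log(withLet.map(fn => fn())); // [ 0, 1, 2 ]
```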
Python | Python | add an unitest for issue | 6ca31c947aaeb618c33115bfcd6b684b5f2a642b | <ide><path>unitest-restful.py
<ide> def test_010_history(self):
<ide> self.assertIsInstance(req.json()['system'], list)
<ide> self.assertTrue(len(req.json()['system']) > 1)
<ide>
<add> def test_011_issue1401(self):
<add> """Check issue #1401."""
<add> method = "network/interface_name"
<add> print('INFO: [TEST_011] Issue #1401')
<add> req = self.http_get("%s/%s" % (URL, method))
<add> self.assertTrue(req.ok)
<add> self.assertIsInstance(req.json(), dict)
<add> self.assertIsInstance(req.json()['interface_name'], list)
<add>
<ide> def test_999_stop_server(self):
<ide> """Stop the Glances Web Server."""
<ide> print('INFO: [TEST_999] Stop the Glances Web Server') | 1 |
Javascript | Javascript | avoid cyclic dependency on vector3 | 58b6bd2dadd9986192ec80683af53e4f2c229ba5 | <ide><path>src/math/Quaternion.js
<ide> */
<ide>
<ide> import { _Math } from './Math.js';
<del>import { Vector3 } from './Vector3.js';
<ide>
<ide> function Quaternion( x, y, z, w ) {
<ide>
<ide> Object.assign( Quaternion.prototype, {
<ide>
<ide> // assumes direction vectors vFrom and vTo are normalized
<ide>
<del> var v1 = new Vector3();
<ide> var r;
<ide>
<ide> var EPS = 0.000001;
<ide>
<ide> return function setFromUnitVectors( vFrom, vTo ) {
<ide>
<del> if ( v1 === undefined ) v1 = new Vector3();
<del>
<ide> r = vFrom.dot( vTo ) + 1;
<ide>
<ide> if ( r < EPS ) {
<ide> Object.assign( Quaternion.prototype, {
<ide>
<ide> if ( Math.abs( vFrom.x ) > Math.abs( vFrom.z ) ) {
<ide>
<del> v1.set( - vFrom.y, vFrom.x, 0 );
<add> this._x = - vFrom.y;
<add> this._y = vFrom.x;
<add> this._z = 0;
<add> this._w = r;
<ide>
<ide> } else {
<ide>
<del> v1.set( 0, - vFrom.z, vFrom.y );
<add> this._x = 0;
<add> this._y = - vFrom.z;
<add> this._z = vFrom.y;
<add> this._w = r;
<ide>
<ide> }
<ide>
<ide> } else {
<ide>
<del> v1.crossVectors( vFrom, vTo );
<add> // crossVectors( vFrom, vTo ); // inlined to avoid cyclic dependency on Vector3
<ide>
<del> }
<add> this._x = vFrom.y * vTo.z - vFrom.z * vTo.y;
<add> this._y = vFrom.z * vTo.x - vFrom.x * vTo.z;
<add> this._z = vFrom.x * vTo.y - vFrom.y * vTo.x;
<add> this._w = r;
<ide>
<del> this._x = v1.x;
<del> this._y = v1.y;
<del> this._z = v1.z;
<del> this._w = r;
<add> }
<ide>
<ide> return this.normalize();
<ide> | 1 |
Text | Text | replace env description with args in extend config | abdc031aeaa757546735da0075868b87dd99ef7a | <ide><path>docs/extend/config.md
<ide> Config provides the base accessible fields for working with V0 plugin format
<ide>
<ide> - **`name`** *string*
<ide>
<del> name of the env.
<add> name of the args.
<ide>
<ide> - **`description`** *string*
<ide>
<del> description of the env.
<add> description of the args.
<ide>
<ide> - **`value`** *string array*
<ide> | 1 |
Javascript | Javascript | fix flaky smartos test | 5b80ca9339683dd94cc9d7adae99640c4ec9f859 | <ide><path>test/parallel/test-net-server-max-connections.js
<ide> function makeConnection(index) {
<ide> if (index + 1 < N) {
<ide> makeConnection(index + 1);
<ide> }
<add>
<add> c.on('close', function() {
<add> console.error('closed %d', index);
<add> closes++;
<add>
<add> if (closes < N / 2) {
<add> assert.ok(server.maxConnections <= index,
<add> index +
<add> ' was one of the first closed connections ' +
<add> 'but shouldnt have been');
<add> }
<add>
<add> if (closes === N / 2) {
<add> var cb;
<add> console.error('calling wait callback.');
<add> while (cb = waits.shift()) {
<add> cb();
<add> }
<add> server.close();
<add> }
<add>
<add> if (index < server.maxConnections) {
<add> assert.equal(true, gotData,
<add> index + ' didn\'t get data, but should have');
<add> } else {
<add> assert.equal(false, gotData,
<add> index + ' got data, but shouldn\'t have');
<add> }
<add> });
<ide> });
<ide>
<ide> c.on('end', function() { c.end(); });
<ide> function makeConnection(index) {
<ide> });
<ide>
<ide> c.on('error', function(e) {
<del> console.error('error %d: %s', index, e);
<del> });
<del>
<del> c.on('close', function() {
<del> console.error('closed %d', index);
<del> closes++;
<del>
<del> if (closes < N / 2) {
<del> assert.ok(server.maxConnections <= index,
<del> index +
<del> ' was one of the first closed connections ' +
<del> 'but shouldnt have been');
<del> }
<del>
<del> if (closes === N / 2) {
<del> var cb;
<del> console.error('calling wait callback.');
<del> while (cb = waits.shift()) {
<del> cb();
<del> }
<del> server.close();
<del> }
<del>
<del> if (index < server.maxConnections) {
<del> assert.equal(true, gotData,
<del> index + ' didn\'t get data, but should have');
<del> } else {
<del> assert.equal(false, gotData,
<del> index + ' got data, but shouldn\'t have');
<add> // Retry if SmartOS and ECONNREFUSED. See
<add> // https://github.com/nodejs/node/issues/2663.
<add> if (common.isSunOS && (e.code === 'ECONNREFUSED')) {
<add> c.connect(common.PORT);
<ide> }
<add> console.error('error %d: %s', index, e);
<ide> });
<ide> }
<ide> | 1 |
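The fix above retries `connect()` from the `'error'` handler when SmartOS reports `ECONNREFUSED`. The following self-contained sketch shows the same retry-on-`ECONNREFUSED` idea; the port probing, the attempt limit, and the use of a fresh socket per attempt (the test reuses the same socket) are illustrative assumptions rather than part of the test.

```js
'use strict';
const net = require('net');

// Listen briefly to find a free port, then close the server so a later
// connection attempt is refused and the retry path gets exercised.
const probe = net.createServer();
probe.listen(0, () => {
  const port = probe.address().port;
  probe.close(() => {
    let attempts = 0;
    const attempt = () => {
      const client = net.connect(port);
      client.on('connect', () => client.end());
      client.on('error', (err) => {
        if (err.code === 'ECONNREFUSED' && attempts < 3) {
          attempts += 1;
          console.log(`retrying after ECONNREFUSED (attempt ${attempts})`);
          attempt();
        } else {
          console.log('giving up:', err.code);
        }
      });
    };
    attempt();
  });
});
```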
Go | Go | fix getcontainer() returning (nil, nil) | 00157a42d367eca1dc140a5638d41444ab7434ce | <ide><path>daemon/container.go
<ide> func (daemon *Daemon) GetContainer(prefixOrName string) (*container.Container, e
<ide> }
<ide> return nil, errdefs.System(indexError)
<ide> }
<del> return daemon.containers.Get(containerID), nil
<add> ctr := daemon.containers.Get(containerID)
<add> if ctr == nil {
<add> // Updates to the daemon.containersReplica ViewDB are not atomic
<add> // or consistent w.r.t. the live daemon.containers Store so
<add> // while reaching this code path may be indicative of a bug,
<add> // it is not _necessarily_ the case.
<add> logrus.WithField("prefixOrName", prefixOrName).
<add> WithField("id", containerID).
<add> Debugf("daemon.GetContainer: container is known to daemon.containersReplica but not daemon.containers")
<add> return nil, containerNotFound(prefixOrName)
<add> }
<add> return ctr, nil
<ide> }
<ide>
<ide> // checkContainer make sure the specified container validates the specified conditions | 1 |
PHP | PHP | remove obsolete class import in request.php | df9c223ed684f75d4f6aa1a628c14ff052ee870e | <ide><path>laravel/request.php
<del><?php namespace Laravel; use Closure;
<add><?php namespace Laravel;
<ide>
<ide> class Request {
<ide> | 1 |
Javascript | Javascript | add new-stream util | c8c0fc88e9b04065addfcd45919867b7fcf99725 | <ide><path>packages/ember-htmlbars/lib/hooks/bind-self.js
<ide> @submodule ember-htmlbars
<ide> */
<ide>
<del>import ProxyStream from 'ember-metal/streams/proxy-stream';
<del>import subscribe from 'ember-htmlbars/utils/subscribe';
<add>import newStream from 'ember-htmlbars/utils/new-stream';
<ide>
<ide> export default function bindSelf(env, scope, _self) {
<ide> let self = _self;
<ide> export default function bindSelf(env, scope, _self) {
<ide> scope.locals.controller = scope.self;
<ide> }
<ide> }
<del>
<del>function newStream(scope, key, newValue, renderNode, isSelf) {
<del> var stream = new ProxyStream(newValue, isSelf ? '' : key);
<del> if (renderNode) { subscribe(renderNode, scope, stream); }
<del> scope[key] = stream;
<del>}
<ide><path>packages/ember-htmlbars/lib/hooks/bind-shadow-scope.js
<ide> @submodule ember-htmlbars
<ide> */
<ide>
<add>import newStream from 'ember-htmlbars/utils/new-stream';
<add>
<ide> export default function bindShadowScope(env, parentScope, shadowScope, options) {
<ide> if (!options) { return; }
<ide>
<ide> export default function bindShadowScope(env, parentScope, shadowScope, options)
<ide>
<ide> return shadowScope;
<ide> }
<del>
<del>import ProxyStream from 'ember-metal/streams/proxy-stream';
<del>import subscribe from 'ember-htmlbars/utils/subscribe';
<del>
<del>function newStream(scope, key, newValue, renderNode, isSelf) {
<del> var stream = new ProxyStream(newValue, isSelf ? '' : key);
<del> if (renderNode) { subscribe(renderNode, scope, stream); }
<del> scope[key] = stream;
<del>}
<ide><path>packages/ember-htmlbars/lib/utils/new-stream.js
<add>import ProxyStream from 'ember-metal/streams/proxy-stream';
<add>import subscribe from 'ember-htmlbars/utils/subscribe';
<add>
<add>export default function newStream(scope, key, newValue, renderNode, isSelf) {
<add> var stream = new ProxyStream(newValue, isSelf ? '' : key);
<add> if (renderNode) { subscribe(renderNode, scope, stream); }
<add> scope[key] = stream;
<add>} | 3 |
Javascript | Javascript | resolve prettier issues | f34a9ebd2ca6de6ac023dc34e920f4bc35de3636 | <ide><path>lib/DelegatedModule.js
<ide> class DelegatedModule extends Module {
<ide> hash.update(JSON.stringify(this.request));
<ide> super.updateHash(hash, chunkGraph);
<ide> }
<del>
<add>
<ide> serialize(context) {
<ide> const { write } = context;
<ide> // constructor
<ide> class DelegatedModule extends Module {
<ide> obj.deserialize(context);
<ide> return obj;
<ide> }
<del>
<add>
<ide> deserialize(context) {
<ide> super.deserialize(context);
<ide> }
<ide> class DelegatedModule extends Module {
<ide> this.delegationType = m.delegationType;
<ide> this.userRequest = m.userRequest;
<ide> this.originalRequest = m.originalRequest;
<del> this.delegateData = m.delegateData
<add> this.delegateData = m.delegateData;
<ide> }
<del>
<ide> }
<ide>
<ide> makeSerializable(DelegatedModule, "webpack/lib/DelegatedModule");
<ide><path>lib/dependencies/DelegatedSourceDependency.js
<ide> class DelegatedSourceDependency extends ModuleDependency {
<ide> get type() {
<ide> return "delegated source";
<ide> }
<del>
<add>
<ide> serialize(context) {
<ide> super.serialize(context);
<ide> } | 2 |
PHP | PHP | add method to paginator | b0b042018c326982e6410852c7a757802f1bf650 | <ide><path>src/Illuminate/Pagination/Paginator.php
<ide> public function nextPageUrl()
<ide> }
<ide> }
<ide>
<add> /**
<add> * Manually indicate that the paginator does have more pages.
<add> *
<add> * @return $this
<add> */
<add> public function doesHaveMorePages()
<add> {
<add> $this->hasMore = true;
<add>
<add> return $this;
<add> }
<add>
<ide> /**
<ide> * Determine if there are more items in the data source.
<ide> * | 1 |
PHP | PHP | use the current timestamp as a default | 0c2b7da2635f7bbbaf63d1b93fa817232bdd9d65 | <ide><path>src/Illuminate/Database/Schema/Blueprint.php
<ide> public function nullableTimestamps()
<ide> */
<ide> public function timestamps()
<ide> {
<del> $this->timestamp('created_at');
<add> $this->timestamp('created_at')->useCurrent();
<ide>
<del> $this->timestamp('updated_at');
<add> $this->timestamp('updated_at')->useCurrent();
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | add .renderoutline() to outlineeffect | 4d77551ca2d49ad7db8fd75126d565613cdd6f87 | <ide><path>examples/js/effects/OutlineEffect.js
<ide> *
<ide> * Reference: https://en.wikipedia.org/wiki/Cel_shading
<ide> *
<add> * API
<add> *
<add> * 1. Traditional
<add> *
<add> * var effect = new THREE.OutlineEffect( renderer );
<add> *
<add> * function render() {
<add> *
<add> * effect.render( scene, camera );
<add> *
<add> * }
<add> *
<add> * 2. VR compatible
<add> *
<add> * var effect = new THREE.OutlineEffect( renderer );
<add> * var renderingOutline = false;
<add> *
<add> * scene.onAfterRender = function () {
<add> *
<add> * if ( renderingOutline ) return;
<add> *
<add> * renderingOutline = true;
<add> *
<add> * effect.renderOutline( scene, camera );
<add> *
<add> * renderingOutline = false;
<add> *
<add> * };
<add> *
<add> * function render() {
<add> *
<add> * renderer.render( scene, camera );
<add> *
<add> * }
<add> *
<ide> * // How to set default outline parameters
<ide> * new THREE.OutlineEffect( renderer, {
<ide> * defaultThickness: 0.01,
<ide> THREE.OutlineEffect = function ( renderer, parameters ) {
<ide> var currentAutoClear = renderer.autoClear;
<ide> renderer.autoClear = this.autoClear;
<ide>
<del> // 1. render normally
<ide> renderer.render( scene, camera );
<ide>
<del> // 2. render outline
<add> renderer.autoClear = currentAutoClear;
<add>
<add> this.renderOutline( scene, camera );
<add>
<add> };
<add>
<add> this.renderOutline = function ( scene, camera ) {
<add>
<add> var currentAutoClear = renderer.autoClear;
<ide> var currentSceneAutoUpdate = scene.autoUpdate;
<ide> var currentSceneBackground = scene.background;
<ide> var currentShadowMapEnabled = renderer.shadowMap.enabled; | 1 |
PHP | PHP | remove methods that just call parent | 5f86e1fcf68b1fbd7bca79d4f377233aa1323332 | <ide><path>src/I18n/Time.php
<ide> public static function listTimezones($filter = null, $country = null, $options =
<ide>
<ide> return array_combine($identifiers, $identifiers);
<ide> }
<del>
<del> /**
<del> * Returns true this instance will happen within the specified interval
<del> *
<del> * @param string $timeInterval the numeric value with space then time type.
<del> * Example of valid types: 6 hours, 2 days, 1 minute.
<del> * @return bool
<del> */
<del> public function wasWithinLast($timeInterval)
<del> {
<del> return parent::wasWithinLast($timeInterval);
<del> }
<del>
<del> /**
<del> * Returns true this instance happened within the specified interval
<del> *
<del> * @param string $timeInterval the numeric value with space then time type.
<del> * Example of valid types: 6 hours, 2 days, 1 minute.
<del> * @return bool
<del> */
<del> public function isWithinNext($timeInterval)
<del> {
<del> return parent::isWithinNext($timeInterval);
<del> }
<ide> } | 1 |
Text | Text | add usage recommendation for writable._destroy | 5016181697d63af9501ff3d6a719ba50237eb518 | <ide><path>doc/api/stream.md
<ide> added: v8.0.0
<ide>
<ide> The `_destroy()` method is called by [`writable.destroy()`][writable-destroy].
<ide> It can be overridden by child classes but it **must not** be called directly.
<add>Furthermore, the `callback` should not be mixed with async/await, as it is
<add>executed when a promise is resolved.
<ide>
<ide> #### `writable._final(callback)`
<ide> | 1 |
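To make the recommendation concrete, here is a minimal sketch of a `Writable` subclass that implements `_destroy()` in the plain callback style described above and triggers it through `writable.destroy()`. It is not part of the Node.js documentation, and the `FileLikeWritable` class is an illustrative assumption.

```js
'use strict';
const { Writable } = require('stream');

class FileLikeWritable extends Writable {
  _write(chunk, encoding, callback) {
    // Pretend to flush the chunk somewhere, then signal completion.
    callback();
  }

  // Called internally by writable.destroy(); never call _destroy() directly.
  _destroy(err, callback) {
    // Release any underlying resources here, then hand the error (or null)
    // straight to the callback rather than returning a promise.
    callback(err);
  }
}

const writable = new FileLikeWritable();
writable.on('close', () => console.log('stream closed'));
writable.write('hello');
writable.destroy(); // invokes _destroy(null, callback)
```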
Ruby | Ruby | parse opam archives | cc7a047edc8dbd34706dfe4014a78dc0b176cf6b | <ide><path>Library/Homebrew/test/test_versions.rb
<ide> def test_with_arch
<ide> assert_version_detected "4.0.18",
<ide> "http://ftpmirror.gnu.org/mtools/mtools_4.0.18_i386.deb"
<ide> end
<add>
<add> def test_opam_version
<add> assert_version_detected "2.18.3",
<add> "https://opam.ocaml.org/archives/lablgtk.2.18.3+opam.tar.gz"
<add> assert_version_detected "1.9",
<add> "https://opam.ocaml.org/archives/sha.1.9+opam.tar.gz"
<add> assert_version_detected "0.99.2",
<add> "https://opam.ocaml.org/archives/ppx_tools.0.99.2+opam.tar.gz"
<add> assert_version_detected "1.0.2",
<add> "https://opam.ocaml.org/archives/easy-format.1.0.2+opam.tar.gz"
<add> end
<ide> end
<ide><path>Library/Homebrew/version.rb
<ide> def self._parse(spec)
<ide> m = /-(\d+\.\d+(?:\.\d+)?)-w(?:in)?(?:32|64)$/.match(stem)
<ide> return m.captures.first unless m.nil?
<ide>
<add> # Opam packages
<add> # e.g. https://opam.ocaml.org/archives/sha.1.9+opam.tar.gz
<add> # e.g. https://opam.ocaml.org/archives/lablgtk.2.18.3+opam.tar.gz
<add> # e.g. https://opam.ocaml.org/archives/easy-format.1.0.2+opam.tar.gz
<add> m = /\.(\d+\.\d+(?:\.\d+)?)\+opam$/.match(stem)
<add> return m.captures.first unless m.nil?
<add>
<ide> # e.g. http://ftpmirror.gnu.org/mtools/mtools-4.0.18-1.i686.rpm
<ide> # e.g. http://ftpmirror.gnu.org/autogen/autogen-5.5.7-5.i386.rpm
<ide> # e.g. http://ftpmirror.gnu.org/libtasn1/libtasn1-2.8-x86.zip | 2 |
Ruby | Ruby | move relation#merge tests into separate file | 742adce211649a3d3965d134cd53feab287a0bf6 | <ide><path>activerecord/test/cases/relation/merging_test.rb
<add>require 'cases/helper'
<add>require 'models/author'
<add>require 'models/comment'
<add>require 'models/developer'
<add>require 'models/post'
<add>require 'models/project'
<add>
<add>class RelationMergingTest < ActiveRecord::TestCase
<add> fixtures :developers, :comments, :authors, :posts
<add>
<add> def test_relation_merging
<add> devs = Developer.where("salary >= 80000").merge(Developer.limit(2)).merge(Developer.order('id ASC').where("id < 3"))
<add> assert_equal [developers(:david), developers(:jamis)], devs.to_a
<add>
<add> dev_with_count = Developer.limit(1).merge(Developer.order('id DESC')).merge(Developer.select('developers.*'))
<add> assert_equal [developers(:poor_jamis)], dev_with_count.to_a
<add> end
<add>
<add> def test_relation_to_sql
<add> sql = Post.connection.unprepared_statement do
<add> Post.first.comments.to_sql
<add> end
<add> assert_no_match(/\?/, sql)
<add> end
<add>
<add> def test_relation_merging_with_arel_equalities_keeps_last_equality
<add> devs = Developer.where(Developer.arel_table[:salary].eq(80000)).merge(
<add> Developer.where(Developer.arel_table[:salary].eq(9000))
<add> )
<add> assert_equal [developers(:poor_jamis)], devs.to_a
<add> end
<add>
<add> def test_relation_merging_with_arel_equalities_keeps_last_equality_with_non_attribute_left_hand
<add> salary_attr = Developer.arel_table[:salary]
<add> devs = Developer.where(
<add> Arel::Nodes::NamedFunction.new('abs', [salary_attr]).eq(80000)
<add> ).merge(
<add> Developer.where(
<add> Arel::Nodes::NamedFunction.new('abs', [salary_attr]).eq(9000)
<add> )
<add> )
<add> assert_equal [developers(:poor_jamis)], devs.to_a
<add> end
<add>
<add> def test_relation_merging_with_eager_load
<add> relations = []
<add> relations << Post.order('comments.id DESC').merge(Post.eager_load(:last_comment)).merge(Post.all)
<add> relations << Post.eager_load(:last_comment).merge(Post.order('comments.id DESC')).merge(Post.all)
<add>
<add> relations.each do |posts|
<add> post = posts.find { |p| p.id == 1 }
<add> assert_equal Post.find(1).last_comment, post.last_comment
<add> end
<add> end
<add>
<add> def test_relation_merging_with_locks
<add> devs = Developer.lock.where("salary >= 80000").order("id DESC").merge(Developer.limit(2))
<add> assert devs.locked.present?
<add> end
<add>
<add> def test_relation_merging_with_preload
<add> [Post.all.merge(Post.preload(:author)), Post.preload(:author).merge(Post.all)].each do |posts|
<add> assert_queries(2) { assert posts.first.author }
<add> end
<add> end
<add>
<add> def test_relation_merging_with_joins
<add> comments = Comment.joins(:post).where(:body => 'Thank you for the welcome').merge(Post.where(:body => 'Such a lovely day'))
<add> assert_equal 1, comments.count
<add> end
<add>
<add> def test_relation_merging_with_association
<add> assert_queries(2) do # one for loading post, and another one merged query
<add> post = Post.where(:body => 'Such a lovely day').first
<add> comments = Comment.where(:body => 'Thank you for the welcome').merge(post.comments)
<add> assert_equal 1, comments.count
<add> end
<add> end
<add>
<add> test "merge collapses wheres from the LHS only" do
<add> left = Post.where(title: "omg").where(comments_count: 1)
<add> right = Post.where(title: "wtf").where(title: "bbq")
<add>
<add> expected = [left.where_values[1]] + right.where_values
<add> merged = left.merge(right)
<add>
<add> assert_equal expected, merged.where_values
<add> assert !merged.to_sql.include?("omg")
<add> assert merged.to_sql.include?("wtf")
<add> assert merged.to_sql.include?("bbq")
<add> end
<add>
<add> def test_merging_removes_rhs_bind_parameters
<add> left = Post.where(id: Arel::Nodes::BindParam.new('?'))
<add> column = Post.columns_hash['id']
<add> left.bind_values += [[column, 20]]
<add> right = Post.where(id: 10)
<add>
<add> merged = left.merge(right)
<add> assert_equal [], merged.bind_values
<add> end
<add>
<add> def test_merging_keeps_lhs_bind_parameters
<add> column = Post.columns_hash['id']
<add> binds = [[column, 20]]
<add>
<add> right = Post.where(id: Arel::Nodes::BindParam.new('?'))
<add> right.bind_values += binds
<add> left = Post.where(id: 10)
<add>
<add> merged = left.merge(right)
<add> assert_equal binds, merged.bind_values
<add> end
<add>
<add> def test_merging_reorders_bind_params
<add> post = Post.first
<add> id_column = Post.columns_hash['id']
<add> title_column = Post.columns_hash['title']
<add>
<add> bv = Post.connection.substitute_at id_column, 0
<add>
<add> right = Post.where(id: bv)
<add> right.bind_values += [[id_column, post.id]]
<add>
<add> left = Post.where(title: bv)
<add> left.bind_values += [[title_column, post.title]]
<add>
<add> merged = left.merge(right)
<add> assert_equal post, merged.first
<add> end
<add>end
<ide><path>activerecord/test/cases/relations_test.rb
<ide> def test_select_argument_error
<ide> assert_raises(ArgumentError) { Developer.select }
<ide> end
<ide>
<del> def test_relation_merging
<del> devs = Developer.where("salary >= 80000").merge(Developer.limit(2)).merge(Developer.order('id ASC').where("id < 3"))
<del> assert_equal [developers(:david), developers(:jamis)], devs.to_a
<del>
<del> dev_with_count = Developer.limit(1).merge(Developer.order('id DESC')).merge(Developer.select('developers.*'))
<del> assert_equal [developers(:poor_jamis)], dev_with_count.to_a
<del> end
<del>
<del> def test_relation_to_sql
<del> sql = Post.connection.unprepared_statement do
<del> Post.first.comments.to_sql
<del> end
<del> assert_no_match(/\?/, sql)
<del> end
<del>
<del> def test_relation_merging_with_arel_equalities_keeps_last_equality
<del> devs = Developer.where(Developer.arel_table[:salary].eq(80000)).merge(
<del> Developer.where(Developer.arel_table[:salary].eq(9000))
<del> )
<del> assert_equal [developers(:poor_jamis)], devs.to_a
<del> end
<del>
<del> def test_relation_merging_with_arel_equalities_keeps_last_equality_with_non_attribute_left_hand
<del> salary_attr = Developer.arel_table[:salary]
<del> devs = Developer.where(
<del> Arel::Nodes::NamedFunction.new('abs', [salary_attr]).eq(80000)
<del> ).merge(
<del> Developer.where(
<del> Arel::Nodes::NamedFunction.new('abs', [salary_attr]).eq(9000)
<del> )
<del> )
<del> assert_equal [developers(:poor_jamis)], devs.to_a
<del> end
<del>
<del> def test_relation_merging_with_eager_load
<del> relations = []
<del> relations << Post.order('comments.id DESC').merge(Post.eager_load(:last_comment)).merge(Post.all)
<del> relations << Post.eager_load(:last_comment).merge(Post.order('comments.id DESC')).merge(Post.all)
<del>
<del> relations.each do |posts|
<del> post = posts.find { |p| p.id == 1 }
<del> assert_equal Post.find(1).last_comment, post.last_comment
<del> end
<del> end
<del>
<del> def test_relation_merging_with_locks
<del> devs = Developer.lock.where("salary >= 80000").order("id DESC").merge(Developer.limit(2))
<del> assert devs.locked.present?
<del> end
<del>
<del> def test_relation_merging_with_preload
<del> [Post.all.merge(Post.preload(:author)), Post.preload(:author).merge(Post.all)].each do |posts|
<del> assert_queries(2) { assert posts.first.author }
<del> end
<del> end
<del>
<del> def test_relation_merging_with_joins
<del> comments = Comment.joins(:post).where(:body => 'Thank you for the welcome').merge(Post.where(:body => 'Such a lovely day'))
<del> assert_equal 1, comments.count
<del> end
<del>
<del> def test_relation_merging_with_association
<del> assert_queries(2) do # one for loading post, and another one merged query
<del> post = Post.where(:body => 'Such a lovely day').first
<del> comments = Comment.where(:body => 'Thank you for the welcome').merge(post.comments)
<del> assert_equal 1, comments.count
<del> end
<del> end
<del>
<ide> def test_count
<ide> posts = Post.all
<ide>
<ide> def __omg__
<ide> Array.send(:remove_method, :__omg__)
<ide> end
<ide> end
<del>
<del> test "merge collapses wheres from the LHS only" do
<del> left = Post.where(title: "omg").where(comments_count: 1)
<del> right = Post.where(title: "wtf").where(title: "bbq")
<del>
<del> expected = [left.where_values[1]] + right.where_values
<del> merged = left.merge(right)
<del>
<del> assert_equal expected, merged.where_values
<del> assert !merged.to_sql.include?("omg")
<del> assert merged.to_sql.include?("wtf")
<del> assert merged.to_sql.include?("bbq")
<del> end
<del>
<del> def test_merging_removes_rhs_bind_parameters
<del> left = Post.where(id: Arel::Nodes::BindParam.new('?'))
<del> column = Post.columns_hash['id']
<del> left.bind_values += [[column, 20]]
<del> right = Post.where(id: 10)
<del>
<del> merged = left.merge(right)
<del> assert_equal [], merged.bind_values
<del> end
<del>
<del> def test_merging_keeps_lhs_bind_parameters
<del> column = Post.columns_hash['id']
<del> binds = [[column, 20]]
<del>
<del> right = Post.where(id: Arel::Nodes::BindParam.new('?'))
<del> right.bind_values += binds
<del> left = Post.where(id: 10)
<del>
<del> merged = left.merge(right)
<del> assert_equal binds, merged.bind_values
<del> end
<del>
<del> def test_merging_reorders_bind_params
<del> post = Post.first
<del> id_column = Post.columns_hash['id']
<del> title_column = Post.columns_hash['title']
<del>
<del> bv = Post.connection.substitute_at id_column, 0
<del>
<del> right = Post.where(id: bv)
<del> right.bind_values += [[id_column, post.id]]
<del>
<del> left = Post.where(title: bv)
<del> left.bind_values += [[title_column, post.title]]
<del>
<del> merged = left.merge(right)
<del> assert_equal post, merged.first
<del> end
<ide> end | 2 |
Text | Text | make the readme for textsum a little clearer | 3bbc5d2f38095747a3ccc27e6f1dad58d11d5e64 | <ide><path>textsum/README.md
<ide> vocabulary size: Most frequent 200k words from dataset's article and summaries.
<ide>
<ide> <b>How To Run</b>
<ide>
<del>Pre-requesite:
<del>
<del>Install TensorFlow and Bazel.
<add>Prerequisite: install TensorFlow and Bazel.
<ide>
<ide> ```shell
<ide> # cd to your workspace
<ide> Install TensorFlow and Bazel.
<ide> # If your data files have different names, update the --data_path.
<ide> # If you don't have data but want to try out the model, copy the toy
<ide> # data from the textsum/data/data to the data/ directory in the workspace.
<del>ls -R
<add>$ ls -R
<ide> .:
<ide> data textsum WORKSPACE
<ide>
<ide> data.py seq2seq_attention_decode.py seq2seq_attention.py seq2seq_lib.py
<ide> ./textsum/data:
<ide> data vocab
<ide>
<del>bazel build -c opt --config=cuda textsum/...
<add>$ bazel build -c opt --config=cuda textsum/...
<ide>
<ide> # Run the training.
<del>bazel-bin/textsum/seq2seq_attention \
<del> --mode=train \
<del> --article_key=article \
<del> --abstract_key=abstract \
<del> --data_path=data/training-* \
<del> --vocab_path=data/vocab \
<del> --log_root=textsum/log_root \
<del> --train_dir=textsum/log_root/train
<add>$ bazel-bin/textsum/seq2seq_attention \
<add> --mode=train \
<add> --article_key=article \
<add> --abstract_key=abstract \
<add> --data_path=data/training-* \
<add> --vocab_path=data/vocab \
<add> --log_root=textsum/log_root \
<add> --train_dir=textsum/log_root/train
<ide>
<ide> # Run the eval. Try to avoid running on the same machine as training.
<del>bazel-bin/textsum/seq2seq_attention \
<del> --mode=eval \
<del> --article_key=article \
<del> --abstract_key=abstract \
<del> --data_path=data/validation-* \
<del> --vocab_path=data/vocab \
<del> --log_root=textsum/log_root \
<del> --eval_dir=textsum/log_root/eval
<add>$ bazel-bin/textsum/seq2seq_attention \
<add> --mode=eval \
<add> --article_key=article \
<add> --abstract_key=abstract \
<add> --data_path=data/validation-* \
<add> --vocab_path=data/vocab \
<add> --log_root=textsum/log_root \
<add> --eval_dir=textsum/log_root/eval
<ide>
<ide> # Run the decode. Run it when the most is mostly converged.
<del>bazel-bin/textsum/seq2seq_attention \
<del> --mode=decode \
<del> --article_key=article \
<del> --abstract_key=abstract \
<del> --data_path=data/test-* \
<del> --vocab_path=data/vocab \
<del> --log_root=textsum/log_root \
<del> --decode_dir=textsum/log_root/decode \
<del> --beam_size=8
<add>$ bazel-bin/textsum/seq2seq_attention \
<add> --mode=decode \
<add> --article_key=article \
<add> --abstract_key=abstract \
<add> --data_path=data/test-* \
<add> --vocab_path=data/vocab \
<add> --log_root=textsum/log_root \
<add> --decode_dir=textsum/log_root/decode \
<add> --beam_size=8
<ide> ```
<ide>
<ide>
<ide> article: the european court of justice ( ecj ) recently ruled in lock v british
<ide>
<ide> abstract: will british gas ecj ruling fuel holiday pay hike ?
<ide>
<del>decode: eu law requires worker 's statutory holiday pay
<add>decode: eu law requires worker 's statutory holiday pay
<ide>
<ide> ======================================
<ide> | 1 |
Text | Text | add v3.17.0-beta.5 to changelog | 230050945f50ad36aff3d05263b688870aff474b | <ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<add>### v3.17.0-beta.5 (February 18, 2020)
<add>
<add>- [#18730](https://github.com/emberjs/ember.js/pull/18730) Workaround for the Glimmer VM bug which encodes/decodes integer literals correctly.
<add>
<ide> ### v3.17.0-beta.4 (February 10, 2020)
<ide>
<ide> - [#18727](https://github.com/emberjs/ember.js/pull/18727) [BUGFIX] Avoid breaking {{-in-element}} usage | 1 |
PHP | PHP | ignore the remaining errors | edd54e4ddf89325018cd05386bdec42562fd5797 | <ide><path>lib/Cake/Test/Case/Utility/SetTest.php
<ide> public function testMapReverse() {
<ide> array('id' => 1, 'article_id' => 1, 'user_id' => 1, 'comment' => 'First Comment for First Article', 'published' => 'Y', 'created' => '2007-03-18 10:47:23', 'updated' => '2007-03-18 10:49:31'),
<ide> array('id' => 2, 'article_id' => 1, 'user_id' => 2, 'comment' => 'Second Comment for First Article', 'published' => 'Y', 'created' => '2007-03-18 10:47:23', 'updated' => '2007-03-18 10:49:31'))));
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $class = new stdClass;
<ide> $class->User = new stdClass;
<ide> $class->User->psword = 'whatever';
<ide> public function testMapReverse() {
<ide> $comment2->published = 'Y';
<ide> $comment2->created = '2007-03-18 10:47:23';
<ide> $comment2->updated = '2007-03-18 10:49:31';
<add> // @codingStandardsIgnoreEnd
<ide> $class->User->Comment = array($comment, $comment2);
<ide> $result = Set::reverse($class);
<ide> $this->assertEquals($expected, $result);
<ide> public function testMapReverse() {
<ide> );
<ide> $this->assertEquals($expected, $result);
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $class = new stdClass;
<ide> $class->User = new stdClass;
<ide> $class->User->id = 100;
<ide> $class->User->_name_ = 'User';
<ide> $class->Profile = new stdClass;
<ide> $class->Profile->name = 'Joe Mamma';
<ide> $class->Profile->_name_ = 'Profile';
<add> // @codingStandardsIgnoreEnd
<ide>
<ide> $result = Set::reverse($class);
<ide> $expected = array('User' => array('id' => '100'), 'Profile' => array('name' => 'Joe Mamma'));
<ide> public function testMapNesting() {
<ide> );
<ide> $mapped = Set::map($data);
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $expected = new stdClass();
<ide> $expected->_name_ = 'IndexedPage';
<ide> $expected->id = 2;
<ide> public function testMapNesting() {
<ide> $expected->redirect = '';
<ide> $expected->created = "1195055503";
<ide> $expected->updated = "1195055503";
<add> // @codingStandardsIgnoreEnd
<ide> $this->assertEquals($mapped[1], $expected);
<ide>
<ide> $ids = array();
<ide> public function testNestedMappedData() {
<ide> )
<ide> ));
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $expected = new stdClass;
<ide> $expected->_name_ = 'Post';
<ide> $expected->id = '1';
<ide> public function testNestedMappedData() {
<ide> $expected2->Author->updated = "2007-03-17 01:22:31";
<ide> $expected2->Author->test = "working";
<ide> $expected2->Author->_name_ = 'Author';
<add> // @codingStandardsIgnoreEnd
<ide>
<ide> $test = array();
<ide> $test[0] = $expected;
<ide> public function testNestedMappedData() {
<ide> $this->assertEquals($test, $result);
<ide>
<ide> $result = Set::map(
<del> array(
<del> 'Post' => array('id' => '1', 'author_id' => '1', 'title' => 'First Post', 'body' => 'First Post Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31'),
<del> 'Author' => array('id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'test' => 'working'),
<del> )
<del> );
<add> array(
<add> 'Post' => array('id' => '1', 'author_id' => '1', 'title' => 'First Post', 'body' => 'First Post Body', 'published' => 'Y', 'created' => '2007-03-18 10:39:23', 'updated' => '2007-03-18 10:41:31'),
<add> 'Author' => array('id' => '1', 'user' => 'mariano', 'password' => '5f4dcc3b5aa765d61d8327deb882cf99', 'created' => '2007-03-17 01:16:23', 'updated' => '2007-03-17 01:18:31', 'test' => 'working'),
<add> )
<add> );
<add> // @codingStandardsIgnoreStart
<ide> $expected = new stdClass;
<ide> $expected->_name_ = 'Post';
<ide> $expected->id = '1';
<ide> public function testNestedMappedData() {
<ide> $expected->Author->updated = "2007-03-17 01:18:31";
<ide> $expected->Author->test = "working";
<ide> $expected->Author->_name_ = 'Author';
<add> // @codingStandardsIgnoreEnd
<ide> $this->assertEquals($expected, $result);
<ide>
<ide> //Case where extra HABTM fields come back in a result
<ide> public function testNestedMappedData() {
<ide>
<ide> $result = Set::map($data);
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $expected = new stdClass();
<ide> $expected->_name_ = 'User';
<ide> $expected->id = 1;
<ide> public function testNestedMappedData() {
<ide> $piece2->PiecesUser->_name_ = 'PiecesUser';
<ide>
<ide> $piece2->_name_ = 'Piece';
<add> // @codingStandardsIgnoreEnd
<ide>
<ide> $expected->Piece = array($piece, $piece2);
<ide>
<ide> public function testNestedMappedData() {
<ide>
<ide> $result = Set::map($data);
<ide>
<add> // @codingStandardsIgnoreStart
<ide> $expected = new stdClass();
<ide> $expected->_name_ = 'FooUser';
<ide> $expected->id = 1;
<ide> public function testNestedMappedData() {
<ide> $piece2->PiecesUser->piece_id = 2;
<ide> $piece2->PiecesUser->user_id = 2;
<ide> $piece2->PiecesUser->_name_ = 'FooPiecesUser';
<add> // @codingStandardsIgnoreEnd
<ide>
<ide> $expected->Piece = array($piece, $piece2);
<ide> | 1 |
Python | Python | fix typo in methodview doc | dbeed240674ed7a7160ae82a603eaa930dd1fd00 | <ide><path>flask/views.py
<ide> def __new__(cls, name, bases, d):
<ide> class MethodView(with_metaclass(MethodViewType, View)):
<ide> """Like a regular class-based view but that dispatches requests to
<ide> particular methods. For instance if you implement a method called
<del> :meth:`get` it means you will response to ``'GET'`` requests and
<add> :meth:`get` it means it will respond to ``'GET'`` requests and
<ide> the :meth:`dispatch_request` implementation will automatically
<ide> forward your request to that. Also :attr:`options` is set for you
<ide> automatically:: | 1 |
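For illustration only (not part of the patch above), here is a minimal sketch of the dispatch pattern that the corrected `MethodView` docstring describes: each HTTP verb maps to a same-named method on the view class. The `CounterAPI` class name and `/counter` rule are hypothetical; the registration call assumes Flask's standard `as_view`/`add_url_rule` API.

```python
from flask import Flask
from flask.views import MethodView

app = Flask(__name__)


class CounterAPI(MethodView):
    # GET requests to the registered rule are dispatched here
    def get(self):
        return "current count"

    # POST requests are dispatched here
    def post(self):
        return "count incremented", 201


# Register the class-based view under a URL rule
app.add_url_rule("/counter", view_func=CounterAPI.as_view("counter"))
```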
PHP | PHP | remove unused var | 97e2e4395f0a00897f57b2d5807b296f20cd90fa | <ide><path>tests/TestCase/Database/TypeTest.php
<ide> public function testMapAndBuild()
<ide> public function testReMapAndBuild()
<ide> {
<ide> $fooType = FooType::class;
<del> $map = Type::map('foo', $fooType);
<add> Type::map('foo', $fooType);
<ide> $type = Type::build('foo');
<ide> $this->assertInstanceOf($fooType, $type);
<ide> | 1 |
PHP | PHP | remove redundant call to empty() | 5baac38e32619081e08406c5052c5c567bda45ce | <ide><path>src/Http/Session.php
<ide> public function delete(string $name): void
<ide> */
<ide> protected function _overwrite(array &$old, array $new): void
<ide> {
<del> if (!empty($old)) {
<del> foreach ($old as $key => $var) {
<del> if (!isset($new[$key])) {
<del> unset($old[$key]);
<del> }
<add> foreach ($old as $key => $var) {
<add> if (!isset($new[$key])) {
<add> unset($old[$key]);
<ide> }
<ide> }
<add>
<ide> foreach ($new as $key => $var) {
<ide> $old[$key] = $var;
<ide> } | 1 |
Ruby | Ruby | prefer head version if its installed | a305360099d5aa973640556653ec0b5be266a792 | <ide><path>Library/Homebrew/brew.h.rb
<ide> def info f
<ide> kids=f.prefix.parent.children
<ide> kids.each do |keg|
<ide> print "#{keg} (#{keg.abv})"
<del> print " *" if f.prefix == keg and kids.length > 1
<add> print " *" if f.installed_prefix == keg and kids.length > 1
<ide> puts
<ide> end
<ide> else
<ide> def cleanup name
<ide> if f.installed? and formula_cellar.directory?
<ide> kids = f.prefix.parent.children
<ide> kids.each do |keg|
<del> next if f.prefix == keg
<add> next if f.installed_prefix == keg
<ide> print "Uninstalling #{keg}..."
<ide> FileUtils.rm_rf keg
<ide> puts
<ide><path>Library/Homebrew/formula.rb
<ide> def initialize name='__UNKNOWN__'
<ide>
<ide> # if the dir is there, but it's empty we consider it not installed
<ide> def installed?
<del> return prefix.children.length > 0
<add> return installed_prefix.children.length > 0
<ide> rescue
<ide> return false
<ide> end
<ide>
<add> def installed_prefix
<add> head_prefix = HOMEBREW_CELLAR+@name+'HEAD'
<add> if @version == 'HEAD' || head_prefix.directory?
<add> head_prefix
<add> else
<add> prefix
<add> end
<add> end
<add>
<ide> def path
<ide> self.class.path name
<ide> end | 2 |
Python | Python | update example for ec2's multi-locationness | a7d84010941f651f0a96be5ca26a953d29e65ab8 | <ide><path>example.py
<ide> from libcloud.types import Provider
<ide> from libcloud.providers import get_driver
<ide>
<del>EC2 = get_driver(Provider.EC2)
<add>EC2 = get_driver(Provider.EC2_US_EAST)
<ide> Slicehost = get_driver(Provider.SLICEHOST)
<ide> Rackspace = get_driver(Provider.RACKSPACE)
<ide> | 1 |
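As a purely illustrative sketch (not taken from the original `example.py`), the region-specific driver class returned by `get_driver(Provider.EC2_US_EAST)` would typically be instantiated with account credentials and then queried. The credential placeholders and the `list_nodes` call below assume the early libcloud driver API of that era.

```python
from libcloud.types import Provider
from libcloud.providers import get_driver

# Region-specific driver class, matching the updated example
EC2Driver = get_driver(Provider.EC2_US_EAST)

# Hypothetical credentials -- replace with a real access key id and secret
conn = EC2Driver("ACCESS_KEY_ID", "SECRET_KEY")

# List nodes in the us-east region (assumed driver API)
for node in conn.list_nodes():
    print("%s: %s" % (node.name, node.state))
```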
Text | Text | add nokogiri to docs generation guides | e950c3e47b89a83aa81a9b0cde3c1f4848dc006d | <ide><path>guides/source/getting_started.md
<ide> command-line utility:
<ide> in your web browser to explore the API documentation.
<ide>
<ide> TIP: To be able to generate the Rails Guides locally with the `doc:guides` rake
<del>task you need to install the RedCloth gem. Add it to your `Gemfile` and run
<add>task you need to install the RedCloth and Nokogiri gems. Add it to your `Gemfile` and run
<ide> `bundle install` and you're ready to go.
<ide>
<ide> Configuration Gotchas | 1 |
Text | Text | post about v0.10.2 | 708e8589ea76548f61ccbf8c064fd65a34429b1f | <ide><path>doc/blog/release/v0.10.2.md
<add>date: Thu Mar 28 13:00:39 PDT 2013
<add>version: 0.10.2
<add>category: release
<add>title: Node v0.10.2 (Stable)
<add>slug: node-v0-10-2-stable
<add>
<add>2013.03.28, Version 0.10.2 (Stable)
<add>
<add>* npm: Upgrade to 1.2.15
<add>
<add>* uv: Upgrade to 0.10.3
<add>
<add>* tls: handle SSL_ERROR_ZERO_RETURN (Fedor Indutny)
<add>
<add>* tls: handle errors before calling C++ methods (Fedor Indutny)
<add>
<add>* tls: remove harmful unnecessary bounds checking (Marcel Laverdet)
<add>
<add>* crypto: make getCiphers() return non-SSL ciphers (Ben Noordhuis)
<add>
<add>* crypto: check randomBytes() size argument (Ben Noordhuis)
<add>
<add>* timers: do not calculate Timeout._when property (Alexey Kupershtokh)
<add>
<add>* timers: fix off-by-one ms error (Alexey Kupershtokh)
<add>
<add>* timers: handle signed int32 overflow in enroll() (Fedor Indutny)
<add>
<add>* stream: Fix stall in Transform under very specific conditions (Gil Pedersen)
<add>
<add>* stream: Handle late 'readable' event listeners (isaacs)
<add>
<add>* stream: Fix early end in Writables on zero-length writes (isaacs)
<add>
<add>* domain: fix domain callback from MakeCallback (Trevor Norris)
<add>
<add>* child_process: don't emit same handle twice (Ben Noordhuis)
<add>
<add>* child_process: fix sending utf-8 to child process (Ben Noordhuis)
<add>
<add>
<add>Source Code: http://nodejs.org/dist/v0.10.2/node-v0.10.2.tar.gz
<add>
<add>Macintosh Installer (Universal): http://nodejs.org/dist/v0.10.2/node-v0.10.2.pkg
<add>
<add>Windows Installer: http://nodejs.org/dist/v0.10.2/node-v0.10.2-x86.msi
<add>
<add>Windows x64 Installer: http://nodejs.org/dist/v0.10.2/x64/node-v0.10.2-x64.msi
<add>
<add>Windows x64 Files: http://nodejs.org/dist/v0.10.2/x64/
<add>
<add>Linux 32-bit Binary: http://nodejs.org/dist/v0.10.2/node-v0.10.2-linux-x86.tar.gz
<add>
<add>Linux 64-bit Binary: http://nodejs.org/dist/v0.10.2/node-v0.10.2-linux-x64.tar.gz
<add>
<add>Solaris 32-bit Binary: http://nodejs.org/dist/v0.10.2/node-v0.10.2-sunos-x86.tar.gz
<add>
<add>Solaris 64-bit Binary: http://nodejs.org/dist/v0.10.2/node-v0.10.2-sunos-x64.tar.gz
<add>
<add>Other release files: http://nodejs.org/dist/v0.10.2/
<add>
<add>Website: http://nodejs.org/docs/v0.10.2/
<add>
<add>Documentation: http://nodejs.org/docs/v0.10.2/api/
<add>
<add>Shasums:
<add>
<add>```
<add>860ed25d3e77d4676b5512f87f3f98b6783ee258 node-v0.10.2-darwin-x64.tar.gz
<add>811eb3b66651dfffeaf928496e8eecab5c9304fb node-v0.10.2-darwin-x86.tar.gz
<add>0013be477da5d066471390c9964f796356b48948 node-v0.10.2-linux-x64.tar.gz
<add>97c3a052d833bfc799bc9b748520a15cfb189a58 node-v0.10.2-linux-x86.tar.gz
<add>17bc5bf26af7da790e6b0c4cbb2b73ea1c9f2ed5 node-v0.10.2-sunos-x64.tar.gz
<add>5e02e35cc15ae56953921ad4c8e45b849c736e20 node-v0.10.2-sunos-x86.tar.gz
<add>2adb1bf5919fb8adeaf96edd8a8ed16d71a3f8f8 node-v0.10.2-x86.msi
<add>73ff97a4d2d3bb1f468db2654b5b59a28f868cce node-v0.10.2.pkg
<add>759a05eff48ff0b54e55748012c5c45502f7cecd node-v0.10.2.tar.gz
<add>6c1336a61395747fed20a12c8977a2b2ecf23354 node.exe
<add>f0775d4f649ee9c3d5614fdb26e64bc7d000cd5d node.exp
<add>9860c6eb9062fbdc50b515f4ccab179f74dd3ec8 node.lib
<add>d41d99a3921022533c1760e15447ce3acf050a7d node.pdb
<add>1dbd11a5278831356daca035fe5bbbe1062798b4 x64/node-v0.10.2-x64.msi
<add>d36abd4ecf02c522e8c75fce24eab1ce800d6458 x64/node.exe
<add>295a950fe3c1c3ceb04249474388b891bf2a39ed x64/node.exp
<add>b64eabafc3f9498552b3ea97bd0d922db1f90f75 x64/node.lib
<add>1f31d6c0079e9f2c9a6de3d956649d83ca6e7a25 x64/node.pdb
<add>``` | 1 |