Dataset columns (type, min – max):

  hexsha              stringlengths   40 – 40
  size                int64           2 – 1.01M
  content             stringlengths   2 – 1.01M
  avg_line_length     float64         1.5 – 100
  max_line_length     int64           2 – 1k
  alphanum_fraction   float64         0.25 – 1
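The six columns above describe each record that follows: a file hash, its size, the raw file content, and three simple line/character statistics. As a rough illustrative sketch only — the `records` array, the hash keys, and the threshold values below are assumptions for illustration, not part of any dataset tooling — records of this shape could be filtered in Ruby like so:

# Minimal sketch: filter records shaped like the schema above.
# The example record and the thresholds are illustrative assumptions.
records = [
  {
    "hexsha"            => "5d673e6e48bdcae3e1a9851e82d5dcb5f4a8c922",
    "size"              => 587,
    "content"           => "# frozen_string_literal: true ...",
    "avg_line_length"   => 21.740741,
    "max_line_length"   => 77,
    "alphanum_fraction" => 0.545145
  }
]

# Keep records whose statistics fall inside the ranges the schema reports.
filtered = records.select do |r|
  r["avg_line_length"].between?(1.5, 100) &&
    r["max_line_length"] <= 1_000 &&
    r["alphanum_fraction"] >= 0.25
end

puts filtered.map { |r| "#{r['hexsha']} (#{r['size']})" }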
hexsha: 5d673e6e48bdcae3e1a9851e82d5dcb5f4a8c922 | size: 587
# frozen_string_literal: true

module V1
  module Resources
    class Orders < Base
      resource :customers do
        route_param :customer_id, type: Integer do
          get :orders do
            orders = ::Orders::Report.call(customer_id: params[:customer_id])
            present orders
          end

          params do
            requires :sku
          end
          post :orders do
            order = ::Orders::Factory.call(customer_id: params[:customer_id], sku: params[:sku])
            present order
          end
        end
      end
    end
  end
end
avg_line_length: 21.740741 | max_line_length: 77 | alphanum_fraction: 0.545145

hexsha: 79626ac0b76b769ed7e25d06ea346ef16d479aa9 | size: 4,557
# frozen_string_literal: true RSpec.describe RuboCop::Cop::Style::BisectedAttrAccessor do subject(:cop) { described_class.new } it 'registers an offense and corrects when both accessors of the name exists' do expect_offense(<<~RUBY) class Foo attr_reader :bar ^^^^ Combine both accessors into `attr_accessor :bar`. attr_writer :bar ^^^^ Combine both accessors into `attr_accessor :bar`. other_macro :something end RUBY expect_correction(<<~RUBY) class Foo attr_accessor :bar #{trailing_whitespace} other_macro :something end RUBY end it 'registers an offense and corrects when attr and attr_writer exists' do expect_offense(<<~RUBY) class Foo attr :bar ^^^^ Combine both accessors into `attr_accessor :bar`. attr_writer :bar ^^^^ Combine both accessors into `attr_accessor :bar`. other_macro :something end RUBY expect_correction(<<~RUBY) class Foo attr_accessor :bar #{trailing_whitespace} other_macro :something end RUBY end it 'registers an offense and corrects when both accessors of the splat exists' do expect_offense(<<~RUBY) class Foo ATTRIBUTES = %i[foo bar] attr_reader *ATTRIBUTES ^^^^^^^^^^^ Combine both accessors into `attr_accessor *ATTRIBUTES`. attr_writer *ATTRIBUTES ^^^^^^^^^^^ Combine both accessors into `attr_accessor *ATTRIBUTES`. other_macro :something end RUBY expect_correction(<<~RUBY) class Foo ATTRIBUTES = %i[foo bar] attr_accessor *ATTRIBUTES #{trailing_whitespace} other_macro :something end RUBY end it 'registers an offense and corrects when both accessors of the name exists and accessor contains multiple names' do expect_offense(<<~RUBY) class Foo attr_reader :baz, :bar, :quux ^^^^ Combine both accessors into `attr_accessor :bar`. attr_writer :bar, :zoo ^^^^ Combine both accessors into `attr_accessor :bar`. other_macro :something end RUBY expect_correction(<<~RUBY) class Foo attr_accessor :bar attr_reader :baz, :quux attr_writer :zoo other_macro :something end RUBY end it 'registers an offense and corrects when both accessors are in the same visibility scope' do expect_offense(<<~RUBY) class Foo attr_reader :bar ^^^^ Combine both accessors into `attr_accessor :bar`. attr_writer :bar ^^^^ Combine both accessors into `attr_accessor :bar`. private attr_writer :baz ^^^^ Combine both accessors into `attr_accessor :baz`. attr_reader :baz ^^^^ Combine both accessors into `attr_accessor :baz`. end RUBY expect_correction(<<~RUBY) class Foo attr_accessor :bar #{trailing_whitespace} private #{trailing_whitespace} attr_accessor :baz end RUBY end it 'registers an offense and corrects when withing eigenclass' do expect_offense(<<~RUBY) class Foo attr_reader :bar class << self attr_reader :baz ^^^^ Combine both accessors into `attr_accessor :baz`. attr_writer :baz ^^^^ Combine both accessors into `attr_accessor :baz`. private attr_reader :quux end end RUBY expect_correction(<<~RUBY) class Foo attr_reader :bar class << self attr_accessor :baz #{trailing_whitespace} private attr_reader :quux end end RUBY end it 'does not register an offense when only one accessor of the name exists' do expect_no_offenses(<<~RUBY) class Foo attr_reader :bar attr_writer :baz end RUBY end it 'does not register an offense when accessors are withing different visibility scopes' do expect_no_offenses(<<~RUBY) class Foo attr_reader :bar private attr_writer :baz end RUBY end it 'does not register an offense when using `attr_accessor`' do expect_no_offenses(<<~RUBY) class Foo attr_accessor :bar end RUBY end end
avg_line_length: 25.176796 | max_line_length: 119 | alphanum_fraction: 0.592715

hexsha: e82cfcbf2ece0f385dd815d796646e5eac34c62a | size: 139
require 'spec_helper'

describe Github::Client::Repos::Projects do
  it_should_behave_like 'api interface'
end # Github::Repos::Projects
avg_line_length: 17.375 | max_line_length: 43 | alphanum_fraction: 0.776978

hexsha: 1c03321176e7e69122ec559ad9cfbb745f1e1799 | size: 1,441
require 'pronto'
require 'brakeman'

module Pronto
  class Brakeman < Runner
    def run
      return [] if ruby_patches.none?
      output = ::Brakeman.run(app_path: repo_path, output_formats: [:to_s])
      messages_for(ruby_patches, output).compact
    rescue ::Brakeman::NoApplication
      []
    end

    def messages_for(ruby_patches, output)
      output.filtered_warnings.map do |warning|
        patch = patch_for_warning(ruby_patches, warning)
        next unless patch
        line = patch.added_lines.find do |added_line|
          added_line.new_lineno == warning.line
        end
        new_message(line, warning) if line
      end
    end

    def new_message(line, warning)
      Message.new(line.patch.delta.new_file[:path],
                  line,
                  severity_for_confidence(warning.confidence),
                  "Possible security vulnerability: [#{warning.message}](#{warning.link})",
                  nil,
                  self.class)
    end

    def severity_for_confidence(confidence_level)
      case confidence_level
      when 0 # Brakeman High confidence
        :fatal
      when 1 # Brakeman Medium confidence
        :warning
      else # Brakeman Low confidence (and other possibilities)
        :info
      end
    end

    def patch_for_warning(ruby_patches, warning)
      ruby_patches.find do |patch|
        patch.new_file_full_path.to_s == warning.file.to_s
      end
    end
  end
end
avg_line_length: 26.685185 | max_line_length: 91 | alphanum_fraction: 0.632894

hexsha: b9ba80068b1b340a72cb6813841e64c1f89f4312 | size: 37,131
# # Author:: Adam Jacob (<[email protected]>) # Author:: Christopher Walters (<[email protected]>) # Author:: Tim Hinderliter (<[email protected]>) # Author:: Seth Chisamore (<[email protected]>) # Copyright:: Copyright (c) 2008-2011 Opscode, Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require 'spec_helper' describe Chef::Resource do before(:each) do @cookbook_repo_path = File.join(CHEF_SPEC_DATA, 'cookbooks') @cookbook_collection = Chef::CookbookCollection.new(Chef::CookbookLoader.new(@cookbook_repo_path)) @node = Chef::Node.new @events = Chef::EventDispatch::Dispatcher.new @run_context = Chef::RunContext.new(@node, @cookbook_collection, @events) @resource = Chef::Resource.new("funk", @run_context) end it "should mixin shell_out" do expect(@resource.respond_to?(:shell_out)).to be true end it "should mixin shell_out!" do expect(@resource.respond_to?(:shell_out!)).to be true end it "should mixin shell_out_with_systems_locale" do expect(@resource.respond_to?(:shell_out_with_systems_locale)).to be true end describe "when inherited" do it "adds an entry to a list of subclasses" do subclass = Class.new(Chef::Resource) expect(Chef::Resource.resource_classes).to include(subclass) end it "keeps track of subclasses of subclasses" do subclass = Class.new(Chef::Resource) subclass_of_subclass = Class.new(subclass) expect(Chef::Resource.resource_classes).to include(subclass_of_subclass) end end describe "when declaring the identity attribute" do it "has :name as identity attribute by default" do expect(Chef::Resource.identity_attr).to eq(:name) end it "sets an identity attribute" do resource_class = Class.new(Chef::Resource) resource_class.identity_attr(:path) expect(resource_class.identity_attr).to eq(:path) end it "inherits an identity attribute from a superclass" do resource_class = Class.new(Chef::Resource) resource_subclass = Class.new(resource_class) resource_class.identity_attr(:package_name) expect(resource_subclass.identity_attr).to eq(:package_name) end it "overrides the identity attribute from a superclass when the identity attr is set" do resource_class = Class.new(Chef::Resource) resource_subclass = Class.new(resource_class) resource_class.identity_attr(:package_name) resource_subclass.identity_attr(:something_else) expect(resource_subclass.identity_attr).to eq(:something_else) end end describe "when no identity attribute has been declared" do before do @resource_sans_id = Chef::Resource.new("my-name") end # Would rather force identity attributes to be set for everything, # but that's not plausible for back compat reasons. 
it "uses the name as the identity" do expect(@resource_sans_id.identity).to eq("my-name") end end describe "when an identity attribute has been declared" do before do @file_resource_class = Class.new(Chef::Resource) do identity_attr :path attr_accessor :path end @file_resource = @file_resource_class.new("identity-attr-test") @file_resource.path = "/tmp/foo.txt" end it "gives the value of its identity attribute" do expect(@file_resource.identity).to eq("/tmp/foo.txt") end end describe "when declaring state attributes" do it "has no state_attrs by default" do expect(Chef::Resource.state_attrs).to be_empty end it "sets a list of state attributes" do resource_class = Class.new(Chef::Resource) resource_class.state_attrs(:checksum, :owner, :group, :mode) expect(resource_class.state_attrs).to match_array([:checksum, :owner, :group, :mode]) end it "inherits state attributes from the superclass" do resource_class = Class.new(Chef::Resource) resource_subclass = Class.new(resource_class) resource_class.state_attrs(:checksum, :owner, :group, :mode) expect(resource_subclass.state_attrs).to match_array([:checksum, :owner, :group, :mode]) end it "combines inherited state attributes with non-inherited state attributes" do resource_class = Class.new(Chef::Resource) resource_subclass = Class.new(resource_class) resource_class.state_attrs(:checksum, :owner) resource_subclass.state_attrs(:group, :mode) expect(resource_subclass.state_attrs).to match_array([:checksum, :owner, :group, :mode]) end end describe "when a set of state attributes has been declared" do before do @file_resource_class = Class.new(Chef::Resource) do state_attrs :checksum, :owner, :group, :mode attr_accessor :checksum attr_accessor :owner attr_accessor :group attr_accessor :mode end @file_resource = @file_resource_class.new("describe-state-test") @file_resource.checksum = "abc123" @file_resource.owner = "root" @file_resource.group = "wheel" @file_resource.mode = "0644" end it "describes its state" do resource_state = @file_resource.state expect(resource_state.keys).to match_array([:checksum, :owner, :group, :mode]) expect(resource_state[:checksum]).to eq("abc123") expect(resource_state[:owner]).to eq("root") expect(resource_state[:group]).to eq("wheel") expect(resource_state[:mode]).to eq("0644") end end describe "load_from" do before(:each) do @prior_resource = Chef::Resource.new("funk") @prior_resource.supports(:funky => true) @prior_resource.source_line @prior_resource.allowed_actions << :funkytown @prior_resource.action(:funkytown) @resource.allowed_actions << :funkytown @run_context.resource_collection << @prior_resource end it "should load the attributes of a prior resource" do @resource.load_from(@prior_resource) expect(@resource.supports).to eq({ :funky => true }) end it "should not inherit the action from the prior resource" do @resource.load_from(@prior_resource) expect(@resource.action).not_to eq(@prior_resource.action) end end describe "name" do it "should have a name" do expect(@resource.name).to eql("funk") end it "should let you set a new name" do @resource.name "monkey" expect(@resource.name).to eql("monkey") end it "coerces arrays to names" do expect(@resource.name ['a', 'b']).to eql('a, b') end it "should coerce objects to a string" do expect(@resource.name Object.new).to be_a(String) end end describe "noop" do it "should accept true or false for noop" do expect { @resource.noop true }.not_to raise_error expect { @resource.noop false }.not_to raise_error expect { @resource.noop "eat it" }.to raise_error(ArgumentError) end 
end describe "notifies" do it "should make notified resources appear in the actions hash" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "coffee") expect(@resource.delayed_notifications.detect{|e| e.resource.name == "coffee" && e.action == :reload}).not_to be_nil end it "should make notified resources be capable of acting immediately" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "coffee"), :immediate expect(@resource.immediate_notifications.detect{|e| e.resource.name == "coffee" && e.action == :reload}).not_to be_nil end it "should raise an exception if told to act in other than :delay or :immediate(ly)" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") expect { @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "coffee"), :someday }.to raise_error(ArgumentError) end it "should allow multiple notified resources appear in the actions hash" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "coffee") expect(@resource.delayed_notifications.detect{|e| e.resource.name == "coffee" && e.action == :reload}).not_to be_nil @run_context.resource_collection << Chef::Resource::ZenMaster.new("beans") @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "beans") expect(@resource.delayed_notifications.detect{|e| e.resource.name == "beans" && e.action == :reload}).not_to be_nil end it "creates a notification for a resource that is not yet in the resource collection" do @resource.notifies(:restart, :service => 'apache') expected_notification = Chef::Resource::Notification.new({:service => "apache"}, :restart, @resource) expect(@resource.delayed_notifications).to include(expected_notification) end it "notifies another resource immediately" do @resource.notifies_immediately(:restart, :service => 'apache') expected_notification = Chef::Resource::Notification.new({:service => "apache"}, :restart, @resource) expect(@resource.immediate_notifications).to include(expected_notification) end it "notifies a resource to take action at the end of the chef run" do @resource.notifies_delayed(:restart, :service => "apache") expected_notification = Chef::Resource::Notification.new({:service => "apache"}, :restart, @resource) expect(@resource.delayed_notifications).to include(expected_notification) end it "notifies a resource with an array for its name via its prettified string name" do @run_context.resource_collection << Chef::Resource::ZenMaster.new(["coffee", "tea"]) @resource.notifies :reload, @run_context.resource_collection.find(:zen_master => "coffee, tea") expect(@resource.delayed_notifications.detect{|e| e.resource.name == "coffee, tea" && e.action == :reload}).not_to be_nil end end describe "subscribes" do it "should make resources appear in the actions hash of subscribed nodes" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") zr = @run_context.resource_collection.find(:zen_master => "coffee") @resource.subscribes :reload, zr expect(zr.delayed_notifications.detect{|e| e.resource.name == "funk" && e.action == :reload}).not_to be_nil end it "should make resources appear in the actions hash of subscribed nodes" do @run_context.resource_collection << 
Chef::Resource::ZenMaster.new("coffee") zr = @run_context.resource_collection.find(:zen_master => "coffee") @resource.subscribes :reload, zr expect(zr.delayed_notifications.detect{|e| e.resource.name == @resource.name && e.action == :reload}).not_to be_nil @run_context.resource_collection << Chef::Resource::ZenMaster.new("bean") zrb = @run_context.resource_collection.find(:zen_master => "bean") zrb.subscribes :reload, zr expect(zr.delayed_notifications.detect{|e| e.resource.name == @resource.name && e.action == :reload}).not_to be_nil end it "should make subscribed resources be capable of acting immediately" do @run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee") zr = @run_context.resource_collection.find(:zen_master => "coffee") @resource.subscribes :reload, zr, :immediately expect(zr.immediate_notifications.detect{|e| e.resource.name == @resource.name && e.action == :reload}).not_to be_nil end end describe "defined_at" do it "should correctly parse source_line on unix-like operating systems" do @resource.source_line = "/some/path/to/file.rb:80:in `wombat_tears'" expect(@resource.defined_at).to eq("/some/path/to/file.rb line 80") end it "should correctly parse source_line on Windows" do @resource.source_line = "C:/some/path/to/file.rb:80 in 1`wombat_tears'" expect(@resource.defined_at).to eq("C:/some/path/to/file.rb line 80") end it "should include the cookbook and recipe when it knows it" do @resource.source_line = "/some/path/to/file.rb:80:in `wombat_tears'" @resource.recipe_name = "wombats" @resource.cookbook_name = "animals" expect(@resource.defined_at).to eq("animals::wombats line 80") end it "should recognize dynamically defined resources" do expect(@resource.defined_at).to eq("dynamically defined") end end describe "to_s" do it "should become a string like resource_name[name]" do zm = Chef::Resource::ZenMaster.new("coffee") expect(zm.to_s).to eql("zen_master[coffee]") end end describe "self.resource_name" do context "When resource_name is not set" do it "and there are no provides lines, resource_name is nil" do c = Class.new(Chef::Resource) do end r = c.new('hi') r.declared_type = :d expect(c.resource_name).to be_nil expect(r.resource_name).to be_nil expect(r.declared_type).to eq :d end it "and there are no provides lines, @resource_name is used" do c = Class.new(Chef::Resource) do def initialize(*args, &block) @resource_name = :blah super end end r = c.new('hi') r.declared_type = :d expect(c.resource_name).to be_nil expect(r.resource_name).to eq :blah expect(r.declared_type).to eq :d end it "and the resource class gets a late-bound name, resource_name is nil" do c = Class.new(Chef::Resource) do def self.name "ResourceSpecNameTest" end end r = c.new('hi') r.declared_type = :d expect(c.resource_name).to be_nil expect(r.resource_name).to be_nil expect(r.declared_type).to eq :d end end it "resource_name without provides is honored" do c = Class.new(Chef::Resource) do resource_name 'blah' end r = c.new('hi') r.declared_type = :d expect(c.resource_name).to eq :blah expect(r.resource_name).to eq :blah expect(r.declared_type).to eq :d end it "setting class.resource_name with 'resource_name = blah' overrides declared_type" do c = Class.new(Chef::Resource) do provides :self_resource_name_test_2 end c.resource_name = :blah r = c.new('hi') r.declared_type = :d expect(c.resource_name).to eq :blah expect(r.resource_name).to eq :blah expect(r.declared_type).to eq :d end it "setting class.resource_name with 'resource_name blah' overrides declared_type" do c = 
Class.new(Chef::Resource) do resource_name :blah provides :self_resource_name_test_3 end r = c.new('hi') r.declared_type = :d expect(c.resource_name).to eq :blah expect(r.resource_name).to eq :blah expect(r.declared_type).to eq :d end end describe "is" do it "should return the arguments passed with 'is'" do zm = Chef::Resource::ZenMaster.new("coffee") expect(zm.is("one", "two", "three")).to eq(%w|one two three|) end it "should allow arguments preceded by is to methods" do @resource.noop(@resource.is(true)) expect(@resource.noop).to eql(true) end end describe "to_json" do it "should serialize to json" do json = @resource.to_json expect(json).to match(/json_class/) expect(json).to match(/instance_vars/) end include_examples "to_json equivalent to Chef::JSONCompat.to_json" do let(:jsonable) { @resource } end end describe "to_hash" do it "should convert to a hash" do hash = @resource.to_hash expected_keys = [ :allowed_actions, :params, :provider, :updated, :updated_by_last_action, :before, :supports, :noop, :ignore_failure, :name, :source_line, :action, :retries, :retry_delay, :elapsed_time, :default_guard_interpreter, :guard_interpreter, :sensitive ] expect(hash.keys - expected_keys).to eq([]) expect(expected_keys - hash.keys).to eq([]) expect(hash[:name]).to eql("funk") end end describe "self.json_create" do it "should deserialize itself from json" do json = Chef::JSONCompat.to_json(@resource) serialized_node = Chef::JSONCompat.from_json(json) expect(serialized_node).to be_a_kind_of(Chef::Resource) expect(serialized_node.name).to eql(@resource.name) end end describe "supports" do it "should allow you to set what features this resource supports" do support_hash = { :one => :two } @resource.supports(support_hash) expect(@resource.supports).to eql(support_hash) end it "should return the current value of supports" do expect(@resource.supports).to eq({}) end end describe "ignore_failure" do it "should default to throwing an error if a provider fails for a resource" do expect(@resource.ignore_failure).to eq(false) end it "should allow you to set whether a provider should throw exceptions with ignore_failure" do @resource.ignore_failure(true) expect(@resource.ignore_failure).to eq(true) end it "should allow you to epic_fail" do @resource.epic_fail(true) expect(@resource.epic_fail).to eq(true) end end describe "retries" do before do @retriable_resource = Chef::Resource::Cat.new("precious", @run_context) @retriable_resource.provider = Chef::Provider::SnakeOil @retriable_resource.action = :purr @node.automatic_attrs[:platform] = "fubuntu" @node.automatic_attrs[:platform_version] = '10.04' end it "should default to not retrying if a provider fails for a resource" do expect(@retriable_resource.retries).to eq(0) end it "should allow you to set how many retries a provider should attempt after a failure" do @retriable_resource.retries(2) expect(@retriable_resource.retries).to eq(2) end it "should default to a retry delay of 2 seconds" do expect(@retriable_resource.retry_delay).to eq(2) end it "should allow you to set the retry delay" do @retriable_resource.retry_delay(10) expect(@retriable_resource.retry_delay).to eq(10) end it "should keep given value of retries intact after the provider fails for a resource" do @retriable_resource.retries(3) @retriable_resource.retry_delay(0) # No need to wait. 
provider = Chef::Provider::SnakeOil.new(@retriable_resource, @run_context) allow(Chef::Provider::SnakeOil).to receive(:new).and_return(provider) allow(provider).to receive(:action_purr).and_raise expect(@retriable_resource).to receive(:sleep).exactly(3).times expect { @retriable_resource.run_action(:purr) }.to raise_error expect(@retriable_resource.retries).to eq(3) end end describe "setting the base provider class for the resource" do it "defaults to Chef::Provider for the base class" do expect(Chef::Resource.provider_base).to eq(Chef::Provider) end it "allows the base provider to be overridden" do Chef::Config.treat_deprecation_warnings_as_errors(false) class OverrideProviderBaseTest < Chef::Resource provider_base Chef::Provider::Package end expect(OverrideProviderBaseTest.provider_base).to eq(Chef::Provider::Package) end it "warns when setting provider_base" do expect { class OverrideProviderBaseTest2 < Chef::Resource provider_base Chef::Provider::Package end }.to raise_error(Chef::Exceptions::DeprecatedFeatureError) end end it "runs an action by finding its provider, loading the current resource and then running the action" do skip end describe "when updated by a provider" do before do @resource.updated_by_last_action(true) end it "records that it was updated" do expect(@resource).to be_updated end it "records that the last action updated the resource" do expect(@resource).to be_updated_by_last_action end describe "and then run again without being updated" do before do @resource.updated_by_last_action(false) end it "reports that it is updated" do expect(@resource).to be_updated end it "reports that it was not updated by the last action" do expect(@resource).not_to be_updated_by_last_action end end end describe "when invoking its action" do before do @resource = Chef::Resource.new("provided", @run_context) @resource.provider = Chef::Provider::SnakeOil @node.automatic_attrs[:platform] = "fubuntu" @node.automatic_attrs[:platform_version] = '10.04' end it "does not run only_if if no only_if command is given" do expect_any_instance_of(Chef::Resource::Conditional).not_to receive(:evaluate) @resource.only_if.clear @resource.run_action(:purr) end it "runs runs an only_if when one is given" do snitch_variable = nil @resource.only_if { snitch_variable = true } expect(@resource.only_if.first.positivity).to eq(:only_if) #Chef::Mixin::Command.should_receive(:only_if).with(true, {}).and_return(false) @resource.run_action(:purr) expect(snitch_variable).to be_truthy end it "runs multiple only_if conditionals" do snitch_var1, snitch_var2 = nil, nil @resource.only_if { snitch_var1 = 1 } @resource.only_if { snitch_var2 = 2 } @resource.run_action(:purr) expect(snitch_var1).to eq(1) expect(snitch_var2).to eq(2) end it "accepts command options for only_if conditionals" do expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_command).at_least(1).times @resource.only_if("true", :cwd => '/tmp') expect(@resource.only_if.first.command_opts).to eq({:cwd => '/tmp'}) @resource.run_action(:purr) end it "runs not_if as a command when it is a string" do expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_command).at_least(1).times @resource.not_if "pwd" @resource.run_action(:purr) end it "runs not_if as a block when it is a ruby block" do expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_block).at_least(1).times @resource.not_if { puts 'foo' } @resource.run_action(:purr) end it "does not run not_if if no not_if command is given" do 
expect_any_instance_of(Chef::Resource::Conditional).not_to receive(:evaluate) @resource.not_if.clear @resource.run_action(:purr) end it "accepts command options for not_if conditionals" do @resource.not_if("pwd" , :cwd => '/tmp') expect(@resource.not_if.first.command_opts).to eq({:cwd => '/tmp'}) end it "accepts multiple not_if conditionals" do snitch_var1, snitch_var2 = true, true @resource.not_if {snitch_var1 = nil} @resource.not_if {snitch_var2 = false} @resource.run_action(:purr) expect(snitch_var1).to be_nil expect(snitch_var2).to be_falsey end it "reports 0 elapsed time if actual elapsed time is < 0" do expected = Time.now allow(Time).to receive(:now).and_return(expected, expected - 1) @resource.run_action(:purr) expect(@resource.elapsed_time).to eq(0) end describe "guard_interpreter attribute" do let(:resource) { @resource } it "should be set to :default by default" do expect(resource.guard_interpreter).to eq(:default) end it "if set to :default should return :default when read" do resource.guard_interpreter(:default) expect(resource.guard_interpreter).to eq(:default) end it "should raise Chef::Exceptions::ValidationFailed on an attempt to set the guard_interpreter attribute to something other than a Symbol" do expect { resource.guard_interpreter('command_dot_com') }.to raise_error(Chef::Exceptions::ValidationFailed) end it "should not raise an exception when setting the guard interpreter attribute to a Symbol" do allow(Chef::GuardInterpreter::ResourceGuardInterpreter).to receive(:new).and_return(nil) expect { resource.guard_interpreter(:command_dot_com) }.not_to raise_error end end end describe "should_skip?" do before do @resource = Chef::Resource::Cat.new("sugar", @run_context) end it "should return false by default" do expect(@resource.should_skip?(:purr)).to be_falsey end it "should return false when only_if is met" do @resource.only_if { true } expect(@resource.should_skip?(:purr)).to be_falsey end it "should return true when only_if is not met" do @resource.only_if { false } expect(@resource.should_skip?(:purr)).to be_truthy end it "should return true when not_if is met" do @resource.not_if { true } expect(@resource.should_skip?(:purr)).to be_truthy end it "should return false when not_if is not met" do @resource.not_if { false } expect(@resource.should_skip?(:purr)).to be_falsey end it "should return true when only_if is met but also not_if is met" do @resource.only_if { true } @resource.not_if { true } expect(@resource.should_skip?(:purr)).to be_truthy end it "should return true when one of multiple only_if's is not met" do @resource.only_if { true } @resource.only_if { false } @resource.only_if { true } expect(@resource.should_skip?(:purr)).to be_truthy end it "should return true when one of multiple not_if's is met" do @resource.not_if { false } @resource.not_if { true } @resource.not_if { false } expect(@resource.should_skip?(:purr)).to be_truthy end it "should return true when action is :nothing" do expect(@resource.should_skip?(:nothing)).to be_truthy end it "should return true when action is :nothing ignoring only_if/not_if conditionals" do @resource.only_if { true } @resource.not_if { false } expect(@resource.should_skip?(:nothing)).to be_truthy end it "should print \"skipped due to action :nothing\" message for doc formatter when action is :nothing" do fdoc = Chef::Formatters.new(:doc, STDOUT, STDERR) allow(@run_context).to receive(:events).and_return(fdoc) expect(fdoc).to receive(:puts).with(" (skipped due to action :nothing)", anything()) 
@resource.should_skip?(:nothing) end end describe "when resource action is :nothing" do before do @resource1 = Chef::Resource::Cat.new("sugar", @run_context) @resource1.action = :nothing @node.automatic_attrs[:platform] = "fubuntu" @node.automatic_attrs[:platform_version] = '10.04' end it "should not run only_if/not_if conditionals (CHEF-972)" do snitch_var1 = 0 @resource1.only_if { snitch_var1 = 1 } @resource1.not_if { snitch_var1 = 2 } @resource1.run_action(:nothing) expect(snitch_var1).to eq(0) end it "should run only_if/not_if conditionals when notified to run another action (CHEF-972)" do snitch_var1 = snitch_var2 = 0 @runner = Chef::Runner.new(@run_context) Chef::Platform.set( :resource => :cat, :provider => Chef::Provider::SnakeOil ) @resource1.only_if { snitch_var1 = 1 } @resource1.not_if { snitch_var2 = 2 } @resource2 = Chef::Resource::Cat.new("coffee", @run_context) @resource2.notifies :purr, @resource1 @resource2.action = :purr @run_context.resource_collection << @resource1 @run_context.resource_collection << @resource2 @runner.converge expect(snitch_var1).to eq(1) expect(snitch_var2).to eq(2) end end describe "building the platform map" do let(:klz) { Class.new(Chef::Resource) } before do Chef::Resource::Klz = klz end after do Chef::Resource.send(:remove_const, :Klz) end it 'adds mappings for a single platform' do expect(Chef.resource_handler_map).to receive(:set).with( :dinobot, Chef::Resource::Klz, { platform: ['autobots'] } ) klz.provides :dinobot, platform: ['autobots'] end it 'adds mappings for multiple platforms' do expect(Chef.resource_handler_map).to receive(:set).with( :energy, Chef::Resource::Klz, { platform: ['autobots', 'decepticons']} ) klz.provides :energy, platform: ['autobots', 'decepticons'] end it 'adds mappings for all platforms' do expect(Chef.resource_handler_map).to receive(:set).with( :tape_deck, Chef::Resource::Klz, {} ) klz.provides :tape_deck end end describe "resource_for_node" do describe "lookups from the platform map" do let(:klz1) { Class.new(Chef::Resource) } before(:each) do Chef::Resource::Klz1 = klz1 @node = Chef::Node.new @node.name("bumblebee") @node.automatic[:platform] = "autobots" @node.automatic[:platform_version] = "6.1" Object.const_set('Soundwave', klz1) klz1.provides :soundwave end after(:each) do Object.send(:remove_const, :Soundwave) Chef::Resource.send(:remove_const, :Klz1) end it "returns a resource by short_name if nothing else matches" do expect(Chef::Resource.resource_for_node(:soundwave, @node)).to eql(klz1) end end describe "lookups from the platform map" do let(:klz2) { Class.new(Chef::Resource) } before(:each) do Chef::Resource::Klz2 = klz2 @node = Chef::Node.new @node.name("bumblebee") @node.automatic[:platform] = "autobots" @node.automatic[:platform_version] = "6.1" klz2.provides :dinobot, :platform => ['autobots'] Object.const_set('Grimlock', klz2) klz2.provides :grimlock end after(:each) do Object.send(:remove_const, :Grimlock) Chef::Resource.send(:remove_const, :Klz2) end it "returns a resource by short_name and node" do expect(Chef::Resource.resource_for_node(:dinobot, @node)).to eql(klz2) end end end describe "when creating notifications" do describe "with a string resource spec" do it "creates a delayed notification when timing is not specified" do @resource.notifies(:run, "execute[foo]") expect(@run_context.delayed_notification_collection.size).to eq(1) end it "creates a delayed notification when :delayed is not specified" do @resource.notifies(:run, "execute[foo]", :delayed) 
expect(@run_context.delayed_notification_collection.size).to eq(1) end it "creates an immediate notification when :immediate is specified" do @resource.notifies(:run, "execute[foo]", :immediate) expect(@run_context.immediate_notification_collection.size).to eq(1) end it "creates an immediate notification when :immediately is specified" do @resource.notifies(:run, "execute[foo]", :immediately) expect(@run_context.immediate_notification_collection.size).to eq(1) end describe "with a syntax error in the resource spec" do it "raises an exception immmediately" do expect do @resource.notifies(:run, "typo[missing-closing-bracket") end.to raise_error(Chef::Exceptions::InvalidResourceSpecification) end end end describe "with a resource reference" do before do @notified_resource = Chef::Resource.new("punk", @run_context) end it "creates a delayed notification when timing is not specified" do @resource.notifies(:run, @notified_resource) expect(@run_context.delayed_notification_collection.size).to eq(1) end it "creates a delayed notification when :delayed is not specified" do @resource.notifies(:run, @notified_resource, :delayed) expect(@run_context.delayed_notification_collection.size).to eq(1) end it "creates an immediate notification when :immediate is specified" do @resource.notifies(:run, @notified_resource, :immediate) expect(@run_context.immediate_notification_collection.size).to eq(1) end it "creates an immediate notification when :immediately is specified" do @resource.notifies(:run, @notified_resource, :immediately) expect(@run_context.immediate_notification_collection.size).to eq(1) end end end describe "resource sensitive attribute" do before(:each) do @resource_file = Chef::Resource::File.new("/nonexistent/CHEF-5098/file", @run_context) @action = :create end def compiled_resource_data(resource, action, err) error_inspector = Chef::Formatters::ErrorInspectors::ResourceFailureInspector.new(resource, action, err) description = Chef::Formatters::ErrorDescription.new("test") error_inspector.add_explanation(description) Chef::Log.info("descrtiption: #{description.inspect},error_inspector: #{error_inspector}") description.sections[1]["Compiled Resource:"] end it "set to false by default" do expect(@resource.sensitive).to be_falsey end it "when set to false should show compiled resource for failed resource" do expect { @resource_file.run_action(@action) }.to raise_error { |err| expect(compiled_resource_data(@resource_file, @action, err)).to match 'path "/nonexistent/CHEF-5098/file"' } end it "when set to true should show compiled resource for failed resource" do @resource_file.sensitive true expect { @resource_file.run_action(@action) }.to raise_error { |err| expect(compiled_resource_data(@resource_file, @action, err)).to eql("suppressed sensitive resource output") } end end describe "#action" do let(:resource_class) do Class.new(described_class) do allowed_actions(%i{one two}) end end let(:resource) { resource_class.new('test', nil) } subject { resource.action } context "with a no action" do it { is_expected.to eq [:nothing] } end context "with a default action" do let(:resource_class) do Class.new(described_class) do default_action(:one) end end it { is_expected.to eq [:one] } end context "with a symbol action" do before { resource.action(:one) } it { is_expected.to eq [:one] } end context "with a string action" do before { resource.action('two') } it { is_expected.to eq [:two] } end context "with an array action" do before { resource.action([:two, :one]) } it { is_expected.to eq [:two, :one] } 
end context "with an assignment" do before { resource.action = :one } it { is_expected.to eq [:one] } end context "with an array assignment" do before { resource.action = [:two, :one] } it { is_expected.to eq [:two, :one] } end context "with an invalid action" do it { expect { resource.action(:three) }.to raise_error Chef::Exceptions::ValidationFailed } end context "with an invalid assignment action" do it { expect { resource.action = :three }.to raise_error Chef::Exceptions::ValidationFailed } end end describe ".default_action" do let(:default_action) { } let(:resource_class) do actions = default_action Class.new(described_class) do default_action(actions) if actions end end subject { resource_class.default_action } context "with no default actions" do it { is_expected.to eq [:nothing] } end context "with a symbol default action" do let(:default_action) { :one } it { is_expected.to eq [:one] } end context "with a string default action" do let(:default_action) { 'one' } it { is_expected.to eq [:one] } end context "with an array default action" do let(:default_action) { [:two, :one] } it { is_expected.to eq [:two, :one] } end end end
avg_line_length: 34.799438 | max_line_length: 147 | alphanum_fraction: 0.675096

hexsha: 4ae41aa19c1c6498c82a4bb82ec4a4bb4c7662eb | size: 6,563
require "cases/helper" require 'active_support/core_ext/object/instance_variables' class SerializationTest < ActiveModel::TestCase class User include ActiveModel::Serialization attr_accessor :name, :email, :gender, :address, :friends def initialize(name, email, gender) @name, @email, @gender = name, email, gender @friends = [] end def attributes instance_values.except("address", "friends") end def foo 'i_am_foo' end end class Address include ActiveModel::Serialization attr_accessor :street, :city, :state, :zip def attributes instance_values end end setup do @user = User.new('David', '[email protected]', 'male') @user.address = Address.new @user.address.street = "123 Lane" @user.address.city = "Springfield" @user.address.state = "CA" @user.address.zip = 11111 @user.friends = [User.new('Joe', '[email protected]', 'male'), User.new('Sue', '[email protected]', 'female')] end def test_method_serializable_hash_should_work expected = {"name"=>"David", "gender"=>"male", "email"=>"[email protected]"} assert_equal expected, @user.serializable_hash end def test_method_serializable_hash_should_work_with_only_option expected = {"name"=>"David"} assert_equal expected, @user.serializable_hash(only: [:name]) end def test_method_serializable_hash_should_work_with_except_option expected = {"gender"=>"male", "email"=>"[email protected]"} assert_equal expected, @user.serializable_hash(except: [:name]) end def test_method_serializable_hash_should_work_with_methods_option expected = {"name"=>"David", "gender"=>"male", "foo"=>"i_am_foo", "email"=>"[email protected]"} assert_equal expected, @user.serializable_hash(methods: [:foo]) end def test_method_serializable_hash_should_work_with_only_and_methods expected = {"foo"=>"i_am_foo"} assert_equal expected, @user.serializable_hash(only: [], methods: [:foo]) end def test_method_serializable_hash_should_work_with_except_and_methods expected = {"gender"=>"male", "foo"=>"i_am_foo"} assert_equal expected, @user.serializable_hash(except: [:name, :email], methods: [:foo]) end def test_should_not_call_methods_that_dont_respond expected = {"name"=>"David", "gender"=>"male", "email"=>"[email protected]"} assert_equal expected, @user.serializable_hash(methods: [:bar]) end def test_should_use_read_attribute_for_serialization def @user.read_attribute_for_serialization(n) "Jon" end expected = { "name" => "Jon" } assert_equal expected, @user.serializable_hash(only: :name) end def test_include_option_with_singular_association expected = {"name"=>"David", "gender"=>"male", "email"=>"[email protected]", "address"=>{"street"=>"123 Lane", "city"=>"Springfield", "state"=>"CA", "zip"=>11111}} assert_equal expected, @user.serializable_hash(include: :address) end def test_include_option_with_plural_association expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "friends"=>[{"name"=>'Joe', "email"=>'[email protected]', "gender"=>'male'}, {"name"=>'Sue', "email"=>'[email protected]', "gender"=>'female'}]} assert_equal expected, @user.serializable_hash(include: :friends) end def test_include_option_with_empty_association @user.friends = [] expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "friends"=>[]} assert_equal expected, @user.serializable_hash(include: :friends) end class FriendList def initialize(friends) @friends = friends end def to_ary @friends end end def test_include_option_with_ary @user.friends = FriendList.new(@user.friends) expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", 
"friends"=>[{"name"=>'Joe', "email"=>'[email protected]', "gender"=>'male'}, {"name"=>'Sue', "email"=>'[email protected]', "gender"=>'female'}]} assert_equal expected, @user.serializable_hash(include: :friends) end def test_multiple_includes expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "address"=>{"street"=>"123 Lane", "city"=>"Springfield", "state"=>"CA", "zip"=>11111}, "friends"=>[{"name"=>'Joe', "email"=>'[email protected]', "gender"=>'male'}, {"name"=>'Sue', "email"=>'[email protected]', "gender"=>'female'}]} assert_equal expected, @user.serializable_hash(include: [:address, :friends]) end def test_include_with_options expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "address"=>{"street"=>"123 Lane"}} assert_equal expected, @user.serializable_hash(include: { address: { only: "street" } }) end def test_nested_include @user.friends.first.friends = [@user] expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "friends"=>[{"name"=>'Joe', "email"=>'[email protected]', "gender"=>'male', "friends"=> [{"email"=>"[email protected]", "gender"=>"male", "name"=>"David"}]}, {"name"=>'Sue', "email"=>'[email protected]', "gender"=>'female', "friends"=> []}]} assert_equal expected, @user.serializable_hash(include: { friends: { include: :friends } }) end def test_only_include expected = {"name"=>"David", "friends" => [{"name" => "Joe"}, {"name" => "Sue"}]} assert_equal expected, @user.serializable_hash(only: :name, include: { friends: { only: :name } }) end def test_except_include expected = {"name"=>"David", "email"=>"[email protected]", "friends"=> [{"name" => 'Joe', "email" => '[email protected]'}, {"name" => "Sue", "email" => '[email protected]'}]} assert_equal expected, @user.serializable_hash(except: :gender, include: { friends: { except: :gender } }) end def test_multiple_includes_with_options expected = {"email"=>"[email protected]", "gender"=>"male", "name"=>"David", "address"=>{"street"=>"123 Lane"}, "friends"=>[{"name"=>'Joe', "email"=>'[email protected]', "gender"=>'male'}, {"name"=>'Sue', "email"=>'[email protected]', "gender"=>'female'}]} assert_equal expected, @user.serializable_hash(include: [{ address: {only: "street" } }, :friends]) end end
avg_line_length: 38.83432 | max_line_length: 110 | alphanum_fraction: 0.628676

hexsha: 01168c1083ec2182dbe679c066a6d09ee4e08105 | size: 1,759
Rails.application.routes.draw do
  # Map admin controllers
  scope "/#{Humpyard::config.admin_prefix}" do
    resources :humpyard_pages, :controller => 'humpyard/pages', :path => "pages", :only => [:index, :new, :create, :edit, :update, :show, :destroy] do
      collection do
        post :move
      end
    end

    resources :humpyard_elements, :controller => 'humpyard/elements', :path => "elements", :only => [:new, :create, :edit, :update, :show, :destroy] do
      member do
        get :inline_edit
      end
      collection do
        post :move
      end
    end

    resources :humpyard_assets, :controller => 'humpyard/assets', :path => "assets", :only => [:index, :new, :create, :edit, :update, :show, :destroy] do
    end
  end

  # Map "/" URL
  root :to => 'humpyard/pages#show', :webpath => 'index'

  # Map sitemap.xml
  match "/sitemap.xml" => 'humpyard/pages#sitemap', :as => 'sitemap'

  # Map human readable page URLs
  if Humpyard::config.www_prefix.match /:locale/
    match "/#{Humpyard::config.www_prefix}" => 'humpyard/pages#show', :webpath => 'index', :constraints => { :locale => Humpyard.config.locales_contraint }
    match "/#{Humpyard::config.www_prefix}*webpath.:format" => 'humpyard/pages#show', :constraints => { :locale => Humpyard.config.locales_contraint, :format => Humpyard.config.page_formats_contraint }
    match "/#{Humpyard::config.www_prefix}*path" => 'humpyard/errors#error404', :constraints => { :locale => Humpyard.config.locales_contraint }
  else
    match "/#{Humpyard::config.www_prefix}*webpath.:format" => 'humpyard/pages#show', :constraints => { :format => Humpyard.config.page_formats_contraint }
    match "/#{Humpyard::config.www_prefix}*path" => 'humpyard/errors#error404'
  end
end
avg_line_length: 45.102564 | max_line_length: 201 | alphanum_fraction: 0.654918

hexsha: 33e0286bc25511804eb473f5ab63c6fa5b197a8e | size: 882
require 'spec_helper'

describe 'postfix::lookup::sqlite' do
  let(:title) do
    '/etc/postfix/test.cf'
  end

  let(:params) do
    {
      :dbpath => '/path/to/database',
      :query => "SELECT address FROM aliases WHERE alias = '%s'",
    }
  end

  on_supported_os.each do |os, facts|
    context "on #{os}" do
      let(:facts) do
        facts
      end

      context 'without postfix class included' do
        it { expect { is_expected.to compile }.to raise_error(/must include the postfix base class/) }
      end

      context 'with postfix class included' do
        let(:pre_condition) do
          'include ::postfix'
        end

        it { is_expected.to compile.with_all_deps }
        it { is_expected.to contain_file('/etc/postfix/test.cf') }
        it { is_expected.to contain_postfix__lookup__sqlite('/etc/postfix/test.cf') }
      end
    end
  end
end
avg_line_length: 23.837838 | max_line_length: 102 | alphanum_fraction: 0.611111

hexsha: bf710879265d6ef4854457bceb06abb8e1e7aa57 | size: 1,781
class Newsletter
  attr_reader :data

  def initialize(params)
    @data = params
  end

  def valid?
    data["X-Mailgun-Incoming"] == "Yes" && signature_valid?
  end

  def token
    @token ||= begin
      to_email.sub("@newsletters.feedbin.com", "").sub("@development.newsletters.feedbin.com", "").sub("test-subscribe+", "").sub("subscribe+", "")
    end
  end

  def to_email
    data["recipient"]
  end

  def from_email
    parsed_from.address
  end

  def from_name
    parsed_from.name || from_email
  end

  def subject
    data["subject"]
  end

  def text
    data["body-plain"]
  end

  def html
    data["body-html"]
  end

  def content
    html || text
  end

  def timestamp
    data["timestamp"]
  end

  def feed_id
    @feed_id ||= Digest::SHA1.hexdigest("#{token}#{from_email}")
  end

  def entry_id
    @entry_id ||= Digest::SHA1.hexdigest("#{feed_id}#{subject}#{timestamp}")
  end

  def domain
    parsed_from.domain
  end

  def feed_url
    "#{site_url}?#{feed_id}"
  end

  def site_url
    @site_url ||= URI::HTTP.build(host: domain).to_s
  end

  def format
    html ? "html" : "text"
  end

  def headers
    { "List-Unsubscribe" => data["List-Unsubscribe"] }
  end

  private

  def parsed_from
    Mail::Address.new(data["from"])
  rescue Mail::Field::ParseError
    name, address = data["from"].split(/[<>]/).map(&:strip)
    domain = address.split("@").last
    OpenStruct.new(name: name, address: address, domain: domain)
  end

  def signature_valid?
    data["signature"] == signature
  end

  def signature
    @signature ||= begin
      digest = OpenSSL::Digest::SHA256.new
      signed_data = [data["timestamp"], data["token"]].join
      OpenSSL::HMAC.hexdigest(digest, ENV['MAILGUN_INBOUND_KEY'], signed_data)
    end
  end
end
avg_line_length: 17.291262 | max_line_length: 147 | alphanum_fraction: 0.627737

hexsha: fff1a0bab07c42d58f67d8f730447802dc82c955 | size: 2,125
# frozen_string_literal: true

module GitHubRepoable
  extend ActiveSupport::Concern

  # Public
  #
  def add_team_to_github_repository
    github_repository = GitHubRepository.new(organization.github_client, github_repo_id)
    github_team = GitHubTeam.new(organization.github_client, github_team_id)
    github_team.add_team_repository(github_repository.full_name, repository_permissions)
  end

  # Public
  #
  def create_github_repository
    repo_description = "#{repo_name} created by GitHub Classroom"
    github_repository = github_organization.create_repository(
      repo_name,
      private: private?,
      description: repo_description
    )
    self.github_global_relay_id = github_repository.node_id
    self.github_repo_id = github_repository.id
  end

  # Public
  #
  def destroy_github_repository
    github_organization.delete_repository(github_repo_id)
  end

  # Public
  #
  def delete_github_repository_on_failure
    yield
  rescue GitHub::Error => error
    silently_destroy_github_repository
    raise GitHub::Error, "Assignment failed to be created: #{error}"
  end

  # Public
  #
  def silently_destroy_github_repository
    destroy_github_repository
    true # Destroy ActiveRecord object even if we fail to delete the repository
  end

  # Public
  #
  def push_starter_code
    return true unless starter_code_repo_id
    client = creator.github_client
    assignment_repository = GitHubRepository.new(client, github_repo_id)
    starter_code_repository = GitHubRepository.new(client, starter_code_repo_id)
    assignment_repository.get_starter_code_from(starter_code_repository)
  end

  # Internal
  #
  def github_organization
    @github_organization ||= GitHubOrganization.new(organization.github_client, organization.github_id)
  end

  # Internal
  #
  def give_admin_permission?
    student_assignment = respond_to?(:assignment) ? assignment : group_assignment
    student_assignment.students_are_repo_admins?
  end

  # Internal
  #
  def repository_permissions
    {}.tap do |options|
      options[:permission] = "admin" if give_admin_permission?
    end
  end
end
avg_line_length: 25 | max_line_length: 103 | alphanum_fraction: 0.763294

hexsha: 2152e90e3acd5cd3a9987ecd6dd3553d4f326a35 | size: 3,269
module Elasticsearch module Persistence module Repository module Response # :nodoc: # Encapsulates the domain objects and documents returned from Elasticsearch when searching # # Implements `Enumerable` and forwards its methods to the {#results} object. # class Results include Enumerable delegate :aggregations, to: :response attr_reader :repository, :loaded alias :loaded? :loaded # @param repository [Elasticsearch::Persistence::Repository::Class] The repository instance # @param response [Hash] The full response returned from the Elasticsearch client # @param options [Hash] Optional parameters # def initialize(repository, response, options={}) @repository = repository @response = Elasticsearch::Persistence::Model::HashWrapper.new(response) @options = options @loaded = false end def method_missing(method_name, *arguments, &block) results.respond_to?(method_name) ? results.__send__(method_name, *arguments, &block) : super end def respond_to?(method_name, include_private = false) results.respond_to?(method_name) || super end def inner_hits response['hits']['hits'].collect { |d| d['inner_hits'] } end # The number of total hits for a query # def total response['hits']['total'] end # The maximum score for a query # def max_score response['hits']['max_score'] end # Yields [object, hit] pairs to the block # def each_with_hit(&block) results.zip(response['hits']['hits']).each(&block) end # Yields [object, hit] pairs and returns the result # def map_with_hit(&block) results.zip(response['hits']['hits']).map(&block) end # Return the collection of domain objects # # @example Iterate over the results # # results.map { |r| r.attributes[:title] } # => ["Fox", "Dog"] # # @return [Array] # def results @results ||= response['hits']['hits'].map do |document| repository.deserialize(document.to_hash) end @loaded = true @results end def delete(opts=nil) end # Access the response returned from Elasticsearch by the client # # @example Access the aggregations in the response # # results = repository.search query: { match: { title: 'fox dog' } }, # aggregations: { titles: { terms: { field: 'title' } } } # results.response.aggregations.titles.buckets.map { |term| "#{term['key']}: #{term['doc_count']}" } # # => ["brown: 1", "dog: 1", ...] # # @return [Hashie::Mash] # def response @response end end end end end end
avg_line_length: 30.839623 | max_line_length: 114 | alphanum_fraction: 0.524013

hexsha: ab922d2ed020f5277e33733b667fb6b74375ac5a | size: 3,744
class FeedEvent < ActiveRecord::Base def before_validation_on_create send_email end belongs_to :source, :polymorphic => true validates_presence_of :user_id #receiver validate :source_user_has_event_enabled validate :user_has_subscribed_to_event @@event_types = [] belongs_to :user serialize :details #allow creaters of events to dynamically suppress emails even if there are email subscribers #and/or all other conditions are met attr_accessor :forbid_email #This method allows you to specify a class to follow the changes #It stashes the changes in a before filter and you can optionally #specify an after filter on the followed class as well. # #The idea is that this logic isn't core to the source class and you #may want to have lots of logic for displaying the feed depending #on what has changed # #This is certainly clunky, but until a better way comes around. I #considered using observers but it seemed a whole lot heavier for not #that much gain. The event classes are fairly empty and seem like an #appropriate place to put logic concerning...umm...the event. def self.follows(klass, opts={}) klass = klass.to_s.capitalize.constantize event = self klass.class_eval{ attr_accessor :stashed_changes @@followed_by = event before_save {|record| record.stashed_changes = record.changes} #I thought there might be a use for this, but not sure now... # after_save {|record| @@followed_by.send(opts[:with], record)} if opts[:with] } end def self.notify_subscribers(opts) User.find(:all).each do |u| UserUpdateEvent.create opts.merge(:user => u) end end def allowed_to_destroy?(_user_id) (user_id == _user_id) end def self.event_types load_subclasses if @@event_types.empty? @@event_types end def self.subscribable_feed_event_types event_types.reject{|type| type.user_cannot_subscribe_to_event?} end def self.enabable_feed_event_types event_types.select{|type| type.user_can_enable_event?} end def self.subscribe_description(desc = nil) @@subscribe_descriptions ||= {} @@subscribe_descriptions[name] = desc if desc @@subscribe_descriptions[name] end def self.privacy_description(desc = nil) @@privacy_descriptions ||= {} @@privacy_descriptions[name] = desc if desc @@privacy_descriptions[name] end def self.can_send_email? FeedEventMailer.send(:new).respond_to?(name.underscore[0..-7]) end def self.user_cannot_subscribe_to_event? subscribe_description.blank? end def self.user_can_enable_event? privacy_description end private def source_disabled_event? self.class.user_can_enable_event? && source.respond_to?(:user) && source.user && !source.user.feed_event_enabled?(self.class) end def send_email FeedEventMailer.send "deliver_#{self.class.name.underscore[0..-7]}", self if self.class.can_send_email? && (user.subscribed_to_email?(self.class) || self.class.user_cannot_subscribe_to_event?) && !user.try(:online?) && !source_disabled_event? && !self.forbid_email end def self.load_subclasses Dir[RAILS_ROOT+'/app/models/feed_events/*_event.rb'].each do |file| @@event_types << File.basename(file, '.rb').camelize.constantize end end def user_has_subscribed_to_event errors.add :user, "has not subscribed to event #{self.class}" unless user.if_not_nil?{|u| u.subscribed_to_feed_event?(self.class)} || self.class.user_cannot_subscribe_to_event? end def source_user_has_event_enabled errors.add :source, "this event is disabled by the user of #{self.source.class}" if source_disabled_event? end end
avg_line_length: 31.462185 | max_line_length: 163 | alphanum_fraction: 0.724626

hexsha: 08e37d6d02fbab3da100da6cb1738f6f7734ef0d | size: 190
#!/usr/bin/env ruby
#
# Simple sinatra app to receive and display callbacks
#
require 'pp'
require 'sinatra'

set :port, 9230

post '/callback' do
  puts "Received callback"
  pp params
end
avg_line_length: 13.571429 | max_line_length: 53 | alphanum_fraction: 0.715789

hexsha: 01cd357611475e2c0e1c9ac4eb98f84be65d299b | size: 315
cask 'shruplay' do
  version '2015_05_16'
  sha256 'f23e25969e78935d94fbd6a8a3b28ac13fb3d0af9f3caa66fb3451eab9319150'

  url 'http://cdn.getshru.com/wp-content/uploads/2015/10/SHRUPlayMac_2015_05_16.zip'
  name 'PDX Pet Design SHRUPlay'
  homepage 'https://getshru.com/'
  license :gratis

  app 'ShruPlay.app'
end
avg_line_length: 26.25 | max_line_length: 84 | alphanum_fraction: 0.777778

hexsha: bfd996b138178cc20bbbf8d43595f0e8493a8c9f | size: 715
module PublicActivity
  # Module extending classes that serve as owners
  module Activist
    extend ActiveSupport::Concern

    # Module extending classes that serve as owners
    module ClassMethods
      # Adds has_many :activities association to model
      # so you can list activities performed by the owner.
      # It is completely optional, but simplifies your work.
      #
      # == Usage:
      # In model:
      #
      #   class User < ActiveRecord::Base
      #     activist
      #   end
      #
      # In controller:
      #   User.first.activities
      #
      def activist
        has_many :activities, :class_name => "PublicActivity::Activity", :as => :owner
      end
    end
  end
end
26.481481
86
0.613986
01859efffef31727739ebcafefee7bf2870ac607
568
require "stringio" describe "StringIO#lineno" do before(:each) do @io = StringIO.new("this\nis\nan\nexample") end it "returns the number of lines read" do @io.gets @io.gets @io.gets @io.lineno.should eql(3) end end describe "StringIO#lineno=" do before(:each) do @io = StringIO.new("this\nis\nan\nexample") end it "sets the current line number, but has no impact on the position" do @io.lineno = 3 @io.pos.should eql(0) @io.gets.should == "this\n" @io.lineno.should eql(4) @io.pos.should eql(5) end end
18.933333
73
0.639085
015fd9485c860385646bfea1c7b41a98f19c71ae
222
class CreateMarcas < ActiveRecord::Migration def change create_table :marcas do |t| t.text :nombre t.text :abreviacion t.string :descripcion t.timestamps null: false end end end
20.181818
44
0.63964
ffcccb3677dbc76318f84c7f6e683bf1ac9f88ec
589
class CreateClients < ActiveRecord::Migration def self.up create_table :connect_clients do |t| t.belongs_to :account t.string( :identifier, :secret, :name, :jwks_uri, :sector_identifier, :redirect_uris ) t.boolean :dynamic, :native, :ppid, default: false t.boolean :superapp, default: false t.datetime :expires_at t.text :raw_registered_json t.timestamps end add_index :connect_clients, :identifier, unique: true end def self.down drop_table :connect_clients end end
21.814815
57
0.631579
083e4934eeda4e0e6e20461e4577d704442f97cb
1,631
# Copyright 2011 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require "vertx" include Vertx @server = NetServer.new.connect_handler do |socket| parser = RecordParser.new_delimited("\n") do |line| line = line.to_s.rstrip if line.start_with?("subscribe,") topic_name = line.split(",", 2)[1] puts "subscribing to #{topic_name}" topic = SharedData::get_set(topic_name) topic.add(socket.write_handler_id) elsif line.start_with?("unsubscribe,") topic_name = line.split(",", 2)[1] puts "unsubscribing from #{topic_name}" topic = SharedData::get_set(topic_name) topic.delete(socket.write_handler_id) SharedData::remove_set(topic_name) if topic.empty? elsif line.start_with?("publish,") sp = line.split(',', 3) puts "publishing to #{sp[1]} with #{sp[2]}" topic = SharedData::get_set(sp[1]) puts "topic is #{topic}" topic.each { |actor_id| Vertx::send_to_handler(actor_id, Buffer.create_from_str(sp[2])) } end end socket.data_handler(parser) end.listen(8080) def vertx_stop @server.close end
35.456522
95
0.70141
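The server above speaks a newline-delimited text protocol: subscribe,<topic>, unsubscribe,<topic>, and publish,<topic>,<message>. A throwaway client sketch using Ruby's standard library, purely to illustrate that wire format (host and port are assumed to match the listen(8080) call above):

require 'socket'

# Connect to the pub/sub server started above (assumed to run on localhost).
socket = TCPSocket.new('localhost', 8080)

# Register interest in a topic, then publish a message to it.
socket.write("subscribe,news\n")
socket.write("publish,news,hello subscribers\n")

# Messages published to "news" are written back to every subscribed socket.
puts socket.readpartial(1024)

socket.write("unsubscribe,news\n")
socket.close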
081b3fed6c7a6d045b301e99566f1b0651741f57
11,994
require 'spec_helper' describe SQB::Select do subject(:query) { SQB::Select.new(:posts) } context "filtering" do it "should always work on the default table" do query.where(:title => 'Hello') expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` = ?)" end it "should be able to query on sub-tables" do query.where({:comments => :author} => 'Hello') expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`comments`.`author` = ?)" end it "should handle searching with array values as numbers" do query.where(:author_id => [1,2,3]) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author_id` IN (1, 2, 3))" end it "should handle searching with array values as strings" do query.where(:author_id => ['Adam', 'Dave', 'John']) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author_id` IN (?, ?, ?))" end it "should handle searching for nils" do query.where(:title => nil) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` IS NULL)" end it "should allow multiple operators per query" do query.where(:views => {:greater_than => 10, :less_than => 100}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` > 10 AND `posts`.`views` < 100)" end it "should allow safe values to be passed in" do query.where(SQB.safe('IF(LENGTH(field2) > 0, field2, field1)') => "Hello") expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (IF(LENGTH(field2) > 0, field2, field1) = ?)" end context "operators" do it "should handle equal" do query.where(:title => {:equal => 'Hello'}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` = ?)" end it "should handle equal when null" do query.where(:title => {:equal => nil}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` IS NULL)" end it "should handle not equal to" do query.where(:title => {:not_equal => 'Hello'}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` != ?)" end it "should handle not equal to when null" do query.where(:title => {:not_equal => nil}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` IS NOT NULL)" end it "should handle greater than" do query.where(:views => {:greater_than => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` > 2)" end it "should handle greater than with short hand" do query.where(:views => {:gt => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` > 2)" end it "should handle less than" do query.where(:views => {:less_than => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` < 2)" end it "should handle less than with short hand" do query.where(:views => {:lt => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` < 2)" end it "should handle greater than or equal to" do query.where(:views => {:greater_than_or_equal_to => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` >= 2)" end it "should handle greater than or equal to with short hand" do query.where(:views => {:gte => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` >= 2)" end it "should handle less than or equal to" do query.where(:views => {:less_than_or_equal_to => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` <= 2)" end it "should handle less than or equal to with short hand" 
do query.where(:views => {:lte => 2}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` <= 2)" end it "should handle in an array" do query.where(:author_id => {:in => [1,2,3]}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author_id` IN (1, 2, 3))" end it "should handle not in an array" do query.where(:author_id => {:not_in => [1,2,3]}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author_id` NOT IN (1, 2, 3))" end it "should handle searching on empty arrays by returning nothing" do query.where(:title => 'Hello', :author_id => {:in => []}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` = ? AND 1=0)" end it "should handle searching on empty arrays by returning nothing" do query.where(:title => []) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (1=0)" end it "should handle like" do query.where(:author => {:like => '%Adam'}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author` LIKE ?)" end it "should handle not like" do query.where(:author => {:not_like => '%Adam'}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`author` NOT LIKE ?)" end it "should raise an error when an invalid operator is provided" do expect { query.where(:title => {:something => "Hello"})}.to raise_error(SQB::InvalidOperatorError) end end context "or" do it "should join with ORs within an or block" do query.or do query.where(:title => "Hello") query.where(:title => "World") end expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((`posts`.`title` = ?) OR (`posts`.`title` = ?))" end it "should join with ORs within an or block" do query.or do query.where(:title => "Hello") query.where(:title => "World") end query.or do query.where(:title => "Hello") query.where(:title => "World") end expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((`posts`.`title` = ?) OR (`posts`.`title` = ?)) AND ((`posts`.`title` = ?) OR (`posts`.`title` = ?))" end it "should raise an error with nested ors" do query.or do query.where(:title => "Hello") query.where(:title => "World") expect do query.or do query.where(:title => "Banana") end end.to raise_error(SQB::QueryError) end end end context "and" do it "should join with ANDs within an or block" do query.and do query.where(:title => "Hello") query.where(:title => "World") end expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((`posts`.`title` = ?) AND (`posts`.`title` = ?))" end it "should join with ANDs within an AND block" do query.and do query.where(:title => "Hello") query.where(:title => "World") end query.and do query.where(:title => "Potatos") query.where(:title => "Tomatoes") end expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((`posts`.`title` = ?) AND (`posts`.`title` = ?)) AND ((`posts`.`title` = ?) 
AND (`posts`.`title` = ?))" end it "should raise an error with nested ands" do query.and do query.where(:title => "Hello") query.where(:title => "World") expect do query.and do query.where(:title => "Banana") end end.to raise_error(SQB::QueryError) end end end context "sub queries" do it "should be able to be used in where clauses" do other_query = SQB::Select.new(:comments) other_query.where(post_id: SQB.safe("posts.id")) other_query.column(:id, :function => 'COUNT') query.where(other_query => {:greater_than => 10}) expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((SELECT COUNT( `comments`.`id` ) FROM `comments` WHERE (`comments`.`post_id` = posts.id)) > 10)" end it "should be able to be add prepared arguments as needed" do other_query = SQB::Select.new(:comments) other_query.where(post_id: SQB.safe("posts.id")) other_query.where(author_name: 'Steve') other_query.column(:id, :function => 'COUNT') query.where(other_query => {:greater_than => 10}) query.where(subject: 'Hello') expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((SELECT COUNT( `comments`.`id` ) FROM `comments` WHERE (`comments`.`post_id` = posts.id) AND (`comments`.`author_name` = ?)) > 10) AND (`posts`.`subject` = ?)" expect(query.prepared_arguments[0]).to eq 'Steve' expect(query.prepared_arguments[1]).to eq 'Hello' end end context "escaping" do it "should escape column names" do query.where("column`name" => 'Hello') expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`column``name` = ?)" end it "should escape table names" do query.where({"table`name" => "title"} => 'Hello') expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`table``name`.`title` = ?)" end end context "with a block" do it "should work with basic equals" do query.where { |w| w.title = "asd" } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` = ?)" end it "should work with not equals" do query.where { |w| w.title.not = "asd" } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` != ?)" end it "should work with includes" do query.where { |w| w.title.includes 1,2,3,4 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` IN (1, 2, 3, 4))" end it "should work with negative like" do query.where { |w| w.title.not.includes 1,2,3,4 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` NOT IN (1, 2, 3, 4))" end it "should work with greater than" do query.where { |w| w.views > 10 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` > 10)" end it "should work with greater than or equal to" do query.where { |w| w.views >= 10 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` >= 10)" end it "should work with less than" do query.where { |w| w.views < 10 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` < 10)" end it "should work with less than or equal to" do query.where { |w| w.views <= 10 } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`views` <= 10)" end it "should work with like" do query.where { |w| w.title =~ "%Test%" } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` LIKE ?)" end it "should work with negative like" do query.where { |w| w.title.not =~ "%Test%" } expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE (`posts`.`title` NOT LIKE ?)" end it "should work with OR queries" do query.or do query.where { |w| w.title = 
"Hello" } query.where { |w| w.title = "World" } end expect(query.to_sql).to eq "SELECT `posts`.* FROM `posts` WHERE ((`posts`.`title` = ?) OR (`posts`.`title` = ?))" end end end end
38.442308
232
0.573537
79e64743fa7898ba679a16880c2972e4aa15cb99
407
require_relative "words_from_strings.rb" require_relative "count_frequency.rb" raw_text = %{Here is a whack of text for analysis. Here is another sentence.} word_list = words_from_strings(raw_text) counts = count_frequency(word_list) sorted = counts.sort_by {|word, count| count} top_five = sorted.last(5) for i in 0...5 word = top_five[i][0] count = top_five[i][1] puts "#{word}: #{count}" end
23.941176
77
0.72973
7999ca16aaa4d372fb83a786ed4b720cfca3a026
1,930
# # Puma can serve each request in a thread from an internal thread pool. # # The `threads` method setting takes two numbers: a minimum and maximum. # # Any libraries that use thread pools should be configured to match # # the maximum value specified for Puma. Default is set to 5 threads for minimum # # and maximum; this matches the default thread size of Active Record. # # # threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 } # threads threads_count, threads_count # # # Specifies the `port` that Puma will listen on to receive requests; default is 3000. # # # port ENV.fetch("PORT") { 3000 } # # # Specifies the `environment` that Puma will run in. # # # environment ENV.fetch("RAILS_ENV") { "development" } # # # Specifies the number of `workers` to boot in clustered mode. # # Workers are forked webserver processes. If using threads and workers together # # the concurrency of the application would be max `threads` * `workers`. # # Workers do not work on JRuby or Windows (both of which do not support # # processes). # # # # workers ENV.fetch("WEB_CONCURRENCY") { 2 } # # # Use the `preload_app!` method when specifying a `workers` number. # # This directive tells Puma to first boot the application and load code # # before forking the application. This takes advantage of Copy On Write # # process behavior so workers use less memory. # # # # preload_app! # # # Allow puma to be restarted by `rails restart` command. # plugin :tmp_restart workers Integer(ENV['WEB_CONCURRENCY'] || 2) threads_count = Integer(ENV['RAILS_MAX_THREADS'] || 5) threads threads_count, threads_count preload_app! rackup DefaultRackup port ENV['PORT'] || 3000 environment ENV['RACK_ENV'] || 'development' on_worker_boot do # Worker specific setup for Rails 4.1+ # See: https://devcenter.heroku.com/articles/ # deploying-rails-applications-with-the-puma-web-server#on-worker-boot ActiveRecord::Base.establish_connection end
37.843137
87
0.736269
385884ec4d16deeb5be5acb10cbd9b6566d7b6b5
2,049
# frozen_string_literal: true module PlentyClient module Item module Variation class DefaultCategory include PlentyClient::Endpoint include PlentyClient::Request ITEM_VARIATION_DEFAULT_CATEGORY_PATH = '/items/{itemId}/variations/{variationId}' LIST_ITEM_VARIATIONS_DEFAULT_CATEGORY = '/variation_default_categories' GET_ITEM_VARIATIONS_DEFAULT_CATEGORY = '/variation_default_categories/{plentyId}' CREATE_ITEM_VARIATIONS_DEFAULT_CATEGORY = '/variation_default_categories' DELETE_ITEM_VARIATIONS_DEFAULT_CATEGORY = '/variation_default_categories/{plentyId}' class << self def list(item_id, variation_id, headers = {}, &block) get(build_endpoint("#{ITEM_VARIATION_DEFAULT_CATEGORY_PATH}#{LIST_ITEM_VARIATIONS_DEFAULT_CATEGORY}", item: item_id, variation: variation_id), headers, &block) end def find(item_id, variation_id, plenty_id, headers = {}, &block) get(build_endpoint("#{ITEM_VARIATION_DEFAULT_CATEGORY_PATH}#{GET_ITEM_VARIATIONS_DEFAULT_CATEGORY}", item: item_id, variation: variation_id, plenty: plenty_id), headers, &block) end def create(item_id, variation_id, body = {}) post(build_endpoint("#{ITEM_VARIATION_DEFAULT_CATEGORY_PATH}#{CREATE_ITEM_VARIATIONS_DEFAULT_CATEGORY}", item: item_id, variation: variation_id), body) end def destroy(item_id, variation_id, marketplace_id) delete(build_endpoint("#{ITEM_VARIATION_DEFAULT_CATEGORY_PATH}#{DELETE_ITEM_VARIATIONS_DEFAULT_CATEGORY}", item: item_id, variation: variation_id, marketplace: marketplace_id)) end end end end end end
42.6875
118
0.60859
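A hedged usage sketch for the endpoint wrapper above; the numeric IDs and the create body keys are placeholders, and authentication plus the return shape depend on PlentyClient::Request, which is not shown here.

# Illustrative IDs only.
item_id, variation_id, plenty_id = 123, 456, 789

# List and fetch default-category assignments for one variation.
PlentyClient::Item::Variation::DefaultCategory.list(item_id, variation_id)
PlentyClient::Item::Variation::DefaultCategory.find(item_id, variation_id, plenty_id)

# Create an assignment and remove it again (body keys are assumed).
PlentyClient::Item::Variation::DefaultCategory.create(item_id, variation_id, plentyId: plenty_id)
PlentyClient::Item::Variation::DefaultCategory.destroy(item_id, variation_id, plenty_id)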
216606bde096eecd1e04b04e2a0349ab5bd43783
605
module CustomerApi
  module V1
    class CustomersController < ApplicationController
      def show
        @customer = Customer.find(params[:id])
      end

      def create
        result = AuthToken.verify(customer_params[:id_token])
        if result['uid'].blank?
          render status: :unauthorized, json: { status: 401, message: 'Unauthorized' }
        else
          @customer = Customer.find_or_create_stripe_customer(customer_params[:uid])
          render :show
        end
      end

      private

      def customer_params
        params.permit(:uid, :id_token)
      end
    end
  end
end
21.607143
86
0.616529
4a38b89d7770a3e479458ba23c7d6afe09487e6b
2,694
Rails.application.routes.draw do
  resources :posts do
    resources :comments
  end

  # These routes are rather interesting, but why are these routable
  # destinations? It seems to me that the user should not be able to find these
  # routes. Couldn't they be expressed as something like:
  #
  #
  # POST /post/:id/vote => 'posts#upvote'
  #
  # whose implementation:
  #
  # def upvote
  #   Post.find(params[:id]).votes.create
  # end
  #
  # That would create it, no?
  #
  # As a user I'm never going to POST /comment_votes/create, am I? Seems
  # weird. I think the Vote is a side-effect of updating the Comment or Post
  resources :comment_votes, only: [:create, :destroy]
  resources :post_votes, only: [:create, :destroy]

  # I like these routes, they're nice for that pretty aspect, also, nice work
  # on using the user_comments
  get "/user/:id/posts" => "users#posts", as: :user_posts
  get "/user/:id/comments" => "users#comments", as: :user_comments

  get "/login" => "sessions#index", as: :login_page
  post "/login" => "sessions#login", as: :login
  delete "/logout" => "sessions#destroy", as: :logout

  # The priority is based upon order of creation: first created -> highest priority.
  # See how all your routes lay out with "rake routes".

  # You can have the root of your site routed with "root"
  root 'posts#index'

  # Example of regular route:
  #   get 'products/:id' => 'catalog#view'

  # Example of named route that can be invoked with purchase_url(id: product.id)
  #   get 'products/:id/purchase' => 'catalog#purchase', as: :purchase

  # Example resource route (maps HTTP verbs to controller actions automatically):
  #   resources :products

  # Example resource route with options:
  #   resources :products do
  #     member do
  #       get 'short'
  #       post 'toggle'
  #     end
  #
  #     collection do
  #       get 'sold'
  #     end
  #   end

  # Example resource route with sub-resources:
  #   resources :products do
  #     resources :comments, :sales
  #     resource :seller
  #   end

  # Example resource route with more complex sub-resources:
  #   resources :products do
  #     resources :comments
  #     resources :sales do
  #       get 'recent', on: :collection
  #     end
  #   end

  # Example resource route with concerns:
  #   concern :toggleable do
  #     post 'toggle'
  #   end
  #   resources :posts, concerns: :toggleable
  #   resources :photos, concerns: :toggleable

  # Example resource route within a namespace:
  #   namespace :admin do
  #     # Directs /admin/products/* to Admin::ProductsController
  #     # (app/controllers/admin/products_controller.rb)
  #     resources :products
  #   end
end
29.604396
84
0.657016
7a1f7741c0bf0ff653cac35d0b736f8ec51e5700
418
cask 'cheatsheet' do version '1.2.2' sha256 '41cfec767f761e2400d5ad700c936339c8c2e80a9dfbaf44b66375e63192763c' url "http://mediaatelier.com/CheatSheet/CheatSheet_#{version}.zip" appcast 'http://mediaatelier.com/CheatSheet/feed.php' name 'CheatSheet' homepage 'http://www.cheatsheetapp.com/CheatSheet/' license :gratis app 'CheatSheet.app' postflight do suppress_move_to_applications end end
24.588235
75
0.772727
bf6a499531bfaed543cf1ca1f04c5390f2f99822
133
class AddIndexOnStateToSchoolDistricts < ActiveRecord::Migration[5.0] def change add_index :school_districts, :state end end
22.166667
69
0.789474
5d45b636bbc023c2a01a0f716cc7668ac3036c1b
990
module Fog module Compute class Google class Mock def abandon_instances(_instance_group_manager, _instances) # :no-coverage: Fog::Mock.not_implemented # :no-coverage: end end class Real def abandon_instances(instance_group_manager, instances) request = ::Google::Apis::ComputeV1::InstanceGroupManagersAbandonInstancesRequest.new( instances: instances.map{ |i| i.class == String ? i : i.self_link } ) if instance_group_manager.zone zone = instance_group_manager.zone.split("/")[-1] @compute.abandon_instance_group_manager_instances(@project, zone, instance_group_manager.name, request) else region = instance_group_manager.region.split("/")[-1] @compute.abandon_region_instance_group_manager_instances(@project, region, instance_group_manager.name, request) end end end end end end
34.137931
124
0.644444
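A sketch of calling the request above through a fog-google compute service; `google` and `manager` are assumed to be a configured service object and an instance group manager fetched elsewhere, and the instance link is a placeholder.

# Instances can be given either as self-link strings or as model objects;
# plain strings pass through unchanged, per the mapping in the Real class above.
instance_link = 'https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central1-a/instances/web-1'

# Removes the instance from the managed group without deleting the VM itself.
google.abandon_instances(manager, [instance_link])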
6ad13a164d94e63d6180f7427db7f29c7110c3c8
13,439
## # This code was generated by # \ / _ _ _| _ _ # | (_)\/(_)(_|\/| |(/_ v1.0.0 # / / # # frozen_string_literal: true module Twilio module REST class Accounts < Domain class V1 < Version class CredentialList < ListResource class PublicKeyList < ListResource ## # Initialize the PublicKeyList # @param [Version] version Version that contains the resource # @return [PublicKeyList] PublicKeyList def initialize(version) super(version) # Path Solution @solution = {} @uri = "/Credentials/PublicKeys" end ## # Lists PublicKeyInstance records from the API as a list. # Unlike stream(), this operation is eager and will load `limit` records into # memory before returning. # @param [Integer] limit Upper limit for the number of records to return. stream() # guarantees to never return more than limit. Default is no limit # @param [Integer] page_size Number of records to fetch per request, when # not set will use the default value of 50 records. If no page_size is defined # but a limit is defined, stream() will attempt to read the limit with the most # efficient page size, i.e. min(limit, 1000) # @return [Array] Array of up to limit results def list(limit: nil, page_size: nil) self.stream(limit: limit, page_size: page_size).entries end ## # Streams PublicKeyInstance records from the API as an Enumerable. # This operation lazily loads records as efficiently as possible until the limit # is reached. # @param [Integer] limit Upper limit for the number of records to return. stream() # guarantees to never return more than limit. Default is no limit. # @param [Integer] page_size Number of records to fetch per request, when # not set will use the default value of 50 records. If no page_size is defined # but a limit is defined, stream() will attempt to read the limit with the most # efficient page size, i.e. min(limit, 1000) # @return [Enumerable] Enumerable that will yield up to limit results def stream(limit: nil, page_size: nil) limits = @version.read_limits(limit, page_size) page = self.page(page_size: limits[:page_size], ) @version.stream(page, limit: limits[:limit], page_limit: limits[:page_limit]) end ## # When passed a block, yields PublicKeyInstance records from the API. # This operation lazily loads records as efficiently as possible until the limit # is reached. def each limits = @version.read_limits page = self.page(page_size: limits[:page_size], ) @version.stream(page, limit: limits[:limit], page_limit: limits[:page_limit]).each {|x| yield x} end ## # Retrieve a single page of PublicKeyInstance records from the API. # Request is executed immediately. # @param [String] page_token PageToken provided by the API # @param [Integer] page_number Page Number, this value is simply for client state # @param [Integer] page_size Number of records to return, defaults to 50 # @return [Page] Page of PublicKeyInstance def page(page_token: :unset, page_number: :unset, page_size: :unset) params = Twilio::Values.of({ 'PageToken' => page_token, 'Page' => page_number, 'PageSize' => page_size, }) response = @version.page( 'GET', @uri, params ) PublicKeyPage.new(@version, response, @solution) end ## # Retrieve a single page of PublicKeyInstance records from the API. # Request is executed immediately. # @param [String] target_url API-generated URL for the requested results page # @return [Page] Page of PublicKeyInstance def get_page(target_url) response = @version.domain.request( 'GET', target_url ) PublicKeyPage.new(@version, response, @solution) end ## # Retrieve a single page of PublicKeyInstance records from the API. 
# Request is executed immediately. # @param [String] public_key URL encoded representation of the public key, e.g. # -----BEGIN PUBLIC KEY-----MIIBIjANB.pa9xQIDAQAB-----END PUBLIC KEY----- # @param [String] friendly_name A human readable description of this resource, up # to 64 characters. # @param [String] account_sid The Subaccount this Credential should be associated # with. Needs to be a valid Subaccount of the account issuing the request # @return [PublicKeyInstance] Newly created PublicKeyInstance def create(public_key: nil, friendly_name: :unset, account_sid: :unset) data = Twilio::Values.of({ 'PublicKey' => public_key, 'FriendlyName' => friendly_name, 'AccountSid' => account_sid, }) payload = @version.create( 'POST', @uri, data: data ) PublicKeyInstance.new(@version, payload, ) end ## # Provide a user friendly representation def to_s '#<Twilio.Accounts.V1.PublicKeyList>' end end class PublicKeyPage < Page ## # Initialize the PublicKeyPage # @param [Version] version Version that contains the resource # @param [Response] response Response from the API # @param [Hash] solution Path solution for the resource # @return [PublicKeyPage] PublicKeyPage def initialize(version, response, solution) super(version, response) # Path Solution @solution = solution end ## # Build an instance of PublicKeyInstance # @param [Hash] payload Payload response from the API # @return [PublicKeyInstance] PublicKeyInstance def get_instance(payload) PublicKeyInstance.new(@version, payload, ) end ## # Provide a user friendly representation def to_s '<Twilio.Accounts.V1.PublicKeyPage>' end end class PublicKeyContext < InstanceContext ## # Initialize the PublicKeyContext # @param [Version] version Version that contains the resource # @param [String] sid The Credential Sid that uniquely identifies the Credential # to fetch # @return [PublicKeyContext] PublicKeyContext def initialize(version, sid) super(version) # Path Solution @solution = {sid: sid, } @uri = "/Credentials/PublicKeys/#{@solution[:sid]}" end ## # Fetch a PublicKeyInstance # @return [PublicKeyInstance] Fetched PublicKeyInstance def fetch params = Twilio::Values.of({}) payload = @version.fetch( 'GET', @uri, params, ) PublicKeyInstance.new(@version, payload, sid: @solution[:sid], ) end ## # Update the PublicKeyInstance # @param [String] friendly_name A human readable description of this resource, up # to 64 characters. 
# @return [PublicKeyInstance] Updated PublicKeyInstance def update(friendly_name: :unset) data = Twilio::Values.of({'FriendlyName' => friendly_name, }) payload = @version.update( 'POST', @uri, data: data, ) PublicKeyInstance.new(@version, payload, sid: @solution[:sid], ) end ## # Deletes the PublicKeyInstance # @return [Boolean] true if delete succeeds, true otherwise def delete @version.delete('delete', @uri) end ## # Provide a user friendly representation def to_s context = @solution.map {|k, v| "#{k}: #{v}"}.join(',') "#<Twilio.Accounts.V1.PublicKeyContext #{context}>" end ## # Provide a detailed, user friendly representation def inspect context = @solution.map {|k, v| "#{k}: #{v}"}.join(',') "#<Twilio.Accounts.V1.PublicKeyContext #{context}>" end end class PublicKeyInstance < InstanceResource ## # Initialize the PublicKeyInstance # @param [Version] version Version that contains the resource # @param [Hash] payload payload that contains response from Twilio # @param [String] sid The Credential Sid that uniquely identifies the Credential # to fetch # @return [PublicKeyInstance] PublicKeyInstance def initialize(version, payload, sid: nil) super(version) # Marshaled Properties @properties = { 'sid' => payload['sid'], 'account_sid' => payload['account_sid'], 'friendly_name' => payload['friendly_name'], 'date_created' => Twilio.deserialize_iso8601_datetime(payload['date_created']), 'date_updated' => Twilio.deserialize_iso8601_datetime(payload['date_updated']), 'url' => payload['url'], } # Context @instance_context = nil @params = {'sid' => sid || @properties['sid'], } end ## # Generate an instance context for the instance, the context is capable of # performing various actions. All instance actions are proxied to the context # @return [PublicKeyContext] PublicKeyContext for this PublicKeyInstance def context unless @instance_context @instance_context = PublicKeyContext.new(@version, @params['sid'], ) end @instance_context end ## # @return [String] A 34 character string that uniquely identifies this resource. def sid @properties['sid'] end ## # @return [String] AccountSid the Credential resource belongs to def account_sid @properties['account_sid'] end ## # @return [String] A human readable description of this resource def friendly_name @properties['friendly_name'] end ## # @return [Time] The date this resource was created def date_created @properties['date_created'] end ## # @return [Time] The date this resource was last updated def date_updated @properties['date_updated'] end ## # @return [String] The URI for this resource, relative to `https://accounts.twilio.com` def url @properties['url'] end ## # Fetch a PublicKeyInstance # @return [PublicKeyInstance] Fetched PublicKeyInstance def fetch context.fetch end ## # Update the PublicKeyInstance # @param [String] friendly_name A human readable description of this resource, up # to 64 characters. # @return [PublicKeyInstance] Updated PublicKeyInstance def update(friendly_name: :unset) context.update(friendly_name: friendly_name, ) end ## # Deletes the PublicKeyInstance # @return [Boolean] true if delete succeeds, true otherwise def delete context.delete end ## # Provide a user friendly representation def to_s values = @params.map{|k, v| "#{k}: #{v}"}.join(" ") "<Twilio.Accounts.V1.PublicKeyInstance #{values}>" end ## # Provide a detailed, user friendly representation def inspect values = @properties.map{|k, v| "#{k}: #{v}"}.join(" ") "<Twilio.Accounts.V1.PublicKeyInstance #{values}>" end end end end end end end
37.963277
99
0.532108
b9d57113e3ffd8dc8740845a551bf1d18e98b0b5
2,231
module SoberSwag module Reporting module Output ## # Partition output into one of two possible cases. # We use a block to decide if we should use the first or the second. # If the block returns a truthy value, we use the first output. # If it returns a falsy value, we use the second. # # This is useful to serialize sum types, or types where it can be EITHER one thing OR another. # IE, if I can resolve a dispute by EITHER transfering money OR refunding a customer, I can do this: # # ```ruby # ResolutionOutput = SoberSwag::Reporting::Output.new( # proc { |x| x.is_a?(Transfer) }, # TransferOutput, # RefundOutput # ) # ``` class Partitioned < Base ## # @param partition [#call] block that returns true or false for the input type # @param true_output [Interface] serializer to use if block is true # @param false_output [Interface] serializer to use if block is false def initialize(partition, true_output, false_output) @partition = partition @true_output = true_output @false_output = false_output end ## # @return [#call] partitioning block attr_reader :partition ## # @return [Interface] attr_reader :true_output ## # @return [Interface] attr_reader :false_output def call(item) serializer_for(item).call(item) end def serialize_report(item) serializer_for(item).serialize_report(item) end def swagger_schema true_schema, true_found = true_output.swagger_schema false_schema, false_found = false_output.swagger_schema [ { oneOf: (true_schema[:oneOf] || [true_schema]) + (false_schema[:oneOf] || [false_schema]) }, true_found.merge(false_found) ] end private ## # @return [Interface] def serializer_for(item) if partition.call(item) true_output else false_output end end end end end end
28.602564
106
0.579561
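A construction sketch matching the initializer defined above; TransferOutput, RefundOutput, and the Transfer class stand in for outputs and models defined elsewhere in an application.

# Dispatch between two serializers based on the runtime type of the object.
resolution_output = SoberSwag::Reporting::Output::Partitioned.new(
  proc { |resolution| resolution.is_a?(Transfer) },
  TransferOutput,
  RefundOutput
)

# resolution_output.call(resolution) then routes to whichever serializer the
# block selects, as implemented in #serializer_for above.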
7a67fb26184b1bd6a5ed2ed3bb0cf68d7367f715
986
require "spec_helper" describe EventStore do describe ".insert_table_name" do let(:date) { Date.parse("1955-01-31") } context "without partitioning defined" do let(:expected) { "es_test.test_events" } it "returns a properly formatted default table name" do expect(subject.insert_table_name(date)).to eq(expected) end end context "with partitioning defined" do let(:expected) { "es_test.test_events_1955_01_31" } let(:partition_config) { { "schema" => "es_test", "partition_name_suffix" => "_%Y_%m_%d", "partitioning" => true } } before { subject.custom_config(partition_config, subject.local_redis_config, "test_events", "test") } after { subject.custom_config(subject.raw_db_config["test"]["postgres"], subject.local_redis_config, "test_events", "test") } it "returns a properly formatted table name" do expect(subject.insert_table_name(date)).to eq(expected) end end end end
35.214286
132
0.681542
61d802c61c5c3fabff3add1e9a2bf253042f262b
218
class CreateCharacters < ActiveRecord::Migration[5.2] def change create_table :characters do |t| t.string :pinyin t.string :simplified t.string :definition t.timestamps end end end
18.166667
53
0.665138
b91c1eb7f3e6a3481160befc16405a4efffd57c0
2,174
# frozen_string_literal: true

# a single table inheritance class
# - attributes are defined on Request
# - behavior can be defined here
class AbsenceRequest < Request
  include AasmConfig

  def to_s
    "#{creator} Absence"
  end

  aasm column: "status" do
    # add in an additional state recorded which is only valid for an absence request
    state :recorded

    event :record do
      transitions from: :approved, to: :recorded
    end

    event :cancel do
      transitions from: [:pending, :approved, :recorded], to: :canceled, guard: :only_creator
    end
  end

  ########## Invalid Attributes ###########
  # Because we are using single table inheritance there are a number of fields in a Request
  # that are only valid for a travel request.
  # The methods below force an invalid attribute error if someone tries to access them

  # participation is not a valid property of an AbsenceRequest
  def participation=(*_args)
    raise_invalid_argument(property_name: :participation)
  end

  def participation
    raise_invalid_argument(property_name: :participation)
  end

  # purpose is not a valid property of an AbsenceRequest
  def purpose=(*_args)
    raise_invalid_argument(property_name: :purpose)
  end

  def purpose
    raise_invalid_argument(property_name: :purpose)
  end

  # estimates is not a valid property of an AbsenceRequest
  def estimates=(*_args)
    raise_invalid_argument(property_name: :estimates)
  end

  def estimates
    raise_invalid_argument(property_name: :estimates)
  end

  # event_requests is not a valid property of an AbsenceRequest
  def event_requests=(*_args)
    raise_invalid_argument(property_name: :event_requests)
  end

  def event_requests
    raise_invalid_argument(property_name: :event_requests)
  end

  # travel_category is not a valid property of an AbsenceRequest
  def travel_category=(*_args)
    raise_invalid_argument(property_name: :travel_category)
  end

  def travel_category
    raise_invalid_argument(property_name: :travel_category)
  end

  def can_modify_attributes?
    pending?
  end

  def can_record?(agent:)
    approved? && in_supervisor_chain(supervisor: creator, agent: agent)
  end
end
26.192771
93
0.74057
fff154a5a5b72a29323b8034a7ecfe8d1d224ef9
1,093
require 'test_helper'

class MicropostsInterfaceTest < ActionDispatch::IntegrationTest

  def setup
    @user = users(:michael)
  end

  test "micropost interface" do
    log_in_as(@user)
    get root_path
    assert_select 'div.pagination'
    # Invalid submission
    assert_no_difference 'Micropost.count' do
      post microposts_path, params: { micropost: { content: "" } }
    end
    assert_select 'div#error_explanation'
    # Valid submission
    content = "This micropost really ties the room together"
    assert_difference 'Micropost.count', 1 do
      post microposts_path, params: { micropost: { content: content } }
    end
    assert_redirected_to root_url
    follow_redirect!
    assert_match content, response.body
    # Delete post
    assert_select 'a', text: 'delete'
    first_micropost = @user.microposts.paginate(page: 1).first
    assert_difference 'Micropost.count', -1 do
      delete micropost_path(first_micropost)
    end
    # Visit different user (no delete links)
    get user_path(users(:archer))
    assert_select 'a', text: 'delete', count: 0
  end
end
29.540541
71
0.706313
6a0ea12b62ef8af1055fe03734f063854d3ec40c
1,345
class Conan < Formula include Language::Python::Virtualenv desc "Distributed, open source, package manager for C/C++" homepage "https://github.com/conan-io/conan" url "https://github.com/conan-io/conan/archive/1.23.0.tar.gz" sha256 "0f670f1b7d14fb6edf106971651f311e447d3d6d09cdd3c59ff84fae4fcb79f7" head "https://github.com/conan-io/conan.git" bottle do cellar :any sha256 "a2b829b259eebd57153956309485f3d7ff01586ad27ff3dd04af18c613ab4c15" => :catalina sha256 "85af0ee6c31bc850eaae1762d247d27cf49df11e021a1c949100655d4b6fb83f" => :mojave sha256 "e4e455dd6212ba65583a2df45d714d2b9480e0f7c9f8c7d362f06aa525e5836d" => :high_sierra sha256 "f6f9a5bbc247ea7c16560356b28f9eda88b108fe850c33c10f93c7375e251acc" => :x86_64_linux end depends_on "pkg-config" => :build depends_on "libffi" depends_on "[email protected]" depends_on "[email protected]" def install venv = virtualenv_create(libexec, "python3") system libexec/"bin/pip", "install", "-v", "--no-binary", ":all:", "--ignore-installed", "PyYAML==3.13", buildpath system libexec/"bin/pip", "uninstall", "-y", name venv.pip_install_and_link buildpath end test do system bin/"conan", "install", "zlib/1.2.11@conan/stable", "--build" assert_predicate testpath/".conan/data/zlib/1.2.11", :exist? end end
37.361111
94
0.725651
8786d295d6a23d4f224c8b59b35e8dda171ec294
1,664
# frozen_string_literal: true module Clusters module Applications class CheckUninstallProgressService < BaseHelmService def execute return unless app.uninstalling? case installation_phase when Gitlab::Kubernetes::Pod::SUCCEEDED on_success when Gitlab::Kubernetes::Pod::FAILED on_failed else check_timeout end rescue Kubeclient::HttpError => e log_error(e) app.make_errored!(_('Kubernetes error: %{error_code}') % { error_code: e.error_code }) end private def on_success app.destroy! rescue StandardError => e app.make_errored!(_('Application uninstalled but failed to destroy: %{error_message}') % { error_message: e.message }) ensure remove_installation_pod end def on_failed app.make_errored!(_('Operation failed. Check pod logs for %{pod_name} for more details.') % { pod_name: pod_name }) end def check_timeout if timed_out? app.make_errored!(_('Operation timed out. Check pod logs for %{pod_name} for more details.') % { pod_name: pod_name }) else WaitForUninstallAppWorker.perform_in(WaitForUninstallAppWorker::INTERVAL, app.name, app.id) end end def pod_name app.uninstall_command.pod_name end def timed_out? Time.now.utc - app.updated_at.utc > WaitForUninstallAppWorker::TIMEOUT end def remove_installation_pod helm_api.delete_pod!(pod_name) end def installation_phase helm_api.status(pod_name) end end end end
26.412698
128
0.640024
2185c940717688d2845a07cb5ef859d2d7e06a75
979
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'buildmetrics/version' Gem::Specification.new do |spec| spec.name = "buildmetrics" spec.version = Buildmetrics::VERSION spec.authors = ["Solomon White"] spec.email = ["[email protected]"] spec.summary = %q{Buildmetrics} spec.description = %q{Capture pass/fail/timing information from test suite} spec.homepage = "https://github.com/rubysolo/buildmetrics" spec.license = "MIT" spec.files = `git ls-files -z`.split("\x0") spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.add_development_dependency "bundler", "~> 1.6" spec.add_development_dependency "pry-nav" spec.add_development_dependency "rake" spec.add_development_dependency "rspec" end
37.653846
79
0.675179
7a55d83cc54b5649490aabe0141cca1dddaf74d0
2,244
# Has the content "session=<base64>" COOKIE_FILE = 'cookie' YEAR=2021 def strip_newlines(strs) strs.map {|str| str.delete_suffix("\n")} end # Run ./decXX.rb <test_name> to run code on input in file decXX.<test_name>. # # For input like: # a # b # c def get_input_str_arr(original_filename) dot_slash_date = original_filename.chomp('.rb') date = dot_slash_date.delete_prefix('./') day = date.delete_prefix('dec').to_i.to_s if ARGV[0] strip_newlines(File.readlines("#{dot_slash_date}.#{ARGV[0]}")) else true_input_filename = "#{date}.input" true_input = nil if File.exist?(true_input_filename) true_input = strip_newlines(File.readlines("#{dot_slash_date}.input")) end if !true_input puts "Fetching input for day #{date}..." # Call .to_i.to_s to get rid of leading 0 for Dec. 1-9. cookie = File.read(COOKIE_FILE).strip curl_command = "curl https://adventofcode.com/#{YEAR}/day/#{day}/input "\ "-H 'cache-control: max-age=0' "\ "-H 'cookie: #{cookie}' "\ "--output #{true_input_filename} " system(curl_command) true_input = strip_newlines(File.readlines("#{dot_slash_date}.input")) end if true_input.empty? || true_input[0].start_with?("Please don't repeatedly request") || true_input[0] == "404 Not Found" puts "Input for day #{dot_slash_date} was fetched prematurely..." exit(1) end true_input end end # For input like: # a # b # # abc # # a # b # c def str_groups_separated_by_blank_lines(original_filename) groups = [] curr_group = [] get_input_str_arr(original_filename).each do |str| if str == '' groups << curr_group curr_group = [] next end curr_group << str end groups << curr_group groups end # For input like: # here-is-some-text def get_input_str(original_filename) get_input_str_arr(original_filename)[0] end # For input like: # 1,2,3 def get_single_line_input_int_arr(original_filename, separator: ',') get_input_str(original_filename).split(separator).map(&:to_i) end # For input like: # 1 # 2 # 3 def get_multi_line_input_int_arr(original_filename) get_input_str_arr(original_filename).map(&:to_i) end
21.371429
76
0.665775
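A sketch of a day script built on the helper above; the file names (dec01.rb, the helper's own path) and the puzzle logic are assumptions, but the call matches get_multi_line_input_int_arr as defined above.

# dec01.rb -- hypothetical day script.
require_relative 'input_helpers' # assumed file name for the helper shown above

# Reads dec01.input, fetching it with the stored session cookie if missing,
# or dec01.<name> when a test-name argument is passed on the command line.
depths = get_multi_line_input_int_arr(__FILE__)

# Example puzzle logic: count increases between consecutive readings.
puts depths.each_cons(2).count { |a, b| b > a }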
ff51e3e13562d667a0f16efe20b229316f952e5f
208
class AddGoogleSyncToGroups < ActiveRecord::Migration def change add_column :groups, :google_sync, :boolean, null: false, default: false add_column :groups, :google_sync_user_id, :integer end end
29.714286
75
0.764423
79ce49a50e5273433eecb3cc7606014e051dc3f7
809
class IntegrationHelper MIN_RUBY_VERSIONS = { 'rails_5.2.4.1' => '>= 2.2.2', 'rails_6.0.2.1' => '>= 2.5.0' }.freeze def self.able_to_run?(file_path, ruby_version) return false unless ENV['INTEGRATION_TESTS'] file_name = File.basename(file_path) rails_app = File.basename(file_name, '_spec.rb') ruby_dependency = MIN_RUBY_VERSIONS[rails_app] required_version = Gem::Dependency.new('', ruby_dependency) able_to_run = required_version.match?('', ruby_version) unless able_to_run output = "\n" \ "Skipping running the integration test for #{file_name}.\n" \ "The current version of Ruby is #{ruby_version}, " \ "but the integration test requires Ruby #{ruby_dependency}." puts output end able_to_run end end
28.892857
73
0.656366
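A hypothetical spec using the guard above; the file name and example body are placeholders, and RSpec is assumed to be the test framework given the spec-style naming.

# spec/integration/rails_6.0.2.1_spec.rb (hypothetical)
RSpec.describe 'rails_6.0.2.1 integration' do
  before do
    # Skips unless INTEGRATION_TESTS is set and the running Ruby satisfies
    # the version constraint looked up from MIN_RUBY_VERSIONS above.
    skip unless IntegrationHelper.able_to_run?(__FILE__, RUBY_VERSION)
  end

  it 'boots the generated application' do
    # ...
  end
end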
d5f6a558672f87fdbf3f35ec6f48e11642d7620c
97
SparkleFormation.dynamic(:local_dynamic) do outputs.bar do value "local_dynamic" end end
16.166667
43
0.762887
acd76f67a81fd7e000f3e90133e67dd93455c1c2
1,691
FactoryBot.define do factory :representative, class: OpenStruct do end trait :representative_valid do type {:"questions.your_representatives_details.type_of_representative.options.solicitor"} organisation_name { Faker::Company.bs } name { Faker::Name.name } building {"106"} street {"Mayfair"} locality {"London"} county {"Greater London"} post_code {"SW1H 9PP"} telephone_number {"01111 123456"} alternative_telephone_number {"02222 654321"} email_address {"[email protected]"} dx_number {"dx1234567890"} have_representative {:"questions.your_representatives.have_representative.options.yes"} representative_mobile {"07987654321"} representative_reference {"Rep Ref"} representative_contact_preference {:"questions.your_representatives_details.representative_contact_preference.options.fax"} representative_fax {"0207 345 6789"} end trait :representative_invalid do type {nil} organisation_name {"J4ne D0e"} name {'J4ne D0e'} building {nil} street {nil} locality {nil} county {nil} post_code {"string"} telephone_number {"string"} alternative_telephone_number {"string"} email_address {"Email"} dx_number {"724060 Derby 21 (no validation)"} have_representative {:"questions.your_representatives.have_representative.options.no"} representative_mobile {"string"} representative_reference {""} representative_contact_preference {:"questions.your_representatives_details.representative_contact_preference.options.email"} representative_fax {"string"} employer_contract_claim {:"questions.employer_contract_claim.yes.label"} end end
36.76087
129
0.743938
d5895d8e5d19fd240dcc47f1321e8bc80cc1f43e
344
require 'clientele/pipeline/transforms' module Clientele class Pipeline class Transforms class Before < self # Before forces yielding after transformation def apply(transform) Proc.new do |object, &block| block.call transform.call(object) end end end end end end
20.235294
53
0.622093
bf4c300198830d676ab14d6f921bd1db4642955a
2,137
class Hydra < Formula desc "Network logon cracker which supports many services" homepage "https://github.com/vanhauser-thc/thc-hydra" url "https://github.com/vanhauser-thc/thc-hydra/archive/v9.1.tar.gz" sha256 "ce08a5148c0ae5ff4b0a4af2f7f15c5946bc939a57eae1bbb6dda19f34410273" license "AGPL-3.0" head "https://github.com/vanhauser-thc/thc-hydra.git" bottle do sha256 cellar: :any, arm64_big_sur: "39d8556d476a03ffb86a748f00f8202767169f0fc0ee65cf46f16b4ee2208dc2" sha256 cellar: :any, big_sur: "a7190616a3532667f98baf9d8834f38869060499d0bc6ed8edbb49451e084c84" sha256 cellar: :any, catalina: "1db4a290bf2b7d04019c081f151676916e2f97f9cf2443ddfd1081cddddb193b" sha256 cellar: :any, mojave: "144dbb541e91c9443026136998ea4c30d6b556674b4f429c148f1df88ce0e82c" sha256 cellar: :any, high_sierra: "ca89ea37aa86dfa419ce97c414b72c9c154580cce4ccc8a4ed75fd6faa4ec826" sha256 cellar: :any, x86_64_linux: "ae4ec905f16950ca5a075f5ca3e0e7cdfe28d4051dad7071fefd9c7a4db003fd" end depends_on "pkg-config" => :build depends_on "libssh" depends_on "mysql-client" depends_on "[email protected]" def install inreplace "configure" do |s| # Link against our OpenSSL # https://github.com/vanhauser-thc/thc-hydra/issues/80 s.gsub! "/opt/local/lib", Formula["[email protected]"].opt_lib s.gsub! "/opt/local/*ssl", Formula["[email protected]"].opt_lib s.gsub! "/opt/*ssl/include", Formula["[email protected]"].opt_include # Avoid opportunistic linking of everything %w[ gtk+-2.0 libfreerdp2 libgcrypt libidn libmemcached libmongoc libpq libsvn ].each do |lib| s.gsub! lib, "oh_no_you_dont" end end # Having our gcc in the PATH first can cause issues. Monitor this. # https://github.com/vanhauser-thc/thc-hydra/issues/22 system "./configure", "--prefix=#{prefix}" bin.mkpath system "make", "all", "install" share.install prefix/"man" # Put man pages in correct place end test do assert_match version.to_s, shell_output("#{bin}/hydra", 255) end end
37.491228
106
0.709406
f741685a29c9190c8638418dad041b70a948d97e
452
require "test_helper" class WryteePalindromeTest < Minitest::Test def test_non_palindrome refute "apple".palindrome? end def test_literal_palindrome assert "racecar".palindrome? end def test_mixed_case_palindrome assert "RaceCar".palindrome? end def test_palindrome_with_punctuation assert "Madam, I'm Adam.".palindrome? end def test_letters assert_equal "MadamImAdam", "Madam, I'm Adam.".letters end end
17.384615
58
0.74115
ab2893ca7e3d7040903aabc3186c18d64055d9b6
3,470
## # $Id: mysql_payload.rb 11899 2011-03-08 22:42:26Z todb $ ## ## # This file is part of the Metasploit Framework and may be subject to # redistribution and commercial restrictions. Please see the Metasploit # Framework web site for more information on licensing and terms of use. # http://metasploit.com/framework/ ## require 'msf/core' class Metasploit3 < Msf::Exploit::Remote Rank = ExcellentRanking include Msf::Exploit::Remote::MYSQL include Msf::Exploit::CmdStagerVBS def initialize(info = {}) super( update_info( info, 'Name' => 'Oracle MySQL for Microsoft Windows Payload Execution', 'Description' => %q{ This module creates and enables a custom UDF (user defined function) on the target host via the SELECT ... into DUMPFILE method of binary injection. On default Microsoft Windows installations of MySQL (=< 5.5.9), directory write permissions not enforced, and the MySQL service runs as LocalSystem. NOTE: This module will leave a payload executable on the target system when the attack is finished, as well as the UDF DLL, and will define or redefine sys_eval() and sys_exec() functions. }, 'Author' => [ 'Bernardo Damele A. G. <bernardo.damele[at]gmail.com>', # the lib_mysqludf_sys.dll binaries 'todb' # this Metasploit module ], 'License' => MSF_LICENSE, 'Version' => '$Revision: 11899 $', 'References' => [ # Bernardo's work with cmd exec via udf [ 'URL', 'http://bernardodamele.blogspot.com/2009/01/command-execution-with-mysql-udf.html' ], # Advice from 2005 on securing MySQL on Windows, kind of helpful. [ 'URL', 'http://dev.mysql.com/tech-resources/articles/securing_mysql_windows.html' ] ], 'Platform' => 'win', 'Targets' => [ [ 'Automatic', { } ], # Confirmed on MySQL 4.1.22, 5.5.9, and 5.1.56 (64bit) ], 'DefaultTarget' => 0, 'DisclosureDate' => 'Jan 16 2009' # Date of Bernardo's blog post. )) register_options( [ OptBool.new('VERBOSE', [ false, 'Enable verbose output', false ]), OptBool.new('FORCE_UDF_UPLOAD', [ false, 'Always attempt to install a sys_exec() mysql.function.', false ]), OptString.new('USERNAME', [ false, 'The username to authenticate as', 'root' ]) ]) end def username datastore['USERNAME'] end def password datastore['PASSWORD'] end def login_and_get_sys_exec mysql_login(username,password,'mysql') @mysql_arch = mysql_get_arch @mysql_sys_exec_available = mysql_check_for_sys_exec() if !@mysql_sys_exec_available || datastore['FORCE_UDF_UPLOAD'] mysql_add_sys_exec @mysql_sys_exec_available = mysql_check_for_sys_exec() else print_status "sys_exec() already available, using that (override with FORCE_UDF_UPLOAD)." end end def execute_command(cmd, opts) mysql_sys_exec(cmd, datastore['VERBOSE']) end def exploit login_and_get_sys_exec() if not @mysql_handle print_status("Invalid MySQL credentials") return elsif not [:win32,:win64].include?(@mysql_arch) print_status("Incompatible MySQL target architecture: '#{@mysql_arch}'") return else if @mysql_sys_exec_available execute_cmdstager({:linemax => 1500, :nodelete => true}) handler else print_status("MySQL function sys_exec() not available") return end end disconnect end end
31.545455
113
0.67147
9103530d8df9bb2d8a12f2af24fcff0d1feb367f
573
Pod::Spec.new do |s| s.name = 'Reader' s.version = '2.7.3' s.license = 'MIT' s.summary = 'The open source PDF file reader/viewer for iOS.' s.homepage = 'http://www.vfr.org/' s.authors = { "Julius Oklamcak" => "[email protected]" } s.source = { :git => 'https://github.com/vfr/Reader.git', :tag => "v#{s.version}" } s.platform = :ios s.ios.deployment_target = '5.0' s.source_files = 'Sources/**/*.{h,m}' s.resources = 'Graphics/Reader-*.png' s.frameworks = 'UIKit', 'Foundation', 'CoreGraphics', 'QuartzCore', 'ImageIO', 'MessageUI' s.requires_arc = true end
35.8125
91
0.643979
e8d7915d061a73bc3d6be2f4104dee7f8b6178bc
761
# frozen_string_literal: true require 'test_helper' class MeasureTest < Minitest::Test def test_to_s id = Spectator::MeterId.new('name') m = Spectator::Measure.new(id, 42) assert_equal("Measure{id=#{id}, value=42.0}", m.to_s) end def test_equal id = Spectator::MeterId.new('name', test: 'val') m1 = Spectator::Measure.new(id, 42.0) id2 = Spectator::MeterId.new('name', 'test' => 'val') m2 = Spectator::Measure.new(id2, 42) assert_equal(m1, m2) end def test_equal_nan id = Spectator::MeterId.new('name', test: 'val') m1 = Spectator::Measure.new(id, Float::NAN) id2 = Spectator::MeterId.new('name', 'test' => 'val') m2 = Spectator::Measure.new(id2, Float::NAN) assert_equal(m1, m2) end end
23.060606
57
0.639947
4a870d434a1091b26c0cde7125ca676e953667b2
719
def connected_graph?(graph) visited = [0] key = 0 queue = graph[key] until queue.empty? key = queue.shift next if visited.include?(key) for value in graph[key] if !visited.include?(value) queue << value end end visited << key end visited.size == graph.keys.size end puts connected_graph?({ 0 => [2], 1 => [4], 2 => [0, 5, 3], 3 => [5, 2], 4 => [5, 1], 5 => [4, 2, 3] }) # => true puts connected_graph?({ 0 => [2], 1 => [4], 2 => [5, 3], 3 => [5, 2], 4 => [1], 5 => [2, 3] }) # => false puts connected_graph?({ 0 => [1, 2], 1 => [0, 2], 2 => [0, 1, 3, 4, 5], 3 => [2, 4], 4 => [3, 2], 5 => [2] }) # => true
14.098039
34
0.44089
ff1f39b8c8f128b9976b644191e84c31d7e03955
1,295
class DataExport < ActiveRecord::Base class Error < ::StandardError ; end class FileNotFoundError < Error ; end class FilePermissionError < Error ; end def self.data_attribute(name, options={}) reader_method, writer_method = name, "#{name}=" define_method(reader_method){ data.fetch(name){ options[:default] } } define_method(writer_method){ |value| self.data[name] = value } end def self.find_previous_version_of(export) query = where("name = ? AND finished_exporting_at IS NOT NULL", export.name) if export.type query = query.where("type = ?", export.type) else query = query.where("type IS NULL") end query = query.where("id <> ?", export.id) if export.id query.order("created_at DESC").limit(1).first end scope :not_exported, -> { where(started_exporting_at:nil, finished_exporting_at:nil) } serialize :data, Hash validates :name, presence: true def export! raise NotImplementedError, "Must implement #export! in subclass!" end def previous_version @previous_version ||= self.class.find_previous_version_of(self) end def state if failed_at "failed" elsif finished_exporting_at "done" elsif started_exporting_at "processing" else "pending" end end end
26.428571
88
0.687259
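A hypothetical subclass of the model above; the attribute name, the export body, and the timestamp columns written here are illustrative, though the columns themselves are implied by the scopes and state logic above.

class UserDataExport < DataExport
  # Stored inside the serialized data hash via the data_attribute macro above.
  data_attribute :file_format, default: 'csv'

  def export!
    update_column(:started_exporting_at, Time.now)
    # ... generate and upload the export file here ...
    update_column(:finished_exporting_at, Time.now)
  end
end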
380baed7f135ca1db5f55d950a5802a98329ca15
45
require_relative "deployment/packages/kibana"
45
45
0.888889
1ddaeffba702cfed9af1737001562c0628edb7c9
528
Pod::Spec.new do |s| s.name = "RPJSONMapper" s.version = "0.3.1" s.summary = "JSON to object property mapper" s.homepage = "https://github.com/reygonzales/RPJSONMapper" s.license = 'MIT' s.author = { "Rey Gonzales" => "[email protected]" } s.platform = :ios, '5.0' s.source = { :git => "https://github.com/reygonzales/RPJSONMapper.git", :tag => s.version.to_s } s.source_files = 'RPJSONMapper/*.{h,m}' s.framework = 'Foundation' s.requires_arc = true end
37.714286
104
0.596591
186839ec551c13161d42be36e7a2db3546b9c7b0
380
When /^I create a data bag '(.+)' containing the JSON '(.+)'$/ do |bag, json| write_file "item.json", json run_simple "knife data bag create #{bag} -z -c knife.rb -d" run_simple "knife data bag from_file #{bag} -z -c knife.rb item.json" end Given(/^I create an empty data bag '(.+)'$/) do |databag| run_simple "knife data bag create #{databag} -z -c knife.rb", false end
38
77
0.655263
ff25b8f1c5035250f658a31244521f5b107d0922
2,370
module Somemoji class CommandLineArguments # @param argv [Array<String>] def initialize(argv) @argv = argv end # @return [String] def destination slop_parse_result[:destination] end # @return [String] def error_message slop_options.to_s end # @return [String, nil] def format slop_parse_result[:format] end # @return [String] def provider_name slop_parse_result[:provider] end # @return [Integer, nil] def size slop_parse_result[:size] end # @return [Boolean] def valid? command_name == "extract" && !slop_parse_result.nil? end private # @return [String] def command_name @argv[0] end # @return [Slop::Options] def slop_options @slop_options ||= begin if using_slop_version_4? slop_options = ::Slop::Options.new slop_options.banner = "Usage: somemoji extract [options]" slop_options.string "-p", "--provider", "(required) apple, emoji_one, noto, or twemoji" slop_options.string "-d", "--destination", "(required) directory path to locate extracted image files" slop_options.string "-f", "--format", "png or svg (default: png)" slop_options.integer "-s", "--size", "Some providers have different size image files" slop_options.bool "-h", "--help", "Display this help message" slop_options else ::Slop.new do banner "Usage: somemoji extract [options]" on "p", "provider=", "(required) apple, emoji_one, noto, or twemoji" on "d", "destination=", "(required) directory path to locate extracted image files" on "f", "format=", "png or svg (default: png)" on "s", "size=", "Some providers have different size image files" on "h", "help", "Display this help message" end end end end # @return [Slop::Result] def slop_parse_result @slop_parse_result ||= begin if using_slop_version_4? ::Slop::Parser.new(slop_options).parse(@argv) else slop_options.parse!(@argv) slop_options end end rescue ::Slop::Error end # @return [Boolean] def using_slop_version_4? ::Slop::VERSION >= "4.0.0" end end end
26.629213
112
0.58903
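A minimal sketch of how the CommandLineArguments parser above might be driven; the require line and the sample argv values are assumptions for illustration, not taken from the somemoji gem's documentation.

require "somemoji"

# Hypothetical argv mirroring the flags the parser above declares.
args = Somemoji::CommandLineArguments.new(
  %w[extract --provider twemoji --destination ./emoji --format png]
)

if args.valid?
  puts args.provider_name # => "twemoji"
  puts args.destination   # => "./emoji"
else
  abort args.error_message
end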
38f62cbfe8cb8680902c461ae5b3cd712bcc5558
131
class EchosController < ApiController
  def show
    respond_with Garage::HashRepresenter.new(:message => "Hello World")
  end
end
21.833333
71
0.755725
87ef763ea44573330032125a23e1e09c1ef2fb9b
870
# frozen_string_literal: true

def add_or_delete_schedule(remove, cron_jobs, key)
  if remove
    Sidekiq.remove_schedule(key.to_s)
  else
    cron_jobs[key] = SCHEDULED_JOBS[key]
  end
end

SCHEDULED_JOBS = {
  clean_old_releases: {
    cron: '0 6 * * *',
    class: 'CleanOldReleasesJob',
    queue: 'schedule',
    description: 'Clean old versions on each 6 AM',
  },
  reset_for_demo_mode: {
    cron: '0 0 * * *',
    class: 'ResetForDemoModeJob',
    queue: 'schedule',
    description: 'Reset demo data everyday'
  }
}

Rails.application.reloader.to_prepare do
  if Sidekiq.server?
    cron_jobs = {}

    add_or_delete_schedule(Setting.keep_uploads, cron_jobs, :clean_old_releases)
    add_or_delete_schedule(!Setting.demo_mode, cron_jobs, :reset_for_demo_mode)

    Sidekiq.schedule = cron_jobs
    SidekiqScheduler::Scheduler.instance.reload_schedule!
  end
end
24.857143
80
0.713793
3332c2deb62125c07d6f0575d96c21c61291712e
2,047
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::ServiceFabric::V7_0_0_42 module Models # # Information about a Service Fabric property. # class PropertyInfo include MsRestAzure # @return [String] The name of the Service Fabric property. attr_accessor :name # @return [PropertyValue] Describes a Service Fabric property value. attr_accessor :value # @return [PropertyMetadata] The metadata associated with a property, # including the property's name. attr_accessor :metadata # # Mapper for PropertyInfo class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'PropertyInfo', type: { name: 'Composite', class_name: 'PropertyInfo', model_properties: { name: { client_side_validation: true, required: true, serialized_name: 'Name', type: { name: 'String' } }, value: { client_side_validation: true, required: false, serialized_name: 'Value', type: { name: 'Composite', polymorphic_discriminator: 'Kind', uber_parent: 'PropertyValue', class_name: 'PropertyValue' } }, metadata: { client_side_validation: true, required: true, serialized_name: 'Metadata', type: { name: 'Composite', class_name: 'PropertyMetadata' } } } } } end end end end
27.662162
75
0.513434
e84f4e95083fa5d34f3b0b27fcb7557b0e2848b3
123
class AddNoCanDoDayToUsers < ActiveRecord::Migration
  def change
    add_column :users, :no_can_do_day, :date
  end
end
13.666667
52
0.756098
33871dcae4edafb8e46856dcfb12b22b6227e35e
1,380
require 'rails_helper'
require 'datetime_period'

RSpec.describe DatetimePeriod, type: :model do
  describe 'validations' do
    let(:monday_string) { '20 September 2021 09:00' }
    let(:tuesday_string) { '21 September 2021 09:00' }

    it { is_expected.to validate_presence_of(:start_at_string) }
    it { is_expected.to validate_presence_of(:end_at_string) }
    it { is_expected.not_to allow_values('blah').for(:start_at_string) }
    it { is_expected.not_to allow_values('blah').for(:end_at_string) }

    context 'start before end' do
      subject { described_class.new(start_at_string: monday_string, end_at_string: tuesday_string) }

      it { is_expected.to be_valid }
    end

    context 'end before start' do
      subject { described_class.new(start_at_string: tuesday_string, end_at_string: monday_string) }

      it { is_expected.to be_invalid }
    end

    context 'same end and start' do
      subject { described_class.new(start_at_string: monday_string, end_at_string: monday_string) }

      it { is_expected.to be_invalid }
    end
  end

  describe '#to_s' do
    let(:start_at_string) { '1 Jan 2021 09:00' }
    let(:end_at_string) { '2 Jan 2021 10:00' }
    let(:period) { described_class.new(start_at_string: start_at_string, end_at_string: end_at_string) }

    specify { expect(period.to_s).to eq('2021-01-01T09:00--2021-01-02T10:00') }
  end
end
32.857143
104
0.71087
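For illustration, the behaviour asserted by the spec above, condensed into a console-style sketch; the values are lifted directly from the spec's examples.

period = DatetimePeriod.new(
  start_at_string: "1 Jan 2021 09:00",
  end_at_string:   "2 Jan 2021 10:00"
)

period.valid? # => true, because start is before end
period.to_s   # => "2021-01-01T09:00--2021-01-02T10:00"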
61e7128d10387c495ba362c9d0410a76eb90b918
445
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::EdgeGateway::Mgmt::V2019_03_01
  module Models
    #
    # Defines values for DownloadPhase
    #
    module DownloadPhase
      Unknown = "Unknown"
      Initializing = "Initializing"
      Downloading = "Downloading"
      Verifying = "Verifying"
    end
  end
end
23.421053
70
0.694382
6107f2865757462e26c11ebc5d12eac2b0fc62ad
2,283
require 'ds9.so' require 'stringio' module DS9 VERSION = '1.1.1' module Frames class Frame Header = Struct.new :length, :stream_id, :type, :flags def length header.length end def settings?; false; end def headers?; false; end def data?; false; end def push_promise?; false; end def priority?; false; end def rst_stream?; false; end def ping?; false; end def goaway?; false; end def window_update?; false; end def continuation?; false; end def end_stream? flags & Flags::END_STREAM > 0 end end class Continuation def continuation?; true; end end class WindowUpdate def window_update?; true; end end class Goaway def goaway?; true; end end class Ping def ping?; true; end end class Priority def priority?; true; end end class RstStream def rst_stream?; true; end end class PushPromise def push_promise?; true; end end class Data def data?; true; end end class Settings def settings?; true; end end class Headers def headers?; true; end def request?; category == REQUEST; end def response?; category == RESPONSE; end def push_response?; category == PUSH_RESPONSE; end end end class Session def initialize @post_buffers = {} cbs = make_callbacks init_internals cbs end private def save_post_buffer id, stream @post_buffers[id] = stream end def remove_post_buffer id @post_buffers.delete id end def send_event string raise NotImplementedError end def on_data_source_read stream_id, length raise NotImplementedError end def recv_event length raise NotImplementedError end end class Exception < StandardError def self.abort code raise new(to_string(code), code) end attr_reader :code def initialize str, code @code = code super(str) end end class Client def submit_request headers, body = nil case body when String body = StringIO.new body end super(headers, body) end end end
17.976378
60
0.605344
2629e29e7ffbb6a9f8e330ef4bbfc96677deb861
595
Pod::Spec.new do |s|
  s.name         = "Verbena"
  s.version      = "0.0.2"
  s.summary      = "A collection of Core Graphics helper methods."
  s.homepage     = "https://github.com/kaishin/Verbena"
  s.screenshots  = "https://raw.github.com/kaishin/Verbena/master/screenshot.png"
  s.license      = 'BSD'
  s.author       = { "Reda Lemeden" => "[email protected]" }
  s.source       = { :git => "https://github.com/kaishin/Verbena.git", :tag => "0.0.2" }
  s.platform     = :ios, '6.1'
  s.source_files = 'Verbena/Verbena/'
  s.frameworks   = 'Foundation', 'UIKit'
  s.requires_arc = true
end
39.666667
88
0.606723
e945571cf40eea2e5defa6d034f1f5a0fe11cb35
3,635
require "yaml" require_relative "data_loader" Rect = Struct.new(:left, :top, :right, :bottom) class Tileset def self.load(name, loader: DataLoader) data = loader.load_file("tilesets/#{name}") new(data) end def initialize(tiles) @tiles = tiles end def [](key) tiles[key] end private attr_reader :tiles end class Dungeon attr_reader :rows def initialize(width, height, tileset:) @tileset = tileset @rows = Array.new(height) { Array.new(width) { tileset[:stone] } } end def build(type, x, y) rows[y][x] = tileset[type] end private attr_reader :tileset end class RoomGenerator MIN_WIDTH = 2 MIN_HEIGHT = 2 MAX_WIDTH_MODIFIER = 12 MAX_HEIGHT_MODIFIER = 4 MAX_FLOOR_AREA = 50 def initialize(rect) @rect = rect end def generate constrain_floor_area build_room end private attr_reader :rect def constrain_floor_area if floor_area > MAX_FLOOR_AREA @width = 50 / height end end def build_room Rect.new(left, top, right, bottom) end def floor_area width * height end def height @height ||= MIN_HEIGHT + rand(MAX_HEIGHT_MODIFIER) end def width @width ||= MIN_WIDTH + rand(MAX_WIDTH_MODIFIER) end def left @left ||= rect.left + 1 + rand(rect.right - width - 2) end def top @top ||= rect.top + 1 + rand(rect.bottom - height - 2) end def right @right ||= left + width end def bottom @bottom ||= top + height end end class DungeonGenerator DEFAULT_WIDTH = 80 DEFAULT_HEIGHT = 21 DEFAULT_TILESET_NAME = "default" def initialize(options = {}) @options = options @dungeon = Dungeon.new(width, height, tileset: tileset) @rects = [ Rect.new(0, 0, width, height) ] end def generate room = create_room render_room(room) dungeon end private attr_reader :dungeon, :rects, :options def create_room room_generator.new(rects.first).generate end def render_room(room) room_renderer.new(room, dungeon).render end def room_generator options.fetch(:room_generator, RoomGenerator) end def room_renderer options.fetch(:room_renderer, RoomRenderer) end def width options.fetch(:width, DEFAULT_WIDTH) end def height options.fetch(:height, DEFAULT_HEIGHT) end def tileset @_tileset ||= Tileset.load(tileset_name) end def tileset_name options.fetch(:tileset_name, DEFAULT_TILESET_NAME) end def print_dungeon puts dungeon.map(&:join) end end class DungeonPrinter def initialize(dungeon, io = STDOUT) @dungeon = dungeon @io = io end def print io.puts dungeon.rows.map(&:join) end private attr_reader :io, :dungeon end class RoomRenderer def initialize(room, dungeon) @left = room.left @right = room.right @top = room.top @bottom = room.bottom @dungeon = dungeon end def render render_floor render_vertical_walls render_horizontal_walls end private attr_reader :top, :left, :right, :bottom, :dungeon def render_floor left.upto(right) do |x| top.upto(bottom) do |y| dungeon.build(:floor, x, y) end end end def render_vertical_walls top.upto(bottom) do |y| dungeon.build(:vertical_wall, left - 1, y) dungeon.build(:vertical_wall, right + 1, y) end end def render_horizontal_walls (left - 1).upto(right + 1) do |x| dungeon.build(:horizontal_wall, x, top - 1) dungeon.build(:horizontal_wall, x, bottom + 1) end end end dungeon = DungeonGenerator.new.generate DungeonPrinter.new(dungeon).print
16.674312
70
0.663549
e2158868b4c8924e80f79631b09ee4a779fa3934
2,029
# frozen_string_literal: true # rubocop:disable Style/NumericLiteralPrefix describe RuboCop::Cop::Lint::ScriptPermission do subject(:cop) { described_class.new(config, options) } let(:config) { RuboCop::Config.new } let(:options) { nil } let(:file) { Tempfile.new('') } let(:filename) { file.path.split('/').last } after do file.close file.unlink end context 'with file permission 0644' do let(:source) { '#!/usr/bin/ruby' } before do File.write(file.path, source) FileUtils.chmod(0644, file.path) end if RuboCop::Platform.windows? context 'Windows' do it 'allows any file permissions' do expect_no_offenses(<<-RUBY.strip_indent, file) #!/usr/bin/ruby RUBY end end else it 'registers an offense for script permission' do expect_offense(<<-RUBY.strip_indent, file) #!/usr/bin/ruby ^^^^^^^^^^^^^^^ Script file #{filename} doesn't have execute permission. RUBY end end end context 'with file permission 0755' do before do FileUtils.chmod(0755, file.path) end it 'accepts with shebang line' do File.write(file.path, '#!/usr/bin/ruby') expect_no_offenses(file.read, file) end it 'accepts without shebang line' do File.write(file.path, 'puts "hello"') expect_no_offenses(file.read, file) end it 'accepts with blank' do File.write(file.path, '') expect_no_offenses(file.read, file) end end context 'with stdin' do let(:options) { { stdin: '' } } it 'skips investigation' do expect_no_offenses('#!/usr/bin/ruby') end end unless RuboCop::Platform.windows? context 'auto-correct' do it 'adds execute permissions to the file' do File.write(file.path, '#!/usr/bin/ruby') autocorrect_source(file.read, file) expect(file.stat.executable?).to be_truthy end end end end # rubocop:enable Style/NumericLiteralPrefix
22.797753
80
0.625924
79c00390669df9eb8cc63ee28344584792306d69
3,301
require 'minitest/autorun' require_relative 'pangram' # Common test data version: 1.3.0 d79e13e class PangramTest < Minitest::Test def test_sentence_empty # skip phrase = '' result = Pangram.pangram?(phrase) refute result, "Expected false, got: #{result.inspect}. #{phrase.inspect} is NOT a pangram" end def test_recognizes_a_perfect_lower_case_pangram skip phrase = 'abcdefghijklmnopqrstuvwxyz' result = Pangram.pangram?(phrase) assert result, "Expected true, got: #{result.inspect}. #{phrase.inspect} IS a pangram" end def test_pangram_with_only_lower_case skip phrase = 'the quick brown fox jumps over the lazy dog' result = Pangram.pangram?(phrase) assert result, "Expected true, got: #{result.inspect}. #{phrase.inspect} IS a pangram" end def test_missing_character_x skip phrase = 'a quick movement of the enemy will jeopardize five gunboats' result = Pangram.pangram?(phrase) refute result, "Expected false, got: #{result.inspect}. #{phrase.inspect} is NOT a pangram" end def test_another_missing_character_eg_h skip phrase = 'five boxing wizards jump quickly at it' result = Pangram.pangram?(phrase) refute result, "Expected false, got: #{result.inspect}. #{phrase.inspect} is NOT a pangram" end def test_pangram_with_underscores skip phrase = 'the_quick_brown_fox_jumps_over_the_lazy_dog' result = Pangram.pangram?(phrase) assert result, "Expected true, got: #{result.inspect}. #{phrase.inspect} IS a pangram" end def test_pangram_with_numbers skip phrase = 'the 1 quick brown fox jumps over the 2 lazy dogs' result = Pangram.pangram?(phrase) assert result, "Expected true, got: #{result.inspect}. #{phrase.inspect} IS a pangram" end def test_missing_letters_replaced_by_numbers skip phrase = '7h3 qu1ck brown fox jumps ov3r 7h3 lazy dog' result = Pangram.pangram?(phrase) refute result, "Expected false, got: #{result.inspect}. #{phrase.inspect} is NOT a pangram" end def test_pangram_with_mixed_case_and_punctuation skip phrase = '"Five quacking Zephyrs jolt my wax bed."' result = Pangram.pangram?(phrase) assert result, "Expected true, got: #{result.inspect}. #{phrase.inspect} IS a pangram" end def test_upper_and_lower_case_versions_of_the_same_character_should_not_be_counted_separately skip phrase = 'the quick brown fox jumps over with lazy FX' result = Pangram.pangram?(phrase) refute result, "Expected false, got: #{result.inspect}. #{phrase.inspect} is NOT a pangram" end # Problems in exercism evolve over time, as we find better ways to ask # questions. # The version number refers to the version of the problem you solved, # not your solution. # # Define a constant named VERSION inside of the top level BookKeeping # module, which may be placed near the end of your file. # # In your file, it will look like this: # # module BookKeeping # VERSION = 1 # Where the version number matches the one in the test. # end # # If you are curious, read more about constants on RubyDoc: # http://ruby-doc.org/docs/ruby-doc-bundle/UsersGuide/rg/constants.html def test_bookkeeping skip assert_equal 6, BookKeeping::VERSION end end
33.683673
95
0.719479
5d78e5fbfd376159bebf34fb5e702750fcc415b1
1,414
Spree::CheckoutController.class_eval do before_filter :pay_with_payu, only: :update private def pay_with_payu return unless params[:state] == 'confirm' pm_id = @order.payments.order('created_at ASC').last.payment_method_id payment_method = Spree::PaymentMethod.find(pm_id) if payment_method && payment_method.kind_of?(Spree::PaymentMethod::Payu) params = PayuOrder.params(@order, request.remote_ip, order_url(@order), payu_notify_url, order_url(@order)) response = OpenPayU::Order.create(params) case response.status['status_code'] when 'SUCCESS' payment_success(payment_method) redirect_to response.redirect_uri else payu_error end end rescue StandardError => e payu_error(e) end def payment_success(payment_method) payment = @order.payments.build( payment_method_id: payment_method.id, amount: @order.total, state: 'checkout' ) unless payment.save flash[:error] = payment.errors.full_messages.join("\n") redirect_to checkout_state_path(@order.state) and return end unless @order.next flash[:error] = @order.errors.full_messages.join("\n") redirect_to checkout_state_path(@order.state) and return end payment.pend! end def payu_error(e = nil) @order.errors[:base] << "PayU error #{e.try(:message)}" render :edit end end
25.25
113
0.685997
282b79a3d0f49ff2072b912a84718fba75eff154
564
module UiHelper
  def return_to_dashboard_button
    link_to "Return to Dashboard", root_path, {class: "btn btn-info pull-right"}
  end

  def grouped_options_for_assigning_case(volunteer)
    [
      [
        "Not Assigned",
        CasaCase
          .not_assigned(volunteer.casa_org)
          .map { |casa_case| [casa_case.case_number, casa_case.id] }
      ],
      [
        "Assigned",
        CasaCase
          .actively_assigned_excluding_volunteer(volunteer)
          .map { |casa_case| [casa_case.case_number, casa_case.id] }
      ]
    ]
  end
end
24.521739
80
0.62234
bb19a9ce8ab008da0abca0f4c385c906bb42a8e8
1,332
class Castxml < Formula
  desc "C-family Abstract Syntax Tree XML Output"
  homepage "https://github.com/CastXML/CastXML"
  url "https://mirrors.ocf.berkeley.edu/debian/pool/main/c/castxml/castxml_0.1+git20170823.orig.tar.xz"
  mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/c/castxml/castxml_0.1+git20170823.orig.tar.xz"
  version "0.1+git20170823"
  sha256 "aa10c17f703ef46a88f9772205d8f51285fd3567aa91931ee1a7a5abfff95b11"
  revision 1
  head "https://github.com/CastXML/castxml.git"

  bottle do
    cellar :any
    sha256 "2b492ee4cedbc2c225798add5492155b04d7b9d661c265b55bc3f6bdbe8f5efd" => :high_sierra
    sha256 "20ecdeb7cf69686765614ac7a408778866d05f6d53dab99b4dadca833a0a913e" => :sierra
    sha256 "f0cf6a659bf4df48891080168307d82280ae50014af339c643f4ee173e42312d" => :el_capitan
    sha256 "3abae7e083d39e6394f1ffff5fee6e4df9f3b6554ea18e79c711a24b0699fccd" => :yosemite
  end

  depends_on "cmake" => :build
  depends_on "llvm"

  def install
    mkdir "build" do
      system "cmake", "..", *std_cmake_args
      system "make", "install"
    end
  end

  test do
    (testpath/"test.cpp").write <<~EOS
      int main() {
        return 0;
      }
    EOS
    system "#{bin}/castxml", "-c", "-x", "c++", "--castxml-cc-gnu", "clang++",
           "--castxml-gccxml", "-o", "test.xml", "test.cpp"
  end
end
35.052632
127
0.71997
b98cb585c349954661838194a81b5ed1b880a726
1,175
require_dependency "wms/application_controller" module Wms class AccountsController < ApplicationController def welcome end def signup @account = Wms::Account.new end def login end def logout cookies.delete(:token) session[:return_to] = nil redirect_to root_url, notice: "Signed out successfully." end def create_login_session account = Wms::Account.where(name: params[:name]).first if account && account.authenticate(params[:password]) cookies.permanent[:token] = account.token logger.info "return_to: #{session[:return_to]}" return_url = session[:return_to] || root_url session[:return_to] = nil redirect_to return_url, notice: "Signed in successfully." else flash[:notice] = "name or password not right" redirect_to login_path end end def create @account = Wms::Account.new(account_params) if @account.save cookies.permanent[:token] = @account.token redirect_to root_url else render :signup end end private def account_params params.require(:account).permit! end end end
22.596154
64
0.658723
038e8c51320d70e28c350e49e97724a4e62cae87
662
module EpicRoadTrips
  class EpicRoadTrips::Trips
    attr_accessor :road, :country, :description

    def self.get_page
      doc = Nokogiri::HTML(open("https://www.fodors.com/news/photos/the-20-best-road-trips-on-earth"))
      list_trips = doc.css(".container.slides")

      list_trips.collect do |road_trip|
        trip = self.new
        trip.road = road_trip.css("h2").text.strip.gsub("\n", "").gsub("Book a Hotel", "").strip
        trip.country = road_trip.css("h3 span").text.strip.gsub("\n", "")
        trip.description = road_trip.css("p").text.gsub("\u2019", " ").gsub("\u00A0", " ")
        trip
      end
    end
  end
end
27.583333
103
0.599698
4ac5237e2dbf3402449eda5c8288a64811968395
808
{
  matrix_id: '1850',
  name: 'BenElechi1',
  group: 'BenElechi',
  description: 'problem with many sparse right-hand sides, S. Ben Elechi',
  author: 'S. Ben Elechi',
  editor: 'T. Davis',
  date: '2007',
  kind: '2D/3D problem',
  problem_2D_or_3D: '1',
  num_rows: '245874',
  num_cols: '245874',
  nonzeros: '13150496',
  num_explicit_zeros: '0',
  num_strongly_connected_components: '7',
  num_dmperm_blocks: '7',
  structural_full_rank: 'true',
  structural_rank: '245874',
  pattern_symmetry: '1.000',
  numeric_symmetry: '1.000',
  rb_type: 'real',
  structure: 'symmetric',
  cholesky_candidate: 'yes',
  positive_definite: 'yes',
  b_field: 'sparse 245874-by-40543 ',
  image_files: 'BenElechi1.png,BenElechi1_dmperm.png,BenElechi1_graph.gif,',
}
27.862069
78
0.648515
21d4a265599ae82d6728f9fe74a15b509cdf3827
3,439
# # Copyright Peter Donald # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =begin #< Downloads, and extracts the glassfish binaries, creates the glassfish user and group. Does not create any Application Server or Message Broker instances. This recipe is not typically included directly but is included transitively through either <code>glassfish::attribute_driven_domain</code> or <code>glassfish::attribute_driven_mq</code>. #> =end if node['glassfish']['package_url'].nil? if node['glassfish']['version'] == '4.1.152' raise "The version 4.1.152 requires that node['glassfish']['variant'] be set to 'payara'" unless node['glassfish']['variant'] == 'payara' node.override['glassfish']['package_url'] = 'https://s3-eu-west-1.amazonaws.com/payara.co/Payara+Downloads/payara-4.1.152.zip' elsif node['glassfish']['version'] == '4.1.151' raise "The version 4.1.151 requires that node['glassfish']['variant'] be set to 'payara'" unless node['glassfish']['variant'] == 'payara' node.override['glassfish']['package_url'] = 'http://s3-eu-west-1.amazonaws.com/payara.co/Payara+Downloads/payara-4.1.151.zip' elsif ['3.1.2.2', '4.0', '4.1'].include?(node['glassfish']['version']) raise "The version #{node['glassfish']['version']} requires that node['glassfish']['variant'] be set to 'glassfish'" unless node['glassfish']['variant'] == 'glassfish' node.override['glassfish']['package_url'] = "http://dlc.sun.com.edgesuite.net/glassfish/#{node['glassfish']['version']}/release/glassfish-#{node['glassfish']['version']}.zip" end end raise "glassfish.package_url not specified and unable to be derived. Please specify an attribute value for node['glassfish']['package_url']" unless node['glassfish']['package_url'] include_recipe 'java' group node['glassfish']['group'] do end user node['glassfish']['user'] do comment 'GlassFish Application Server' gid node['glassfish']['group'] home node['glassfish']['base_dir'] shell '/bin/bash' system true end directory node['glassfish']['base_dir'] do mode '0755' owner node['glassfish']['user'] group node['glassfish']['group'] end a = archive 'glassfish' do url node['glassfish']['package_url'] version node['glassfish']['version'] owner node['glassfish']['user'] group node['glassfish']['group'] extract_action 'unzip_and_strip_dir' end exists_at_run_start = ::File.exist?(a.target_directory) node.override['glassfish']['install_dir'] = a.target_directory directory "#{node['glassfish']['install_dir']}/glassfish/domains/domain1" do recursive true action :delete not_if { exists_at_run_start } end if node['glassfish']['remove_domains_dir_on_install'] # We remove the domains directory on initial install as it is expected that they will need to be # recreated due to upgrade in glassfish version directory node['glassfish']['domains_dir'] do recursive true action :nothing not_if { exists_at_run_start } end end
39.079545
180
0.727246
e8062ddb09b824d6bed46e48189cca5f19f563fb
1,655
require 'test_helper' class FollowingTest < ActionDispatch::IntegrationTest def setup @user = users(:michael) @other = users(:archer) log_in_as(@user) end test "following page" do get following_user_path(@user) assert_not @user.following.empty? assert_match @user.following.count.to_s, response.body @user.following.each do |user| assert_select "a[href=?]", user_path(user) assert_select "a[href=?]", user_path(user) end end test "followers page" do get followers_user_path(@user) assert_not @user.followers.empty? assert_match @user.followers.count.to_s, response.body @user.followers.each do |user| assert_select "a[href=?]", user_path(user) end end test "should follow a user the standard way" do assert_difference '@user.following.count', 1 do post relationships_path, params: { followed_id: @other.id } end end test "should follow a user with Ajax" do assert_difference '@user.following.count', 1 do post relationships_path, xhr: true, params: { followed_id: @other.id } end end test "should unfollow a user the standard way" do @user.follow(@other) relationship = @user.active_relationships.find_by(followed_id: @other.id) assert_difference '@user.following.count', -1 do delete relationship_path(relationship) end end test "should unfollow a user with Ajax" do @user.follow(@other) relationship = @user.active_relationships.find_by(followed_id: @other.id) assert_difference '@user.following.count', -1 do delete relationship_path(relationship), xhr: true end end end
28.534483
77
0.699094
5d291d9d7230707719059f45ea6de7a715ee396c
2,535
# frozen_string_literal: true module Gitlab module GithubImport module Importer class PullRequestsImporter include ParallelScheduling def importer_class PullRequestImporter end def representation_class Gitlab::GithubImport::Representation::PullRequest end def sidekiq_worker_class ImportPullRequestWorker end def id_for_already_imported_cache(pr) pr.number end def object_type :pull_request end def each_object_to_import super do |pr| update_repository if update_repository?(pr) yield pr end end def update_repository # We set this column _before_ fetching the repository, and this is # deliberate. If we were to update this column after the fetch we may # miss out on changes pushed during the fetch or between the fetch and # updating the timestamp. project.touch(:last_repository_updated_at) # rubocop: disable Rails/SkipsModelValidations project.repository.fetch_remote(project.import_url, refmap: Gitlab::GithubImport.refmap, forced: true) pname = project.path_with_namespace Gitlab::Import::Logger.info( message: 'GitHub importer finished updating repository', project_name: pname ) repository_updates_counter.increment end def update_repository?(pr) last_update = project.last_repository_updated_at || project.created_at return false if pr.updated_at < last_update # PRs may be updated without there actually being new commits, thus we # check to make sure we only re-fetch if truly necessary. !(commit_exists?(pr.head.sha) && commit_exists?(pr.base.sha)) end def commit_exists?(sha) project.repository.commit(sha).present? end def collection_method :pull_requests end def collection_options { state: 'all', sort: 'created', direction: 'asc' } end def parallel_import_batch { size: 200, delay: 1.minute } end def repository_updates_counter @repository_updates_counter ||= Gitlab::Metrics.counter( :github_importer_repository_updates, 'The number of times repositories have to be updated again' ) end end end end end
27.857143
112
0.625641
26e82b31fdc82cdb3e389a4a1fce072eac091300
669
require WulinAuth::Engine.root.join('lib', 'password_complexity_validator')

module WulinAuth
  class User < ApplicationRecord
    has_secure_password

    validates :email, presence: true, uniqueness: true
    validates_with PasswordComplexityValidator

    scope :by_token, lambda { |token| where(["token = ? AND token_expires_at > ?", token, Time.current]) }

    def create_token
      self.token = SecureRandom.urlsafe_base64
      self.token_expires_at = 24.hours.from_now
    end

    def send_password_reset!
      create_token
      if save
        PasswordResetMailer.reset_password(self).deliver
      else
        false
      end
    end
  end
end
23.892857
75
0.695067
792137781391ef3c4363405b415926b96963b6d9
697
require 'sinatra'
require 'byebug'

set :port, 3000
set :bind, '0.0.0.0'

def json(hash)
  content_type :json
  JSON.pretty_generate hash
end

def authorized?
  auth ||= Rack::Auth::Basic::Request.new(request.env)
  auth.provided? and auth.basic? and auth.credentials and auth.credentials == ["bill", "dollar"]
end

def require_auth
  unless authorized?
    response['WWW-Authenticate'] = %(Basic realm="Restricted Area")
    throw(:halt, [401, "Unauthorized\n"])
  end
end

get '/' do
  json mockserver: :online
end

get '/do/Error' do
  status 500
  json error: "You asked for it..."
end

get '/do/*' do
  json received: params
end

get '/secure/*' do
  require_auth
  json received: params
end
17
95
0.688666
e2fd009a5fab431c24aabb54f70e345fb177ef4e
1,585
require File.expand_path('../../../spec_helper', __FILE__)

ruby_version_is '2.5' do
  describe "Integer#anybits?" do
    it "returns true iff any of the bits of the argument are set in the receiver" do
      42.anybits?(42).should == true
      0b1010_1010.anybits?(0b1000_0010).should == true
      0b1010_1010.anybits?(0b1000_0001).should == true
      0b1000_0010.anybits?(0b0010_1100).should == false

      different_bignum = (2 * bignum_value) & (~bignum_value)
      (0b1010_1010 | different_bignum).anybits?(0b1000_0010 | bignum_value).should == true
      (0b1010_1010 | different_bignum).anybits?(0b0010_1100 | bignum_value).should == true
      (0b1000_0010 | different_bignum).anybits?(0b0010_1100 | bignum_value).should == false
    end

    it "handles negative values using two's complement notation" do
      (~42).anybits?(42).should == false
      (-42).anybits?(-42).should == true
      (~0b100).anybits?(~0b1).should == true
      (~(0b100 | bignum_value)).anybits?(~(0b1 | bignum_value)).should == true
    end

    it "coerces the rhs using to_int" do
      obj = mock("the int 0b10")
      obj.should_receive(:to_int).and_return(0b10)
      0b110.anybits?(obj).should == true
    end

    it "raises a TypeError when given a non-Integer" do
      lambda {
        (obj = mock('10')).should_receive(:coerce).any_number_of_times.and_return([42, 10])
        13.anybits?(obj)
      }.should raise_error(TypeError)

      lambda { 13.anybits?("10") }.should raise_error(TypeError)
      lambda { 13.anybits?(:symbol) }.should raise_error(TypeError)
    end
  end
end
40.641026
91
0.666246
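A quick illustration of the Integer#anybits? behaviour the spec above exercises (Ruby 2.5+); the literals are taken from the spec's own examples.

0b1010_1010.anybits?(0b1000_0010) # => true  (at least one bit in common)
0b1000_0010.anybits?(0b0010_1100) # => false (no bits in common)
(-42).anybits?(-42)               # => true  (negatives use two's complement)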
1deff801fbe079f91eaccb40580490c8d93b5506
281
cask 'terraform-0.8.1' do
  version '0.8.1'
  sha256 '275104513600bf50a28942131d928d2be405c75f9f36a9c722718500075856a1'

  url "https://releases.hashicorp.com/terraform/#{version}/terraform_#{version}_darwin_amd64.zip"
  name 'Terraform'
  homepage 'https://www.terraform.io/'
end
31.222222
97
0.775801
7a98b12cedea9ae22a60895482b585db3e12a442
766
cask "sauerbraten" do version "2020.11.29,2020.12.21" sha256 "a7e26c85ff15be88b9ced26e64c2fa0e68bde1325c146763833abac591898bcb" url "https://downloads.sourceforge.net/sauerbraten/sauerbraten/#{version.csv.first.dots_to_underscores}/sauerbraten_#{version.csv.second.dots_to_underscores}_macos.dmg", verified: "sourceforge.net/sauerbraten/" name "Cube 2: Sauerbraten" desc "Multiplayer & singleplayer first person shooter" homepage "http://sauerbraten.org/" livecheck do url :homepage strategy :page_match do |page| match = page.match(%r{(\d+(?:_\d+)*)/sauerbraten[._-]?(\d+(?:_\d+)*)[._-]?macos.dmg}i) next if match.blank? "#{match[1].tr("_", ".")},#{match[2].tr("_", ".")}" end end app "Sauerbraten.app" end
33.304348
171
0.686684
e9d7d8b190fb8d7c33bb9a6d5adac0740e9f3775
1,031
# Cookbook Name:: docker_engine
# Attributes:: default
#
# Author : OneOps
# Apache License, Version 2.0

# Docker binary attributes
default[:docker_engine][:service] = 'docker'
default[:docker_engine][:package] = 'docker-engine'
default[:docker_engine][:release] = '1.el7.centos'
default[:docker_engine][:arch] = 'x86_64'
default[:docker_engine][:api_gem] = 'docker-api-1.28.0.gem'

# RHEL Platform config
default[:docker_engine][:tlscacert_file] = '/etc/docker/ca.pem'
default[:docker_engine][:tlscert_file] = '/etc/docker/cert.pem'
default[:docker_engine][:tlskey_file] = '/etc/docker/key.pem'

# Default Daemon socket(s) to connect to
default[:docker_engine][:def_unix_sock] = '/var/run/docker.sock'
default[:docker_engine][:systemd_path] = '/usr/lib/systemd/system'
default[:docker_engine][:systemd_drop_in_path] = '/etc/systemd/system/docker.service.d'
default[:docker_engine][:repo_file] = '/etc/yum.repos.d/docker.repo'
default[:docker_engine][:default_repo] = 'https://yum.dockerproject.org/repo/main/centos/$releasever/'
41.24
102
0.748788
335e245615005cc03aa0781fe44aa70ac1906a87
980
# frozen_string_literal: true

module Interaction
  #
  # The Result object is responsible for reporting the status and details
  # of the action it was initialized on. A Result is a success until it
  # is explicitly invoked to fail.
  #
  # # Example usage
  # result = Result.new
  #
  # result.fail(error: "Could not complete action")
  #
  # if result.success?
  #   puts "yay!"
  # else
  #   puts "boo...#{result.details[:error]}"
  # end
  #
  class Result
    attr_accessor :details
    attr_writer :custom_exception_detail

    def initialize
      @failure = false
      @details = {}
      @custom_exception_detail = {}
    end

    def success?
      !failure?
    end

    def failure?
      @failure
    end

    def error
      @details[:error]
    end

    def fail(details = {})
      @failure = true
      @details = details
    end

    def fail_from_exception(details)
      fail(details.merge(@custom_exception_detail))
    end
  end
end
18.148148
73
0.614286
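A short sketch of the exception path on the Result class above; OrderError and the detail keys are made-up placeholders, not part of the original code.

OrderError = Class.new(StandardError) # placeholder exception for the sketch

result = Interaction::Result.new
result.custom_exception_detail = { code: "timeout" } # merged into details on failure

begin
  raise OrderError, "upstream timed out"
rescue OrderError => e
  result.fail_from_exception(error: e.message)
end

result.failure? # => true
result.details  # => { error: "upstream timed out", code: "timeout" }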
1ae2dbaf1208198df73a514097129da7fbde4b9a
8,393
require 'test_helper' begin require "hiredis" require "redis/connection/hiredis" puts "running tests with hiredis driver" rescue LoadError puts "running test with default redis driver" end class TestRedis < Minitest::Test ERROR_TIMEOUT = 5 ERROR_THRESHOLD = 1 SEMIAN_OPTIONS = { name: :testing, tickets: 1, timeout: 0, error_threshold: ERROR_THRESHOLD, success_threshold: 2, error_timeout: ERROR_TIMEOUT, } attr_writer :threads def setup @proxy = Toxiproxy[:semian_test_redis] Semian.destroy(:redis_testing) end def test_semian_identifier assert_equal :redis_foo, new_redis(semian: {name: 'foo'})._client.semian_identifier assert_equal :"redis_#{SemianConfig['toxiproxy_upstream_host']}:16379/1", new_redis(semian: {name: nil})._client.semian_identifier assert_equal :'redis_example.com:42/1', new_redis(host: 'example.com', port: 42, semian: {name: nil})._client.semian_identifier end def test_client_alias redis = connect_to_redis! assert_equal redis._client.semian_resource, redis.semian_resource assert_equal redis._client.semian_identifier, redis.semian_identifier end def test_semian_can_be_disabled resource = Redis.new(semian: false)._client.semian_resource assert_instance_of Semian::UnprotectedResource, resource end def test_semian_resource_in_pipeline redis = connect_to_redis! redis.pipelined do assert_instance_of Semian::ProtectedResource, redis.semian_resource end end def test_connection_errors_open_the_circuit client = connect_to_redis! @proxy.downstream(:latency, latency: 600).apply do ERROR_THRESHOLD.times do assert_raises ::Redis::TimeoutError do client.get('foo') end end assert_raises ::Redis::CircuitOpenError do client.get('foo') end end end def test_command_errors_does_not_open_the_circuit client = connect_to_redis! client.hset('my_hash', 'foo', 'bar') (ERROR_THRESHOLD * 2).times do assert_raises Redis::CommandError do client.get('my_hash') end end end def test_command_errors_because_of_oom_do_open_the_circuit client = connect_to_redis! with_maxmemory(1) do ERROR_THRESHOLD.times do exception = assert_raises ::Redis::OutOfMemoryError do client.set('foo', 'bar') end assert_equal :redis_testing, exception.semian_identifier end assert_raises ::Redis::CircuitOpenError do client.set('foo', 'bla') end end end def test_script_errors_because_of_oom_do_open_the_circuit client = connect_to_redis! with_maxmemory(1) do ERROR_THRESHOLD.times do exception = assert_raises ::Redis::OutOfMemoryError do client.eval("return redis.call('set', 'foo', 'bar');") end assert_equal :redis_testing, exception.semian_identifier end assert_raises ::Redis::CircuitOpenError do client.eval("return redis.call('set', 'foo', 'bar');") end end end def test_connect_instrumentation notified = false subscriber = Semian.subscribe do |event, resource, scope, adapter| next unless event == :success notified = true assert_equal Semian[:redis_testing], resource assert_equal :connection, scope assert_equal :redis, adapter end connect_to_redis! assert notified, 'No notifications has been emitted' ensure Semian.unsubscribe(subscriber) end def test_resource_acquisition_for_connect connect_to_redis! Semian[:redis_testing].acquire do error = assert_raises Redis::ResourceBusyError do connect_to_redis! end assert_equal :redis_testing, error.semian_identifier end end def test_redis_connection_errors_are_tagged_with_the_resource_identifier @proxy.downstream(:latency, latency: 600).apply do error = assert_raises ::Redis::TimeoutError do redis = connect_to_redis! 
redis.get('foo') end assert_equal :redis_testing, error.semian_identifier end end def test_other_redis_errors_are_not_tagged_with_the_resource_identifier client = connect_to_redis! client.set('foo', 'bar') error = assert_raises ::Redis::CommandError do client.hget('foo', 'bar') end refute error.respond_to?(:semian_identifier) end def test_resource_timeout_on_connect @proxy.downstream(:latency, latency: 500).apply do background { connect_to_redis! } assert_raises Redis::ResourceBusyError do connect_to_redis! end end end def test_dns_resolution_failures_open_circuit ERROR_THRESHOLD.times do assert_raises Redis::ResolveError do connect_to_redis!(host: 'thisdoesnotresolve') end end assert_raises Redis::CircuitOpenError do connect_to_redis!(host: 'thisdoesnotresolve') end Timecop.travel(ERROR_TIMEOUT + 1) do connect_to_redis! end end [ "Temporary failure in name resolution", "Can't resolve example.com", "name or service not known", "Could not resolve hostname example.com: nodename nor servname provided, or not known", ].each do |message| test_suffix = message.gsub(/\W/, '_').downcase define_method(:"test_dns_resolution_failure_#{test_suffix}") do Redis::Client.any_instance.expects(:raw_connect).raises(message) assert_raises Redis::ResolveError do connect_to_redis!(host: 'example.com') end end end def test_circuit_breaker_on_connect @proxy.downstream(:latency, latency: 500).apply do background { connect_to_redis! } ERROR_THRESHOLD.times do assert_raises Redis::ResourceBusyError do connect_to_redis! end end end yield_to_background assert_raises Redis::CircuitOpenError do connect_to_redis! end Timecop.travel(ERROR_TIMEOUT + 1) do connect_to_redis! end end def test_query_instrumentation client = connect_to_redis! notified = false subscriber = Semian.subscribe do |event, resource, scope, adapter| notified = true assert_equal :success, event assert_equal Semian[:redis_testing], resource assert_equal :query, scope assert_equal :redis, adapter end client.get('foo') assert notified, 'No notifications has been emitted' ensure Semian.unsubscribe(subscriber) end def test_resource_acquisition_for_query client = connect_to_redis! Semian[:redis_testing].acquire do assert_raises Redis::ResourceBusyError do client.get('foo') end end end def test_resource_timeout_on_query client = connect_to_redis! client2 = connect_to_redis! @proxy.downstream(:latency, latency: 500).apply do background { client2.get('foo') } assert_raises Redis::ResourceBusyError do client.get('foo') end end end def test_circuit_breaker_on_query client = connect_to_redis! client2 = connect_to_redis! client.set('foo', 2) @proxy.downstream(:latency, latency: 1000).apply do background { client2.get('foo') } ERROR_THRESHOLD.times do assert_raises Redis::ResourceBusyError do client.get('foo') end end end yield_to_background assert_raises Redis::CircuitOpenError do client.get('foo') end Timecop.travel(ERROR_TIMEOUT + 1) do assert_equal '2', client.get('foo') end end private def new_redis(options = {}) options[:host] = SemianConfig['toxiproxy_upstream_host'] if options[:host].nil? 
semian_options = SEMIAN_OPTIONS.merge(options.delete(:semian) || {}) Redis.new({ port: SemianConfig['redis_toxiproxy_port'], reconnect_attempts: 0, db: 1, timeout: 0.5, semian: semian_options, }.merge(options)) end def connect_to_redis!(semian_options = {}) host = semian_options.delete(:host) redis = new_redis(host: host, semian: semian_options) redis._client.connect redis end def with_maxmemory(bytes) client = connect_to_redis!(name: 'maxmemory') _, old = client.config('get', 'maxmemory') begin client.config('set', 'maxmemory', bytes) yield ensure client.config('set', 'maxmemory', old) end end end
25.510638
134
0.694031
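Reduced to a standalone sketch, this is roughly how the test suite above builds a Semian-protected client; the host and threshold values here are placeholders rather than the suite's Toxiproxy configuration.

require "semian"
require "semian/redis"

redis = Redis.new(
  host: "redis.example.internal", # placeholder host
  port: 6379,
  timeout: 0.5,
  reconnect_attempts: 0,
  semian: {
    name: :testing,
    tickets: 1,
    error_threshold: 1,
    success_threshold: 2,
    error_timeout: 5,
  }
)

redis.get("foo") # raises Redis::CircuitOpenError while the circuit is open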
01a02202d0cb1c4d76fa0b9d53963edbc0618a87
574
require 'test_helper'

class AssetTextDatumTest < ActiveSupport::TestCase
  def setup
    @asset_data = asset_text_datum(:one)
  end

  test "should validate with valid data" do
    skip
    assert @asset_data.valid?
  end

  test "should check if data is present" do
    skip
    @asset_data.data = ""
    assert_not @asset_data.valid?
    @asset_data.data = nil
    assert_not @asset_data.valid?
  end

  test "should check if associated asset is valid" do
    skip
    assert_not asset_text_datum(:invalid_asset_id)
    assert_not asset_text_datum(:invalid_asset_value)
  end
end
21.259259
53
0.736934
61d01eb7e918ecb1ef2bf105a0d76c239211540f
9,435
require 'spec_helper' require 'puppet/defaults' describe "Puppet defaults" do describe "when default_manifest is set" do it "returns ./manifests by default" do expect(Puppet[:default_manifest]).to eq('./manifests') end end describe "when disable_per_environment_manifest is set" do it "returns false by default" do expect(Puppet[:disable_per_environment_manifest]).to eq(false) end it "errors when set to true and default_manifest is not an absolute path" do expect { Puppet[:default_manifest] = './some/relative/manifest.pp' Puppet[:disable_per_environment_manifest] = true }.to raise_error Puppet::Settings::ValidationError, /'default_manifest' setting must be.*absolute/ end end describe "when setting the :factpath" do it "should add the :factpath to Facter's search paths" do expect(Facter).to receive(:search).with("/my/fact/path") Puppet.settings[:factpath] = "/my/fact/path" end end describe "when setting the :certname" do it "should fail if the certname is not downcased" do expect { Puppet.settings[:certname] = "Host.Domain.Com" }.to raise_error(ArgumentError) end end describe "when setting :node_name_value" do it "should default to the value of :certname" do Puppet.settings[:certname] = 'blargle' expect(Puppet.settings[:node_name_value]).to eq('blargle') end end describe "when setting the :node_name_fact" do it "should fail when also setting :node_name_value" do expect do Puppet.settings[:node_name_value] = "some value" Puppet.settings[:node_name_fact] = "some_fact" end.to raise_error("Cannot specify both the node_name_value and node_name_fact settings") end it "should not fail when using the default for :node_name_value" do expect do Puppet.settings[:node_name_fact] = "some_fact" end.not_to raise_error end end it "should have a clientyamldir setting" do expect(Puppet.settings[:clientyamldir]).not_to be_nil end it "should have different values for the yamldir and clientyamldir" do expect(Puppet.settings[:yamldir]).not_to eq(Puppet.settings[:clientyamldir]) end it "should have a client_datadir setting" do expect(Puppet.settings[:client_datadir]).not_to be_nil end it "should have different values for the server_datadir and client_datadir" do expect(Puppet.settings[:server_datadir]).not_to eq(Puppet.settings[:client_datadir]) end # See #1232 it "should not specify a user or group for the clientyamldir" do expect(Puppet.settings.setting(:clientyamldir).owner).to be_nil expect(Puppet.settings.setting(:clientyamldir).group).to be_nil end it "should use the service user and group for the yamldir" do allow(Puppet.settings).to receive(:service_user_available?).and_return(true) allow(Puppet.settings).to receive(:service_group_available?).and_return(true) expect(Puppet.settings.setting(:yamldir).owner).to eq(Puppet.settings[:user]) expect(Puppet.settings.setting(:yamldir).group).to eq(Puppet.settings[:group]) end it "should specify that the host private key should be owned by the service user" do allow(Puppet.settings).to receive(:service_user_available?).and_return(true) expect(Puppet.settings.setting(:hostprivkey).owner).to eq(Puppet.settings[:user]) end it "should specify that the host certificate should be owned by the service user" do allow(Puppet.settings).to receive(:service_user_available?).and_return(true) expect(Puppet.settings.setting(:hostcert).owner).to eq(Puppet.settings[:user]) end [:modulepath, :factpath].each do |setting| it "should configure '#{setting}' not to be a file setting, so multi-directory settings are acceptable" do expect(Puppet.settings.setting(setting)).to 
be_instance_of(Puppet::Settings::PathSetting) end end describe "on a Unix-like platform it", :if => Puppet.features.posix? do it "should add /usr/sbin and /sbin to the path if they're not there" do Puppet::Util.withenv("PATH" => "/usr/bin#{File::PATH_SEPARATOR}/usr/local/bin") do Puppet.settings[:path] = "none" # this causes it to ignore the setting expect(ENV["PATH"].split(File::PATH_SEPARATOR)).to be_include("/usr/sbin") expect(ENV["PATH"].split(File::PATH_SEPARATOR)).to be_include("/sbin") end end end describe "on a Windows-like platform it", :if => Puppet::Util::Platform.windows? do let (:rune_utf8) { "\u16A0\u16C7\u16BB\u16EB\u16D2\u16E6\u16A6\u16EB\u16A0\u16B1\u16A9\u16A0\u16A2\u16B1\u16EB\u16A0\u16C1\u16B1\u16AA\u16EB\u16B7\u16D6\u16BB\u16B9\u16E6\u16DA\u16B3\u16A2\u16D7" } it "path should not add anything" do path = "c:\\windows\\system32#{File::PATH_SEPARATOR}c:\\windows" Puppet::Util.withenv( {"PATH" => path }, :windows ) do Puppet.settings[:path] = "none" # this causes it to ignore the setting expect(ENV["PATH"]).to eq(path) end end it "path should support UTF8 characters" do path = "c:\\windows\\system32#{File::PATH_SEPARATOR}c:\\windows#{File::PATH_SEPARATOR}C:\\" + rune_utf8 Puppet::Util.withenv( {"PATH" => path }, :windows) do Puppet.settings[:path] = "none" # this causes it to ignore the setting envhash = Puppet::Util::Windows::Process.get_environment_strings expect(envhash['Path']).to eq(path) end end end it "should default to json for the preferred serialization format" do expect(Puppet.settings.value(:preferred_serialization_format)).to eq("json") end it "should have a setting for determining the configuration version and should default to an empty string" do expect(Puppet.settings[:config_version]).to eq("") end describe "when enabling reports" do it "should use the default server value when report server is unspecified" do Puppet.settings[:server] = "server" expect(Puppet.settings[:report_server]).to eq("server") end it "should use the default masterport value when report port is unspecified" do Puppet.settings[:masterport] = "1234" expect(Puppet.settings[:report_port]).to eq("1234") end it "should use report_port when set" do Puppet.settings[:masterport] = "1234" Puppet.settings[:report_port] = "5678" expect(Puppet.settings[:report_port]).to eq("5678") end end it "should have a 'prerun_command' that defaults to the empty string" do expect(Puppet.settings[:prerun_command]).to eq("") end it "should have a 'postrun_command' that defaults to the empty string" do expect(Puppet.settings[:postrun_command]).to eq("") end it "should have a 'certificate_revocation' setting that defaults to true" do expect(Puppet.settings[:certificate_revocation]).to be_truthy end describe "reportdir" do subject { Puppet.settings[:reportdir] } it { is_expected.to eq("#{Puppet[:vardir]}/reports") } end describe "reporturl" do subject { Puppet.settings[:reporturl] } it { is_expected.to eq("http://localhost:3000/reports/upload") } end describe "when configuring color" do subject { Puppet.settings[:color] } it { is_expected.to eq("ansi") } end describe "daemonize" do it "should default to true", :unless => Puppet::Util::Platform.windows? do expect(Puppet.settings[:daemonize]).to eq(true) end describe "on Windows", :if => Puppet::Util::Platform.windows? 
do it "should default to false" do expect(Puppet.settings[:daemonize]).to eq(false) end it "should raise an error if set to true" do expect { Puppet.settings[:daemonize] = true }.to raise_error(/Cannot daemonize on Windows/) end end end describe "diff" do it "should default to 'diff' on POSIX", :unless => Puppet::Util::Platform.windows? do expect(Puppet.settings[:diff]).to eq('diff') end it "should default to '' on Windows", :if => Puppet::Util::Platform.windows? do expect(Puppet.settings[:diff]).to eq('') end end describe "when configuring hiera" do it "should have a hiera_config setting" do expect(Puppet.settings[:hiera_config]).not_to be_nil end end describe "when configuring the data_binding terminus" do it "should have a data_binding_terminus setting" do expect(Puppet.settings[:data_binding_terminus]).not_to be_nil end it "should be set to hiera by default" do expect(Puppet.settings[:data_binding_terminus]).to eq(:hiera) end it "to be neither 'hiera' nor 'none', a deprecation warning is logged" do expect(@logs).to eql([]) Puppet[:data_binding_terminus] = 'magic' expect(@logs[0].to_s).to match(/Setting 'data_binding_terminus' is deprecated/) end it "to not log a warning if set to 'none' or 'hiera'" do expect(@logs).to eql([]) Puppet[:data_binding_terminus] = 'none' Puppet[:data_binding_terminus] = 'hiera' expect(@logs).to eql([]) end end describe "agent_catalog_run_lockfile" do it "(#2888) is not a file setting so it is absent from the Settings catalog" do expect(Puppet.settings.setting(:agent_catalog_run_lockfile)).not_to be_a_kind_of Puppet::Settings::FileSetting expect(Puppet.settings.setting(:agent_catalog_run_lockfile)).to be_a Puppet::Settings::StringSetting end end end
37.145669
201
0.701007
4a8a43c41159ae47cf454ec2b51b14025a1dabc4
1,118
# frozen_string_literal: true

class CreateEmployees < ActiveRecord::Migration[6.0]
  def change
    create_table :employees do |t|
      t.references :user
      t.references :language
      t.references :company
      t.references :team
      t.string :uuid, limit: 36, null: false, index: true, unique: true
      t.string :name, index: true
      t.string :email, index: true, unique: true
      t.string :slack_username
      t.string :api_key, limit: 36
      t.string :push_key
      t.string :external_id, index: true
      t.integer :hms, default: 0
      t.integer :involvement, default: 0
      t.integer :results_good, default: 0
      t.integer :results_bad, default: 0
      t.integer :results_fine, default: 0
      t.integer :high5_received, default: 0
      t.integer :high5_given, default: 0
      t.integer :feedback_given, default: 0
      t.integer :comments, default: 0
      t.integer :points, default: 0
      t.integer :role, default: 0, null: false
      t.string :level_name
      t.datetime :deleted_at
      t.timestamps
    end

    add_index :employees, :api_key, unique: true
  end
end
31.055556
71
0.653846
4a1f31ea444c980a06a01fc3453c02dff15887e9
1,118
module Spree
  class Store < Spree::Base
    has_many :store_payment_methods, inverse_of: :store
    has_many :payment_methods, through: :store_payment_methods

    validates :code, presence: true, uniqueness: { allow_blank: true }
    validates :name, presence: true
    validates :url, presence: true
    validates :mail_from_address, presence: true

    before_save :ensure_default_exists_and_is_unique
    before_destroy :validate_not_default

    scope :by_url, lambda { |url| where("url like ?", "%#{url}%") }

    def self.current(store_key = nil)
      current_store = Store.find_by(code: store_key) || Store.by_url(store_key).first
      current_store || Store.default
    end

    def self.default
      where(default: true).first || new
    end

    private

    def ensure_default_exists_and_is_unique
      if default
        Store.where.not(id: id).update_all(default: false)
      elsif Store.where(default: true).count == 0
        self.default = true
      end
    end

    def validate_not_default
      if default
        errors.add(:base, :cannot_destroy_default_store)
      end
    end
  end
end
26.619048
85
0.685152
032efb2b1830c35dec4f8debb4c207913d5fa6ce
21,557
require 'timeout' require 'digest' # Message Queue entry to run a method on any server # zone # This states the subset of miq_servers in this region that can perform this job. # put: Defaults to the zone of the current caller ("MyZone") # Pass in nil to have this performed in any zone. # get: Fetches jobs both for the caller's zone and for any zone. # role # This states the role necessary for a miq_server to perform this job. # put: Defaults to nil (no role required). # Typically this is passed in to require a role. # get: Fetches jobs both for the caller's roles and for no role required. # queue_name # This states the worker queue that will perform this job. # put: Default to "generic" to be performed by the generic worker. # get: Defaults to "generic" but is typically overridden by the caller (a worker) # class MiqQueue < ApplicationRecord belongs_to :handler, :polymorphic => true belongs_to :miq_task attr_accessor :last_exception MAX_PRIORITY = 0 HIGH_PRIORITY = 20 NORMAL_PRIORITY = 100 LOW_PRIORITY = 150 MIN_PRIORITY = 200 PRIORITY_WHICH = [:max, :high, :normal, :low, :min] PRIORITY_DIR = [:higher, :lower] def self.artemis_client(client_ref) @artemis_client ||= {} @artemis_client[client_ref] ||= begin require "manageiq-messaging" ManageIQ::Messaging.logger = _log queue_settings = Settings.prototype.artemis connect_opts = { :host => ENV["ARTEMIS_QUEUE_HOSTNAME"] || queue_settings.queue_hostname, :port => (ENV["ARTEMIS_QUEUE_PORT"] || queue_settings.queue_port).to_i, :username => ENV["ARTEMIS_QUEUE_USERNAME"] || queue_settings.queue_username, :password => ENV["ARTEMIS_QUEUE_PASSWORD"] || queue_settings.queue_password, :client_ref => client_ref, } # caching the client works, even if the connection becomes unavailable # internally the client will track the state of the connection and re-open it, # once it's available again - at least thats true for a stomp connection ManageIQ::Messaging::Client.open(connect_opts) end end def self.columns_for_requeue @requeue_columns ||= MiqQueue.column_names.map(&:to_sym) - [:id] end def self.priority(which, dir = nil, by = 0) unless which.kind_of?(Integer) || PRIORITY_WHICH.include?(which) raise ArgumentError, _("which must be an Integer or one of %{priority}") % {:priority => PRIORITY_WHICH.join(", ")} end unless dir.nil? || PRIORITY_DIR.include?(dir) raise ArgumentError, _("dir must be one of %{directory}") % {:directory => PRIORITY_DIR.join(", ")} end which = const_get("#{which.to_s.upcase}_PRIORITY") unless which.kind_of?(Integer) priority = which.send(dir == :higher ? 
"-" : "+", by) priority = MIN_PRIORITY if priority > MIN_PRIORITY priority = MAX_PRIORITY if priority < MAX_PRIORITY priority end def self.higher_priority(*priorities) priorities.min end def self.lower_priority(*priorities) priorities.max end def self.higher_priority?(p1, p2) p1 < p2 end def self.lower_priority?(p1, p2) p1 > p2 end TIMEOUT = 10.minutes serialize :args, Array serialize :miq_callback, Hash STATE_READY = 'ready'.freeze STATE_DEQUEUE = 'dequeue'.freeze STATE_WARN = 'warn'.freeze STATE_ERROR = 'error'.freeze STATE_TIMEOUT = 'timeout'.freeze STATE_EXPIRED = "expired".freeze validates_inclusion_of :state, :in => [STATE_READY, STATE_DEQUEUE, STATE_WARN, STATE_ERROR, STATE_TIMEOUT, STATE_EXPIRED] FINISHED_STATES = [STATE_WARN, STATE_ERROR, STATE_TIMEOUT, STATE_EXPIRED].freeze STATUS_OK = 'ok'.freeze STATUS_RETRY = 'retry'.freeze STATUS_WARN = STATE_WARN STATUS_ERROR = STATE_ERROR STATUS_TIMEOUT = STATE_TIMEOUT DEFAULT_QUEUE = "generic" def data msg_data && Marshal.load(msg_data) end def data=(value) self.msg_data = Marshal.dump(value) end def self.put(options) options = options.merge( :zone => Zone.determine_queue_zone(options), :state => STATE_READY, :handler_type => nil, :handler_id => nil, ) create_with_options = all.values[:create_with] || {} options[:priority] ||= create_with_options[:priority] || NORMAL_PRIORITY options[:queue_name] ||= create_with_options[:queue_name] || "generic" options[:msg_timeout] ||= create_with_options[:msg_timeout] || TIMEOUT options[:task_id] = $_miq_worker_current_msg.try(:task_id) unless options.key?(:task_id) options[:tracking_label] = Thread.current[:tracking_label] || options[:task_id] unless options.key?(:tracking_label) options[:role] = options[:role].to_s unless options[:role].nil? options[:args] = [options[:args]] if options[:args] && !options[:args].kind_of?(Array) if !Rails.env.production? && options[:args] && (arg = options[:args].detect { |a| a.kind_of?(ActiveRecord::Base) && !a.new_record? }) raise ArgumentError, "MiqQueue.put(:class_name => #{options[:class_name]}, :method => #{options[:method_name]}) does not support args with #{arg.class.name} objects" end msg = MiqQueue.create!(options) _log.info(MiqQueue.format_full_log_msg(msg)) msg end # Trigger a background job # # target_worker: # # @options options [String] :class # @options options [String] :instance # @options options [String] :method # @options options [String] :args # @options options [String] :target_id (deprecated) # @options options [String] :data (deprecated) # # execution parameters: # # @options options [String] :expires_on # @options options [String] :ttl # @options options [String] :task_id (deprecated) # # routing: # # @options options [String] :service name of the service. Similar to previous role or queue name derives # queue_name, role, and zone. # @options options [ExtManagementSystem|Nil|Array<Class,id>] :affinity resource for affinity. Typically an ems # @options options [String] :miq_zone this overrides the auto derived zone. 
# def self.submit_job(options) service = options.delete(:service) || "generic" resource = options.delete(:affinity) case service when "automate" # options[:queue_name] = "generic" options[:role] = service when "ems_inventory" options[:queue_name] = MiqEmsRefreshWorker.queue_name_for_ems(resource) options[:role] = service options[:zone] = resource.my_zone when "ems_operations", "smartstate" # ems_operations, refresh is class method # some smartstate just want MiqServer.my_zone and pass in no resource # options[:queue_name] = "generic" options[:role] = service options[:zone] = resource.try(:my_zone) || MiqServer.my_zone when "event" options[:queue_name] = "ems" options[:role] = service when "generic" raise ArgumentError, "generic job should have no resource" if resource # TODO: can we transition to zone = nil when "notifier" options[:role] = service options[:zone] = nil # any zone when "reporting" options[:queue_name] = "generic" options[:role] = service when "smartproxy" options[:queue_name] = "smartproxy" options[:role] = "smartproxy" end put(options) end def self.where_queue_name(is_array) is_array ? "AND queue_name in (?)" : "AND queue_name = ?" end MIQ_QUEUE_GET = <<-EOL state = 'ready' AND (zone IS NULL OR zone = ?) AND (task_id IS NULL OR task_id NOT IN ( SELECT DISTINCT task_id FROM #{table_name} WHERE state = 'dequeue' AND (zone IS NULL OR zone = ?) AND task_id IS NOT NULL )) AND (role IS NULL OR role IN (?)) AND (server_guid IS NULL OR server_guid = ?) AND (deliver_on IS NULL OR deliver_on <= ?) AND (priority <= ?) EOL def self.get(options = {}) sql_for_get = MIQ_QUEUE_GET + where_queue_name(options[:queue_name].kind_of?(Array)) cond = [ sql_for_get, options[:zone] || MiqServer.my_server.zone.name, options[:zone] || MiqServer.my_server.zone.name, options[:role] || MiqServer.my_server.active_role_names, MiqServer.my_guid, Time.now.utc, options[:priority] || MIN_PRIORITY, options[:queue_name] || "generic", ] prefetch_max_per_worker = Settings.server.prefetch_max_per_worker msgs = MiqQueue.where(cond).order("priority, id").limit(prefetch_max_per_worker) result = nil msgs.each do |msg| begin _log.info("#{MiqQueue.format_short_log_msg(msg)} previously timed out, retrying...") if msg.state == STATE_TIMEOUT handler = MiqWorker.my_worker || MiqServer.my_server msg.update_attributes!(:state => STATE_DEQUEUE, :handler => handler) _log.info("#{MiqQueue.format_full_log_msg(msg)}, Dequeued in: [#{Time.now.utc - msg.created_on}] seconds") return msg rescue ActiveRecord::StaleObjectError result = :stale rescue => err raise _("%{log_message} \"%{error}\" attempting to get next message") % {:log_message => _log.prefix, :error => err} end end _log.debug("All #{prefetch_max_per_worker} messages stale, returning...") if result == :stale result end # This are the queue calls related to worker management which # might not be needed once we use kubernetes for worker/pod management def self.put_deprecated(*args) put(*args) end def unget(options = {}) update_attributes!(options.merge(:state => STATE_READY, :handler => nil)) @delivered_on = nil _log.info("#{MiqQueue.format_full_log_msg(self)}, Requeued") end # TODO (juliancheal) This is a hack. Brakeman was giving us an SQL injection # warning when we concatonated the queue_name string onto the query. # Creating two seperate queries like this, resolves the Brakeman issue, but # isn't idea. This will need to be rewritten using Arel queires at some point. MIQ_QUEUE_PEEK = <<-EOL state = 'ready' AND (zone IS NULL OR zone = ?) 
AND (role IS NULL OR role IN (?)) AND (server_guid IS NULL OR server_guid = ?) AND (deliver_on IS NULL OR deliver_on <= ?) AND (priority <= ?) AND queue_name = ? EOL MIQ_QUEUE_PEEK_ARRAY = <<-EOL state = 'ready' AND (zone IS NULL OR zone = ?) AND (role IS NULL OR role IN (?)) AND (server_guid IS NULL OR server_guid = ?) AND (deliver_on IS NULL OR deliver_on <= ?) AND (priority <= ?) AND queue_name in (?) EOL def self.peek(options = {}) conditions, select, limit = options.values_at(:conditions, :select, :limit) sql_for_peek = conditions[:queue_name].kind_of?(Array) ? MIQ_QUEUE_PEEK_ARRAY : MIQ_QUEUE_PEEK cond = [ sql_for_peek, conditions[:zone] || MiqServer.my_server.zone.name, conditions[:role] || MiqServer.my_server.active_role_names, MiqServer.my_guid, Time.now.utc, conditions[:priority] || MIN_PRIORITY, conditions[:queue_name] || "generic", ] result = MiqQueue.where(cond).order(:priority, :id).limit(limit || 1) result = result.select(select) unless select.nil? result.to_a end # Find the MiqQueue item with the specified find options, and yields that # record to a block. The block should return the options for updating # the record. If the record was not found, the block's options will be # used to put a new item on the queue. # def self.put_or_update(find_options) find_options = default_get_options(find_options) # Since args are a serializable field, remove them and manually dump them # for proper comparison. where_scope = if find_options.key?(:args) MiqQueue.where(find_options.except(:args)).where(['args = ?', find_options[:args].try(:to_yaml)]) else MiqQueue.where(find_options) end msg = nil loop do msg = where_scope.order("priority, id").first save_options = block_given? ? yield(msg, find_options) : nil # Add a new queue item based on the returned save options, or the find # options if no save options were given. if msg.nil? put_options = save_options || find_options put_options = put_options.except(:state) if put_options.key?(:state) msg = MiqQueue.put(put_options) break end begin # Update the queue item based on the returned save options. unless save_options.nil? if save_options.key?(:msg_timeout) && (msg.msg_timeout > save_options[:msg_timeout]) _log.warn("#{MiqQueue.format_short_log_msg(msg)} ignoring request to decrease timeout from <#{msg.msg_timeout}> to <#{save_options[:msg_timeout]}>") save_options = save_options.except(:msg_timeout) end msg.update_attributes!(save_options) _log.info("#{MiqQueue.format_short_log_msg(msg)} updated with following: #{save_options.except(:data, :msg_data).inspect}") _log.info("#{MiqQueue.format_full_log_msg(msg)}, Requeued") end break rescue ActiveRecord::StaleObjectError _log.debug("#{MiqQueue.format_short_log_msg(msg)} stale, retrying...") rescue => err raise RuntimeError, _("%{log_message} \"%{error}\" attempting merge next message") % {:log_message => _log.prefix, :error => err}, err.backtrace end end msg end # Find the MiqQueue item with the specified find options, and if not found # puts a new item on the queue. If the item was found, it will not be # changed, and will be yielded to an optional block, generally for logging # purposes. def self.put_unless_exists(find_options) put_or_update(find_options) do |msg, item_hash| ret = yield(msg, item_hash) if block_given? # create the record if the original message did not exist, don't change otherwise ret if msg.nil? 
end end def self.unqueue(options) find_by(optional_values(default_get_options(options))).try(:destroy) end def deliver(requester = nil) result = nil delivered_on _log.info("#{MiqQueue.format_short_log_msg(self)}, Delivering...") begin raise _("class_name cannot be nil") if class_name.nil? obj = class_name.constantize if instance_id begin if (class_name == requester.class.name) && requester.respond_to?(:id) && (instance_id == requester.id) obj = requester else obj = obj.find(instance_id) end rescue ActiveRecord::RecordNotFound => err _log.warn("#{MiqQueue.format_short_log_msg(self)} will not be delivered because #{err.message}") return STATUS_WARN, nil, nil rescue => err _log.error("#{MiqQueue.format_short_log_msg(self)} will not be delivered because #{err.message}") return STATUS_ERROR, err.message, nil end end data = self.data args.push(data) if data args.unshift(target_id) if obj.kind_of?(Class) && target_id begin status = STATUS_OK message = "Message delivered successfully" result = User.with_user_group(user_id, group_id) { dispatch_method(obj, args) } rescue MiqException::MiqQueueRetryLater => err unget(err.options) message = "Message not processed. Retrying #{err.options[:deliver_on] ? "at #{err.options[:deliver_on]}" : 'immediately'}" _log.error("#{MiqQueue.format_short_log_msg(self)}, #{message}") status = STATUS_RETRY rescue Timeout::Error message = "timed out after #{Time.now - delivered_on} seconds. Timeout threshold [#{msg_timeout}]" _log.error("#{MiqQueue.format_short_log_msg(self)}, #{message}") status = STATUS_TIMEOUT end rescue StandardError, SyntaxError => error _log.error("#{MiqQueue.format_short_log_msg(self)}, Error: [#{error}]") _log.log_backtrace(error) unless error.kind_of?(MiqException::Error) status = STATUS_ERROR self.last_exception = error message = error.message end return status, message, result end def dispatch_method(obj, args) Timeout.timeout(msg_timeout) do args = activate_miq_task(args) obj.send(method_name, *args) end end DELIVER_IN_ERROR_MSG = 'Deliver in error'.freeze def delivered_in_error(msg = nil) delivered('error', msg || DELIVER_IN_ERROR_MSG, nil) end def delivered(state, msg, result) self.state = state _log.info("#{MiqQueue.format_short_log_msg(self)}, State: [#{state}], Delivered in [#{Time.now - delivered_on}] seconds") m_callback(msg, result) unless miq_callback.blank? rescue => err _log.error("#{MiqQueue.format_short_log_msg(self)}, #{err.message}") ensure destroy_potentially_stale_record end def delivered_on @delivered_on ||= Time.now end def m_callback(msg, result) if miq_callback[:class_name] && miq_callback[:method_name] begin klass = miq_callback[:class_name].constantize if miq_callback[:instance_id] obj = klass.find(miq_callback[:instance_id]) else obj = klass _log.debug("#{MiqQueue.format_short_log_msg(self)}, Could not find callback in Class: [#{miq_callback[:class_name]}]") unless obj end if obj.respond_to?(miq_callback[:method_name]) miq_callback[:args] ||= [] log_args = result.inspect log_args = "#{log_args[0, 500]}..." if log_args.length > 500 # Trim long results log_args = miq_callback[:args] + [state, msg, log_args] _log.info("#{MiqQueue.format_short_log_msg(self)}, Invoking Callback with args: #{log_args.inspect}") unless obj.nil? 
cb_args = miq_callback[:args] + [state, msg, result] cb_args << self if cb_args.length == (obj.method(miq_callback[:method_name]).arity - 1) obj.send(miq_callback[:method_name], *cb_args) else _log.warn("#{MiqQueue.format_short_log_msg(self)}, Instance: [#{obj}], does not respond to Method: [#{miq_callback[:method_name]}], skipping") end rescue => err _log.error("#{MiqQueue.format_short_log_msg(self)}: #{err}") _log.log_backtrace(err) end else _log.warn("#{MiqQueue.format_short_log_msg(self)}, Callback is not well-defined, skipping") end end def requeue(options = {}) options.reverse_merge!(attributes.symbolize_keys) MiqQueue.put(options.slice(*MiqQueue.columns_for_requeue)) end def check_for_timeout(log_prefix = "MIQ(MiqQueue.check_for_timeout)", grace = 10.seconds, timeout = msg_timeout.seconds) if state == 'dequeue' && Time.now.utc > (updated_on + timeout.seconds + grace.seconds).utc msg = " processed by #{handler.format_full_log_msg}" unless handler.nil? $log.warn("#{log_prefix} Timed Out Active #{MiqQueue.format_short_log_msg(self)}#{msg} after #{Time.now.utc - updated_on} seconds") destroy rescue nil end end def finished? FINISHED_STATES.include?(state) end def unfinished? !finished? end def self.format_full_log_msg(msg) "Message id: [#{msg.id}], #{msg.handler_type} id: [#{msg.handler_id}], Zone: [#{msg.zone}], Role: [#{msg.role}], Server: [#{msg.server_guid}], MiqTask id: [#{msg.miq_task_id}], Ident: [#{msg.queue_name}], Target id: [#{msg.target_id}], Instance id: [#{msg.instance_id}], Task id: [#{msg.task_id}], Command: [#{msg.class_name}.#{msg.method_name}], Timeout: [#{msg.msg_timeout}], Priority: [#{msg.priority}], State: [#{msg.state}], Deliver On: [#{msg.deliver_on}], Data: [#{msg.data.nil? ? "" : "#{msg.data.length} bytes"}], Args: #{MiqPassword.sanitize_string(msg.args.inspect)}" end def self.format_short_log_msg(msg) "Message id: [#{msg.id}]" end def get_worker handler if handler.kind_of?(MiqWorker) end def self.get_worker(task_id) find_by(:task_id => task_id).try(:get_worker) end def self.display_name(number = 1) n_('Queue', 'Queues', number) end private def activate_miq_task(args) MiqTask.update_status(miq_task_id, MiqTask::STATE_ACTIVE, MiqTask::STATUS_OK, "Task starting") if miq_task_id params = args.first params[:miq_task_id] = miq_task_id if params.kind_of?(Hash) args end # default values for get operations def self.default_get_options(options) options.reverse_merge( :queue_name => DEFAULT_QUEUE, :state => STATE_READY, :zone => Zone.determine_queue_zone(options) ) end # when searching miq_queue, we often want to see if a key is nil, or a particular value # given a set of keys, modify the params to have those values # example: # optional_values({:a => 'x', :b => 'y'}, [:a]) # # => {:a => [nil, 'x'], :b => 'y'} # sql => "where (a is nil or a = 'x') and b = 'y'" # def self.optional_values(options, keys = [:zone]) options = options.dup Array(keys).each do |key| options[key] = [nil, options[key]].uniq if options.key?(key) end options end def destroy_potentially_stale_record destroy rescue ActiveRecord::StaleObjectError begin reload.destroy rescue ActiveRecord::RecordNotFound # ignore end end end # Class MiqQueue
36.475465
582
0.660528
01104aaad9c684be64a096389b5be5fa304bbf6c
35
module SSE
  VERSION = "2.1.0"
end
8.75
19
0.628571
f742c84833f3686ffd84fc3a66860038d238d0dc
1,058
RSpec.shared_examples "ResourceList" do |klass|
  let(:all_records){ klass.list.to_a }

  context "default page size (50)" do
    subject { klass.list }

    specify "#to_a returns all records" do
      expect(subject.to_a).to match_array all_records
    end
  end

  context "page size: 2" do
    subject { klass.list(page_size: 2) }

    it "#to_a returns all records" do
      expect(subject.to_a).to match_array all_records
    end

    specify "#[] accesses all pages transparently" do
      expect(subject[0]).to eq all_records[0]
      expect(subject[3]).to eq all_records[3]
      expect(subject[-2]).to eq all_records[-2]
    end
  end

  context "page size: 300" do
    subject { klass.list(page_size: 300) }

    it "maxes out at 250" do
      expect{ subject.page(1) }.to_not raise_error
      expect(subject.page_size).to eq 250
    end
  end

  context "page size: -1" do
    subject { klass.list(page_size: -1) }

    it "defaults to 50" do
      expect{ subject }.to_not raise_error
      expect(subject.page_size).to eq 50
    end
  end
end
24.045455
53
0.655955
038cbf1f2f2cf5d36aef8d1e0b95690d47026cfd
1,962
require "abstract_unit"
require "active_support/core_ext/module/concerning"

class ModuleConcerningTest < ActiveSupport::TestCase
  def test_concerning_declares_a_concern_and_includes_it_immediately
    klass = Class.new { concerning(:Foo) {} }
    assert klass.ancestors.include?(klass::Foo), klass.ancestors.inspect
  end
end

class ModuleConcernTest < ActiveSupport::TestCase
  def test_concern_creates_a_module_extended_with_active_support_concern
    klass = Class.new do
      concern :Baz do
        included { @foo = 1 }
        def should_be_public; end
      end
    end

    # Declares a concern but doesn't include it
    assert klass.const_defined?(:Baz, false)
    assert !ModuleConcernTest.const_defined?(:Baz)
    assert_kind_of ActiveSupport::Concern, klass::Baz
    assert !klass.ancestors.include?(klass::Baz), klass.ancestors.inspect

    # Public method visibility by default
    assert klass::Baz.public_instance_methods.map(&:to_s).include?("should_be_public")

    # Calls included hook
    assert_equal 1, Class.new { include klass::Baz }.instance_variable_get("@foo")
  end

  class Foo
    concerning :Bar do
      module ClassMethods
        def will_be_orphaned; end
      end

      const_set :ClassMethods, Module.new { def hacked_on; end }

      # Doesn't overwrite existing ClassMethods module.
      class_methods do
        def nicer_dsl; end
      end

      # Doesn't overwrite previous class_methods definitions.
      class_methods do
        def doesnt_clobber; end
      end
    end
  end

  def test_using_class_methods_blocks_instead_of_ClassMethods_module
    assert !Foo.respond_to?(:will_be_orphaned)
    assert Foo.respond_to?(:hacked_on)
    assert Foo.respond_to?(:nicer_dsl)
    assert Foo.respond_to?(:doesnt_clobber)

    # Orphan in Foo::ClassMethods, not Bar::ClassMethods.
    assert Foo.const_defined?(:ClassMethods)
    assert Foo::ClassMethods.method_defined?(:will_be_orphaned)
  end
end
29.727273
86
0.724261
f89ba5b8af459d687f83f6db7918a209380b1817
36
require_relative 'utils/calculator'
18
35
0.861111
f7ddc6de12713315eeb306aab934aec359f0f38e
2,214
cask 'docker' do
  if MacOS.version <= :el_capitan
    version '18.06.1-ce-mac73,26764'
    sha256 '3429eac38cf0d198039ad6e1adce0016f642cdb914a34c67ce40f069cdb047a5'
  else
    version '2.0.0.0-ce-mac81,29211'
    sha256 '5343fa169b9f7bc6f41f91e5797d148a21d2c0fc1d5c2da80d7d93ed6b3bd1ff'
  end

  url "https://download.docker.com/mac/stable/#{version.after_comma}/Docker.dmg"
  appcast 'https://download.docker.com/mac/stable/appcast.xml'
  name 'Docker Community Edition'
  name 'Docker CE'
  homepage 'https://www.docker.com/community-edition'

  auto_updates true

  app 'Docker.app'

  uninstall delete:    [
                         '/Library/PrivilegedHelperTools/com.docker.vmnetd',
                         '/private/var/tmp/com.docker.vmnetd.socket',
                         '/usr/local/bin/docker',
                         '/usr/local/bin/docker-compose',
                         '/usr/local/bin/docker-credential-osxkeychain',
                         '/usr/local/bin/docker-machine',
                         '/usr/local/bin/hyperkit',
                         '/usr/local/bin/notary',
                         '/usr/local/bin/vpnkit',
                       ],
            launchctl: [
                         'com.docker.helper',
                         'com.docker.vmnetd',
                       ],
            quit:      'com.docker.docker'

  zap trash: [
               '/usr/local/bin/docker-compose.backup',
               '/usr/local/bin/docker-machine.backup',
               '/usr/local/bin/docker.backup',
               '~/Library/Application Scripts/com.docker.helper',
               '~/Library/Caches/KSCrashReports/Docker',
               '~/Library/Caches/com.docker.docker',
               '~/Library/Caches/com.plausiblelabs.crashreporter.data/com.docker.docker',
               '~/Library/Containers/com.docker.docker',
               '~/Library/Containers/com.docker.helper',
               '~/Library/Group Containers/group.com.docker',
               '~/Library/Preferences/com.docker.docker.plist',
             ],
      rmdir: [
               '~/Library/Caches/KSCrashReports',
               '~/Library/Caches/com.plausiblelabs.crashreporter.data',
             ]
end
40.254545
89
0.552846
7abf45060d74f7bb6d9e96ff6ccce7ce2cfdbc77
762
$LOAD_PATH.push File.expand_path('../lib', __FILE__)

# Maintain your gem's version:
require 'fastread/version'

# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name = 'fastread'
  s.version = Fastread::VERSION
  s.authors = %w(sovetnik MistaTwista)
  s.email = ['']
  s.homepage = 'http://fastread.com'
  s.summary = 'Telegram bot for estimating time for articles in links'
  s.description = 'Fastread takes the link and answers with estimated time for reading text from that link'
  s.license = 'MIT'

  s.files = `git ls-files`.split("\n")
  s.executables = ['fastread']
  s.require_paths = ['lib']

  s.add_development_dependency 'rake'
  s.add_development_dependency 'rspec'
end
31.75
107
0.675853
61f30a914da94f5f7b2b69e6b19b16c15b9e4ab3
115
require_relative "../models/group"

class GroupRepository
  def self.list
    Group.order("LOWER(name)")
  end
end
14.375
34
0.721739
ab2a172bd1266307120925d8b64fba0b67501a72
4,554
# frozen_string_literal: true

# Copyright 2015-2017, the Linux Foundation, IDA, and the
# CII Best Practices badge contributors
# SPDX-License-Identifier: MIT

# rubocop:disable Metrics/BlockLength

# The priority is based upon order of creation:
# first created -> highest priority.
# See how all your routes lay out with "rake routes".

Rails.application.routes.draw do
  # Root of site
  root 'static_pages#home'

  scope '(:locale)' do
    resources :project_stats
    get 'sessions/new'
    get 'signup' => 'users#new'
    get 'home' => 'static_pages#home'
    get 'criteria' => 'static_pages#criteria'
    get 'cookies' => 'static_pages#cookies'
    get 'robots' => 'static_pages#robots', defaults: { format: 'text' }
    get 'feed' => 'projects#feed', defaults: { format: 'atom' }
    get 'reminders' => 'projects#reminders_summary'

    VALID_CRITERIA_LEVEL = /[0-2]/

    resources :projects do
      member do
        get 'badge', defaults: { format: 'svg' }
        get '' => 'projects#show_json',
            constraints: ->(req) { req.format == :json }
        get ':criteria_level(.:format)' => 'projects#show',
            constraints: { criteria_level: VALID_CRITERIA_LEVEL }
        get ':criteria_level/edit(.:format)' => 'projects#edit',
            constraints: { criteria_level: VALID_CRITERIA_LEVEL }
      end
    end

    match(
      'projects/:id/(:criteria_level/)edit' => 'projects#update',
      via: %i[put patch], as: :put_project,
      constraints: { criteria_level: VALID_CRITERIA_LEVEL }
    )

    resources :users
    resources :account_activations, only: [:edit]
    resources :password_resets, only: %i[new create edit update]

    get 'login' => 'sessions#new'
    post 'login' => 'sessions#create'
    delete 'logout' => 'sessions#destroy'
    get 'auth/:provider/callback' => 'sessions#create'
    get '/signout' => 'sessions#destroy', as: :signout
  end

  # If no route found in some cases, just redirect to a 404 page.
  # The production site is constantly hit by nonsense paths,
  # and while Rails has a built-in mechanism to handle nonsense,
  # Rails' built-in mechanism creates noisy logs.
  # Ideally we'd redirect all no-match cases quickly to a 404 handler.
  # Unfortunately, the noise-reduction approach for Rails 4 noted here:
  # http://rubyjunky.com/cleaning-up-rails-4-production-logging.html
  # works in development but does NOT work in production.
  # So instead, we'll select a few common cases where we have nothing
  # and there's no possible security problem, and fast-path its rejection
  # by redirecting to a 404 (without a lengthy log of the cause).
  # wp-login.php queries are evidences of WordPress brute-force attacks:
  # http://www.inmotionhosting.com/support/edu/wordpress/
  # wp-login-brute-force-attack
  match 'kk.php', via: :all, to: 'static_pages#error_404'
  match 'wp-login.php', via: :all, to: 'static_pages#error_404'
  match '.well-known/*path', via: :all, to: 'static_pages#error_404'

  # Interpret a bare locale as going to the homepage with that locale.
  # This requires special handling.
  get '/:locale', to: 'static_pages#home'

  # Here are some examples of routes.

  # Example of regular route:
  #   get 'products/:id' => 'catalog#view'

  # Example of named route that can be invoked with purchase_url(id: product.id)
  #   get 'products/:id/purchase' => 'catalog#purchase', as: :purchase

  # Example resource route (maps HTTP verbs to controller actions
  # automatically):
  #   resources :products

  # Example resource route with options:
  #   resources :products do
  #     member do
  #       get 'short'
  #       post 'toggle'
  #     end
  #
  #     collection do
  #       get 'sold'
  #     end
  #   end

  # Example resource route with sub-resources:
  #   resources :products do
  #     resources :comments, :sales
  #     resource :seller
  #   end

  # Example resource route with more complex sub-resources:
  #   resources :products do
  #     resources :comments
  #     resources :sales do
  #       get 'recent', on: :collection
  #     end
  #   end

  # Example resource route with concerns:
  #   concern :toggleable do
  #     post 'toggle'
  #   end
  #   resources :posts, concerns: :toggleable
  #   resources :photos, concerns: :toggleable

  # Example resource route within a namespace:
  #   namespace :admin do
  #     # Directs /admin/products/* to Admin::ProductsController
  #     # (app/controllers/admin/products_controller.rb)
  #     resources :products
  #   end
end
# rubocop:enable Metrics/BlockLength
33.485294
80
0.668863
e818c73c2bbe5dbec572690561b794200a146688
620
require 'test_helper'

class UsersProfileTest < ActionDispatch::IntegrationTest
  include ApplicationHelper

  def setup
    @user = users(:michael)
  end

  # test "profile display" do
  #   get user_path(@user)
  #   assert_template 'users/show'
  #   assert_select 'title', full_title(@user.name)
  #   assert_select 'h1', text: @user.name
  #   # assert_select 'h1>img.gravatar'
  #   assert_match @user.microposts.count.to_s, response.body
  #   assert_select 'div.pagenation'
  #   @user.microposts.paginate(page: 1).each do |micropost|
  #     assert_match micropost.content, response.body
  #   end
  # end
end
26.956522
61
0.696774
919b2f50e10768fffcf74e8d0057413689cbe68d
7,005
require 'spec_helper' RSpec.describe Sentry::Event do let(:configuration) do Sentry::Configuration.new.tap do |config| config.dsn = DUMMY_DSN end end describe "#initialize" do it "initializes a Event when all required keys are provided" do expect(described_class.new(configuration: configuration)).to be_a(described_class) end it "initializes a Event with correct default values" do configuration.server_name = "foo.local" configuration.environment = "test" configuration.release = "721e41770371db95eee98ca2707686226b993eda" event = described_class.new(configuration: configuration) expect(event.timestamp).to be_a(String) expect(event.user).to eq({}) expect(event.extra).to eq({}) expect(event.contexts).to eq({}) expect(event.tags).to eq({}) expect(event.fingerprint).to eq([]) expect(event.platform).to eq(:ruby) expect(event.server_name).to eq("foo.local") expect(event.environment).to eq("test") expect(event.release).to eq("721e41770371db95eee98ca2707686226b993eda") expect(event.sdk).to eq("name" => "sentry.ruby", "version" => Sentry::VERSION) end end context 'rack context specified', rack: true do require 'stringio' before do Sentry.init do |config| config.dsn = DUMMY_DSN end Sentry.get_current_scope.set_rack_env( 'REQUEST_METHOD' => 'POST', 'QUERY_STRING' => 'biz=baz', 'HTTP_HOST' => 'localhost', 'SERVER_NAME' => 'localhost', 'SERVER_PORT' => '80', 'HTTP_X_FORWARDED_FOR' => '1.1.1.1, 2.2.2.2', 'HTTP_X_REQUEST_ID' => 'abcd-1234-abcd-1234', 'REMOTE_ADDR' => '192.168.1.1', 'PATH_INFO' => '/lol', 'rack.url_scheme' => 'http', 'rack.input' => StringIO.new('foo=bar') ) end let(:event) do Sentry::Event.new(configuration: Sentry.configuration) end let(:scope) { Sentry.get_current_scope } context "without config.send_default_pii = true" do it "filters out pii data" do scope.apply_to_event(event) expect(event.to_hash[:request]).to eq( env: { 'SERVER_NAME' => 'localhost', 'SERVER_PORT' => '80' }, headers: { 'Host' => 'localhost', 'X-Request-Id' => 'abcd-1234-abcd-1234' }, method: 'POST', query_string: 'biz=baz', url: 'http://localhost/lol', ) expect(event.to_hash[:tags][:request_id]).to eq("abcd-1234-abcd-1234") expect(event.to_hash[:user][:ip_address]).to eq(nil) end it "removes ip address headers" do scope.apply_to_event(event) # doesn't affect scope's rack_env expect(scope.rack_env).to include("REMOTE_ADDR") expect(event.request.headers.keys).not_to include("REMOTE_ADDR") expect(event.request.headers.keys).not_to include("Client-Ip") expect(event.request.headers.keys).not_to include("X-Real-Ip") expect(event.request.headers.keys).not_to include("X-Forwarded-For") end end context "with config.send_default_pii = true" do before do Sentry.configuration.send_default_pii = true end it "adds correct data" do Sentry.get_current_scope.apply_to_event(event) expect(event.to_hash[:request]).to eq( data: { 'foo' => 'bar' }, env: { 'SERVER_NAME' => 'localhost', 'SERVER_PORT' => '80', "REMOTE_ADDR" => "192.168.1.1" }, headers: { 'Host' => 'localhost', "X-Forwarded-For" => "1.1.1.1, 2.2.2.2", "X-Request-Id" => "abcd-1234-abcd-1234" }, method: 'POST', query_string: 'biz=baz', url: 'http://localhost/lol', cookies: {} ) expect(event.to_hash[:tags][:request_id]).to eq("abcd-1234-abcd-1234") expect(event.to_hash[:user][:ip_address]).to eq("2.2.2.2") end context "with config.trusted_proxies = [\"2.2.2.2\"]" do before do Sentry.configuration.trusted_proxies = ["2.2.2.2"] end it "calculates the correct ip address" do Sentry.get_current_scope.apply_to_event(event) expect(event.to_hash[:request]).to eq( :data=>{"foo"=>"bar"}, 
env: { 'SERVER_NAME' => 'localhost', 'SERVER_PORT' => '80', "REMOTE_ADDR" => "192.168.1.1" }, headers: { 'Host' => 'localhost', "X-Forwarded-For" => "1.1.1.1, 2.2.2.2", "X-Request-Id" => "abcd-1234-abcd-1234" }, method: 'POST', query_string: 'biz=baz', url: 'http://localhost/lol', cookies: {} ) expect(event.to_hash[:tags][:request_id]).to eq("abcd-1234-abcd-1234") expect(event.to_hash[:user][:ip_address]).to eq("1.1.1.1") end end end end describe "#initialize_stacktrace_interface" do let(:fixture_root) { File.join(Dir.pwd, "spec", "support") } let(:fixture_file) { File.join(fixture_root, "stacktrace_test_fixture.rb") } let(:configuration) do Sentry::Configuration.new.tap do |config| config.project_root = fixture_root end end let(:backtrace) do [ "#{fixture_file}:6:in `bar'", "#{fixture_file}:2:in `foo'" ] end subject do described_class.new(configuration: configuration) end it "returns an array of StacktraceInterface::Frames with correct information" do interface = subject.initialize_stacktrace_interface(backtrace) expect(interface).to be_a(Sentry::StacktraceInterface) frames = interface.frames first_frame = frames.first expect(first_frame.filename).to match(/stacktrace_test_fixture.rb/) expect(first_frame.function).to eq("foo") expect(first_frame.lineno).to eq(2) expect(first_frame.pre_context).to eq([nil, nil, "def foo\n"]) expect(first_frame.context_line).to eq(" bar\n") expect(first_frame.post_context).to eq(["end\n", "\n", "def bar\n"]) second_frame = frames.last expect(second_frame.filename).to match(/stacktrace_test_fixture.rb/) expect(second_frame.function).to eq("bar") expect(second_frame.lineno).to eq(6) expect(second_frame.pre_context).to eq(["end\n", "\n", "def bar\n"]) expect(second_frame.context_line).to eq(" baz\n") expect(second_frame.post_context).to eq(["end\n", nil, nil]) end end describe '#to_json_compatible' do subject do Sentry::Event.new(configuration: configuration).tap do |event| event.extra = { 'my_custom_variable' => 'value', 'date' => Time.utc(0), 'anonymous_module' => Class.new } end end it "should coerce non-JSON-compatible types" do json = subject.to_json_compatible expect(json["extra"]['my_custom_variable']).to eq('value') expect(json["extra"]['date']).to be_a(String) expect(json["extra"]['anonymous_module']).not_to be_a(Class) end end end
34.170732
129
0.61556
61e3865d536511aa4d8439daf659981a409d9bca
1,850
# cf. https://github.com/rubyzip/rubyzip/tree/d07b13a6cf0a413e010c48879aebd9576bfb5f68#zipping-a-directory-recursively

require 'zip'

# This is a simple example which uses rubyzip to
# recursively generate a zip file from the contents of
# a specified directory. The directory itself is not
# included in the archive, rather just its contents.
#
# Usage:
#   directory_to_zip = "/tmp/input"
#   output_file = "/tmp/out.zip"
#   zf = ZipFileGenerator.new(directory_to_zip, output_file)
#   zf.write()
class ZipFileGenerator
  # Initialize with the directory to zip and the location of the output archive.
  def initialize(input_dir, output_file)
    @input_dir = input_dir
    @output_file = output_file
  end

  # Zip the input directory.
  def write
    entries = Dir.children(@input_dir)

    ::Zip::File.open(@output_file, ::Zip::File::CREATE) do |zipfile|
      write_entries entries, '', zipfile
    end
  end

  private

  # A helper method to make the recursion work.
  def write_entries(entries, path, zipfile)
    entries.each do |e|
      zipfile_path = path == '' ? e : File.join(path, e)
      disk_file_path = File.join(@input_dir, zipfile_path)

      puts "Deflating #{disk_file_path}"

      if File.directory? disk_file_path
        recursively_deflate_directory(disk_file_path, zipfile, zipfile_path)
      else
        put_into_archive(disk_file_path, zipfile, zipfile_path)
      end
    end
  end

  def recursively_deflate_directory(disk_file_path, zipfile, zipfile_path)
    zipfile.mkdir zipfile_path
    subdir = Dir.children(disk_file_path)
    write_entries subdir, zipfile_path, zipfile
  end

  def put_into_archive(disk_file_path, zipfile, zipfile_path)
    zipfile.get_output_stream(zipfile_path) do |f|
      f.write(File.open(disk_file_path, 'rb').read)
    end
  end
end
31.355932
118
0.708649
4ae5cb0b3446e8327fee3b14d715125f0e7a701f
2,440
module Helpers
  def self.random_lowercase_name
    [*('a'..'z')].sample(8).join
  end

  class ServiceFactory
    private_class_method :new
    attr_reader :service

    def self.new_service(client)
      new(client).service
    end

    private

    def initialize(client)
      @service = create_service(client)
      create_methods
      create_metrics
      plans = create_application_plans
      create_application_plan_limits(plans)
      create_mapping_rules
    end

    def create_service(client)
      service_name = "API_TEST_#{Helpers.random_lowercase_name}_#{Time.now.getutc.to_i}"
      system_name = service_name.delete("\s").downcase
      service_obj = { 'name' => service_name }
      ThreeScaleToolbox::Entities::Service.create(
        remote: client, service: service_obj, system_name: system_name
      )
    end

    def create_methods
      hits_id = service.hits['id']
      3.times.each do
        method = {
          'system_name' => Helpers.random_lowercase_name,
          'friendly_name' => Helpers.random_lowercase_name
        }
        service.create_method(hits_id, method)
      end
    end

    def create_metrics
      4.times.each do
        name = Helpers.random_lowercase_name
        metric = { 'name' => name, 'system_name' => name, 'unit' => '1' }
        service.create_metric(metric)
      end
    end

    def create_application_plans
      Array.new(2) do
        name = Helpers.random_lowercase_name
        application_plan = {
          'name' => name, 'state' => 'published', 'default' => false,
          'custom' => false, 'system_name' => name
        }
        service.create_application_plan(application_plan)
      end
    end

    def create_application_plan_limits(plans)
      hits_id = service.hits['id']
      plans.each do |plan|
        # limits (only limits for hits metric)
        %w[day week month year].each do |period|
          limit = { 'period' => period, 'value' => 10_000 }
          service.create_application_plan_limit(plan.fetch('id'), hits_id, limit)
        end
      end
    end

    def create_mapping_rules
      hits_id = service.hits['id']
      # mapping rules (only mapping rules for hits metric)
      2.times.each do |idx|
        mapping_rule = {
          'metric_id' => hits_id, 'pattern' => "/rule#{idx}",
          'http_method' => 'GET', 'delta' => 1
        }
        service.create_mapping_rule(mapping_rule)
      end
    end
  end
end
27.41573
88
0.620082
acab7dd32d1a845a4bf42c08a53f1bff79f85866
3,994
# frozen_string_literal: true

require "json"

class ByteInterpreter
  ##
  # The Instructions class represents a collection of ordered operations to
  # perform on an IO stream. This class is used by ByteInterpreter to either
  # interpret or encode bytes in a rigid, structured way.
  #
  # At the most basic level, Instructions are just an Array filled with Hashes,
  # each Hash having exactly four keys -- :key, :type, :size, and :signed. Each
  # key has requirements for its value:
  # - :key -- Must be a value easily convertible to a Symbol object.
  # - :type -- Must match one of the elements in the constant VALID_TYPES.
  # - :size -- For binary types ("bin"), must match one of the elements in the
  #            constant VALID_BIN_SIZES. String types ("str") must be a
  #            positive Integer.
  # - :signed -- For binary types ("bin"), must be the +true+ or +false+
  #              literals. String types ("str") ignore this value completely.
  #
  # Writing your own method for loading instructions is fairly simple. The
  # method must call #add_field in the desired order of instruction execution,
  # passing to it a Hash that conforms to the requirements above. The method
  # must also be named +load_from_type+, where +type+ is what will be
  # passed into ByteInterpreter#load_instructions. See #load_from_json for an
  # example of this.
  class Instructions
    ##
    # Raised by instruction validation methods.
    class ValidationError < StandardError
    end

    ##
    # Valid values for the :type key in the instructions Hash.
    VALID_TYPES = %w[bin str].freeze

    ##
    # Valid values for binary types for the :size key in the instructions Hash.
    VALID_BIN_SIZES = [1, 2, 4, 8].freeze

    ##
    # Keys that are in every properly-formatted instructions Hash.
    FIELD_NAMES = %i[key type size signed].freeze

    ##
    # Creates a blank Instructions object.
    def initialize
      @data = []
    end

    ##
    # Passes the given block to the internal Array's #each method.
    def each(&block)
      @data.each(&block)
    end

    ##
    # Clears all loaded instructions.
    def clear
      @data.clear
    end

    ##
    # Adds the given Hash to the end of the list of instructions, and validates
    # it.
    # @param field [Hash] A properly-formatted instructions Hash. See the
    #   documentation for this class on the appropriate format for this Hash.
    # @return [void]
    def add_field(field:)
      @data.push(field.select { |k, _v| FIELD_NAMES.include? k })
      validate_field(field: @data.last)
    end

    ##
    # Loads instructions from a JSON file. The JSON file should contain a
    # top-level array, with each element being an object with the appropriate
    # keys and values. Keys are automatically converted from strings to
    # symbols, but boolean values are not converted from strings to literals.
    # @param filename [String] The filename of the JSON file to load,
    #   **including** the filetype extension, if any.
    # @return [void]
    def load_from_json(filename:)
      json_fields = JSON.parse(File.open(filename, "rt", &:read), symbolize_names: true)

      json_fields.each do |field|
        add_field(field: field)
      end
    end

    ##
    # Validates a given Hash to ensure it conforms to the instruction format.
    # @param field [Hash] The Hash object to evaluate.
    # @return [Boolean]
    # @raise [ValidationError] if the Hash does not conform to the instruction
    #   format
    def validate_field(field:)
      unless VALID_TYPES.include? field[:type]
        raise ValidationError, "Illegal type defined at key \"#{field[:key]}\": #{field[:type]}.
          Valid types are #{VALID_TYPES}."
      end

      if (field[:type] == "bin") && !VALID_BIN_SIZES.include?(field[:size])
        raise ValidationError, "Illegal size defined for binary field at key \"#{field[:key]}\": #{field[:size]}.
          Valid sizes for binary values are #{VALID_BIN_SIZES}."
      end

      true
    end
  end
end
35.981982
113
0.673761